##// END OF EJS Templates
global: use raw strings for regular expressions with escapes...
Gregory Szorc -
r41673:bd3f03d8 default
parent child Browse files
Show More
@@ -1,197 +1,197 b''
1 1 # blackbox.py - log repository events to a file for post-mortem debugging
2 2 #
3 3 # Copyright 2010 Nicolas Dumazet
4 4 # Copyright 2013 Facebook, Inc.
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """log repository events to a blackbox for debugging
10 10
11 11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 12 The events that get logged can be configured via the blackbox.track config key.
13 13
14 14 Examples::
15 15
16 16 [blackbox]
17 17 track = *
18 18 # dirty is *EXPENSIVE* (slow);
19 19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
20 20 dirty = True
21 21 # record the source of log messages
22 22 logsource = True
23 23
24 24 [blackbox]
25 25 track = command, commandfinish, commandexception, exthook, pythonhook
26 26
27 27 [blackbox]
28 28 track = incoming
29 29
30 30 [blackbox]
31 31 # limit the size of a log file
32 32 maxsize = 1.5 MB
33 33 # rotate up to N log files when the current one gets too big
34 34 maxfiles = 3
35 35
36 36 [blackbox]
37 37 # Include nanoseconds in log entries with %f (see Python function
38 38 # datetime.datetime.strftime)
39 39 date-format = '%Y-%m-%d @ %H:%M:%S.%f'
40 40
41 41 """
42 42
43 43 from __future__ import absolute_import
44 44
45 45 import re
46 46
47 47 from mercurial.i18n import _
48 48 from mercurial.node import hex
49 49
50 50 from mercurial import (
51 51 encoding,
52 52 loggingutil,
53 53 registrar,
54 54 )
55 55 from mercurial.utils import (
56 56 dateutil,
57 57 procutil,
58 58 )
59 59
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# include a '+' marker for a dirty working directory (expensive; see
# module docstring)
configitem('blackbox', 'dirty',
    default=False,
)
# rotate blackbox.log when it grows beyond this size
configitem('blackbox', 'maxsize',
    default='1 MB',
)
# record the event name as the source of each log message
configitem('blackbox', 'logsource',
    default=False,
)
# number of rotated log files to keep
configitem('blackbox', 'maxfiles',
    default=7,
)
# event names to record; '*' tracks everything
configitem('blackbox', 'track',
    default=lambda: ['*'],
)
# strftime-style format for log entry timestamps
configitem('blackbox', 'date-format',
    default='%Y/%m/%d %H:%M:%S',
)

# Shared proxy whose .logger target is filled in by reposetup() and
# blackboxlogger._log(); gives ui instances a fallback logger.
_lastlogger = loggingutil.proxylogger()
92 92
class blackboxlogger(object):
    """Logger that appends ui.log() events to .hg/blackbox.log.

    Configuration (tracked events, rotation limits) is read once at
    construction time from the given ui.
    """

    def __init__(self, ui, repo):
        # repository whose .hg/blackbox.log receives the entries
        self._repo = repo
        # event names to record; b'*' means "record everything"
        self._trackedevents = set(ui.configlist('blackbox', 'track'))
        self._maxfiles = ui.configint('blackbox', 'maxfiles')
        self._maxsize = ui.configbytes('blackbox', 'maxsize')
        # re-entrancy guard for log() (see comment there)
        self._inlog = False

    def tracked(self, event):
        """Return True if `event` should be written to the blackbox."""
        return b'*' in self._trackedevents or event in self._trackedevents

    def log(self, ui, event, msg, opts):
        # self._log() -> ctx.dirty() may create new subrepo instance, which
        # ui is derived from baseui. So the recursion guard in ui.log()
        # doesn't work as it's local to the ui instance.
        if self._inlog:
            return
        self._inlog = True
        try:
            self._log(ui, event, msg, opts)
        finally:
            self._inlog = False

    def _log(self, ui, event, msg, opts):
        """Format one entry and append it to blackbox.log.

        On IOError/OSError, tracking is disabled on this logger so a broken
        log destination cannot keep failing on every event.
        """
        default = ui.configdate('devel', 'default-date')
        date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
        user = procutil.getuser()
        pid = '%d' % procutil.getpid()
        changed = ''
        ctx = self._repo[None]
        parents = ctx.parents()
        rev = ('+'.join([hex(p.node()) for p in parents]))
        # dirty detection is opt-in because it is expensive (module docstring)
        if (ui.configbool('blackbox', 'dirty') and
            ctx.dirty(missing=True, merge=False, branch=False)):
            changed = '+'
        if ui.configbool('blackbox', 'logsource'):
            src = ' [%s]' % event
        else:
            src = ''
        try:
            fmt = '%s %s @%s%s (%s)%s> %s'
            args = (date, user, rev, changed, pid, src, msg)
            with loggingutil.openlogfile(
                    ui, self._repo.vfs, name='blackbox.log',
                    maxfiles=self._maxfiles, maxsize=self._maxsize) as fp:
                fp.write(fmt % args)
        except (IOError, OSError) as err:
            # deactivate this to avoid failed logging again
            self._trackedevents.clear()
            ui.debug('warning: cannot write to blackbox.log: %s\n' %
                     encoding.strtolocal(err.strerror))
            return
        # remember the last logger that successfully wrote an entry
        _lastlogger.logger = self
146 146
def uipopulate(ui):
    """Route this ui's 'blackbox' log events through the shared proxy.

    The proxy's actual target logger is assigned by reposetup() and by
    blackboxlogger._log() after a successful write.
    """
    ui.setlogger(b'blackbox', _lastlogger)
149 149
def reposetup(ui, repo):
    """Per-repository setup: attach a blackboxlogger for this repo's log."""
    # During 'hg pull' a httppeer repo is created to represent the remote repo.
    # It doesn't have a .hg directory to put a blackbox in, so we don't do
    # the blackbox setup for it.
    if not repo.local():
        return

    # Since blackbox.log is stored in the repo directory, the logger should be
    # instantiated per repository.
    logger = blackboxlogger(ui, repo)
    ui.setlogger(b'blackbox', logger)

    # Set _lastlogger even if ui.log is not called. This gives blackbox a
    # fallback place to log
    if _lastlogger.logger is None:
        _lastlogger.logger = logger

    # allow blackbox.log to be written without holding the wlock
    repo._wlockfreeprefix.add('blackbox.log')
168 168
@command('blackbox',
    [('l', 'limit', 10, _('the number of events to show')),
    ],
    _('hg blackbox [OPTION]...'),
    helpcategory=command.CATEGORY_MAINTENANCE,
    helpbasic=True)
def blackbox(ui, repo, *revs, **opts):
    '''view the recent repository events

    Print the most recent --limit commands recorded in .hg/blackbox.log,
    together with any events logged between them, oldest first.
    '''

    if not repo.vfs.exists('blackbox.log'):
        return

    limit = opts.get(r'limit')
    # Close the log file deterministically instead of leaking the handle
    # until garbage collection.
    with repo.vfs('blackbox.log', 'r') as fp:
        lines = fp.read().split('\n')

    count = 0
    output = []
    for line in reversed(lines):
        if count >= limit:
            break

        # count the commands by matching lines like: 2013/01/23 19:13:36 root>
        if re.match(br'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
            count += 1
        output.append(line)

    ui.status('\n'.join(reversed(output)))
@@ -1,72 +1,72 b''
1 1 # commitextras.py
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''adds a new flag extras to commit (ADVANCED)'''
9 9
10 10 from __future__ import absolute_import
11 11
12 12 import re
13 13
14 14 from mercurial.i18n import _
15 15 from mercurial import (
16 16 commands,
17 17 error,
18 18 extensions,
19 19 registrar,
20 20 util,
21 21 )
22 22
cmdtable = {}
command = registrar.command(cmdtable)
testedwith = 'ships-with-hg-core'

# Extras keys that Mercurial or common extensions write themselves; the
# --extra option refuses to overwrite any of these.
usedinternally = {
    'amend_source',
    'branch',
    'close',
    'histedit_source',
    'topic',
    'rebase_source',
    'intermediate-source',
    '__touch-noise__',
    'source',
    'transplant_source',
}
39 39
def extsetup(ui):
    """Extension setup: wrap 'hg commit' and add the --extra option to it."""
    entry = extensions.wrapcommand(commands.table, 'commit', _commit)
    options = entry[1]
    options.append(('', 'extra', [],
                    _('set a changeset\'s extra values'), _("KEY=VALUE")))
45 45
def _commit(orig, ui, repo, *pats, **opts):
    """Wrapper for 'hg commit' that injects --extra KEY=VALUE pairs.

    Temporarily replaces the repo class with a subclass whose commit()
    validates each --extra argument and stores it into the commit's extras
    before delegating to the original implementation.
    """
    if util.safehasattr(repo, 'unfiltered'):
        repo = repo.unfiltered()
    class repoextra(repo.__class__):
        def commit(self, *innerpats, **inneropts):
            # opts is closed over from the wrapped command invocation
            extras = opts.get(r'extra')
            for raw in extras:
                if '=' not in raw:
                    msg = _("unable to parse '%s', should follow "
                            "KEY=VALUE format")
                    raise error.Abort(msg % raw)
                k, v = raw.split('=', 1)
                if not k:
                    msg = _("unable to parse '%s', keys can't be empty")
                    raise error.Abort(msg % raw)
                if re.search(br'[^\w-]', k):
                    msg = _("keys can only contain ascii letters, digits,"
                            " '_' and '-'")
                    raise error.Abort(msg)
                if k in usedinternally:
                    msg = _("key '%s' is used internally, can't be set "
                            "manually")
                    raise error.Abort(msg % k)
                inneropts[r'extra'][k] = v
            return super(repoextra, self).commit(*innerpats, **inneropts)
    repo.__class__ = repoextra
    return orig(ui, repo, *pats, **opts)
@@ -1,965 +1,965 b''
1 1 # Mercurial built-in replacement for cvsps.
2 2 #
3 3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import functools
10 10 import os
11 11 import re
12 12
13 13 from mercurial.i18n import _
14 14 from mercurial import (
15 15 encoding,
16 16 error,
17 17 hook,
18 18 pycompat,
19 19 util,
20 20 )
21 21 from mercurial.utils import (
22 22 dateutil,
23 23 procutil,
24 24 stringutil,
25 25 )
26 26
27 27 pickle = util.pickle
28 28
class logentry(object):
    '''One CVS file revision parsed from (r)log output.

    Attributes:
      .author - author name as CVS knows it
      .branch - name of branch this revision is on
      .branches - revision tuple of branches starting at this revision
      .comment - commit message
      .commitid - CVS commitid or None
      .date - the commit date as a (time, tz) tuple
      .dead - true if file revision is dead
      .file - Name of file
      .lines - a tuple (+lines, -lines) or None
      .parent - Previous revision of this entry
      .rcs - name of file as returned from CVS
      .revision - revision number as tuple
      .tags - list of tags on the file
      .synthetic - is this a synthetic "file ... added on ..." revision?
      .mergepoint - the branch that has been merged from (if present in
                    rlog output) or None
      .branchpoints - the branches that start at the current entry or empty
    '''
    def __init__(self, **entries):
        # apply the default first so keyword arguments may override it
        self.synthetic = False
        for name, value in entries.items():
            setattr(self, name, value)

    def __repr__(self):
        fields = [r"%s=%r" % (name, self.__dict__[name])
                  for name in sorted(self.__dict__)]
        return r"%s(%s)" % (type(self).__name__, r", ".join(fields))
56 56
class logerror(Exception):
    """Raised when CVS (r)log output reports a fatal problem."""
59 59
def getrepopath(cvspath):
    """Return the repository path from a CVS path.

    A CVS path has the shape
    [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
    The repository path is everything from the first '/' that follows the
    '@' sign in the last colon-separated component (or from the first '/'
    at all when there is no '@').

    >>> getrepopath(b'/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'user@server/path/to/repository')
    '/path/to/repository'
    """
    tail = cvspath.split(':')[-1]
    at = tail.find('@')
    searchfrom = at if at != -1 else 0
    return tail[tail.find('/', searchfrom):]
98 98
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog

    Runs ``cvs rlog`` (or ``cvs log`` when rlog is False) on `directory`
    and parses the output with a state machine into a list of logentry
    objects, which is returned.  When `cache` is set, previously parsed
    entries are read from / written to a pickle under ~/.hg.cvsps
    (cache == 'update' reuses an existing cache and only fetches newer
    revisions).
    '''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []      # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile(b'RCS file: (.+)$')
    re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
    re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
                       b"(can't create temporary directory.+)$")
    re_10 = re.compile(b'Working file: (.+)$')
    re_20 = re.compile(b'symbolic names:')
    re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
    re_31 = re.compile(b'----------------------------$')
    re_32 = re.compile(b'======================================='
                       b'======================================$')
    re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       br'(\s+commitid:\s+([^;]+);)?'
                       br'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile(b'branches: (.+);$')

    file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')

    prefix = ''   # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(pycompat.ossep):
            prefix += pycompat.ossep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS','Root'), 'rb').read().strip()
        except IOError:
            pass

    if not root:
        root = encoding.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile, 'rb'))
            for e in oldlog:
                if not (util.safehasattr(e, 'branchpoints') and
                        util.safehasattr(e, 'commitid') and
                        util.safehasattr(e, 'mergepoint')):
                    ui.status(_('ignoring old cache\n'))
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception as e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
            date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    branchmap = {} # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False # set when a new record can be appended

    cmd = [procutil.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = procutil.popen(' '.join(cmd), 'rb')
    # one-line lookahead (`peek`) is needed to disambiguate separator lines
    peek = util.fromnativeeol(pfp.readline())
    while True:
        line = peek
        if line == '':
            break
        peek = util.fromnativeeol(pfp.readline())
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs),
                         file=scache(filename),
                         revision=tuple([int(x) for x in
                                         match.group(1).split('.')]),
                         branches=[],
                         parent=None,
                         commitid=None,
                         mergepoint=None,
                         branchpoints=set())

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
                                            '%Y/%m/%d %H:%M:%S',
                                            '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7): # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9): # cvsnt mergepoint
                myrev = match.group(10).split('.')
                if len(myrev) == 2: # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, ('unknown branch: %s'
                                                % e.mergepoint)
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                              for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith('\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and
              e.dead and
              e.revision[-1] == 1 and # 1.1 or 1.1.x.1
              len(e.comment) == 1 and
              file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n'
                     % (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2: # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1): # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            # remember the original RCS path so cached entries can be
            # re-matched when a file moved into or out of the Attic
            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(stringutil.ellipsis('%d %s' % (len(log), e.file), 80)
                          + '\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace('/Attic/', '/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(_('log cache overlaps with new log entries,'
                                 ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'wb'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    encodings = ui.configlist('convert', 'cvsps.logencoding')
    if encodings:
        def revstr(r):
            # this is needed, because logentry.revision is a tuple of "int"
            # (e.g. (1, 2) for "1.2")
            return '.'.join(pycompat.maplist(pycompat.bytestr, r))

        # try each configured encoding in turn, keeping the first that
        # decodes the comment; abort if none does
        for entry in log:
            comment = entry.comment
            for e in encodings:
                try:
                    entry.comment = comment.decode(
                        pycompat.sysstr(e)).encode('utf-8')
                    if ui.debugflag:
                        ui.debug("transcoding by %s: %s of %s\n" %
                                 (e, revstr(entry.revision), entry.file))
                    break
                except UnicodeDecodeError:
                    pass # try next encoding
                except LookupError as inst: # unknown encoding, maybe
                    raise error.Abort(inst,
                                      hint=_('check convert.cvsps.logencoding'
                                             ' configuration'))
            else:
                raise error.Abort(_("no encoding can transcode"
                                    " CVS log message for %s of %s")
                                  % (revstr(entry.revision), entry.file),
                                  hint=_('check convert.cvsps.logencoding'
                                         ' configuration'))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
535 535
536 536
class changeset(object):
    '''A group of CVS log entries forming one changeset.

    Attributes:
      .id - integer identifying this changeset (list index)
      .author - author name as CVS knows it
      .branch - name of branch this changeset is on, or None
      .comment - commit message
      .commitid - CVS commitid or None
      .date - the commit date as a (time,tz) tuple
      .entries - list of logentry objects in this changeset
      .parents - list of one or two parent changesets
      .tags - list of tags on this changeset
      .synthetic - from synthetic revision "file ... added on branch ..."
      .mergepoint- the branch that has been merged from or None
      .branchpoints- the branches that start at the current entry or empty
    '''
    def __init__(self, **entries):
        # apply the defaults first so keyword arguments may override them
        self.id = None
        self.synthetic = False
        for name, value in entries.items():
            setattr(self, name, value)

    def __repr__(self):
        fields = ["%s=%r" % (name, self.__dict__[name])
                  for name in sorted(self.__dict__)]
        return "%s(%s)" % (type(self).__name__, ", ".join(fields))
560 560
561 561 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
562 562 '''Convert log into changesets.'''
563 563
564 564 ui.status(_('creating changesets\n'))
565 565
566 566 # try to order commitids by date
567 567 mindate = {}
568 568 for e in log:
569 569 if e.commitid:
570 570 if e.commitid not in mindate:
571 571 mindate[e.commitid] = e.date
572 572 else:
573 573 mindate[e.commitid] = min(e.date, mindate[e.commitid])
574 574
575 575 # Merge changesets
576 576 log.sort(key=lambda x: (mindate.get(x.commitid, (-1, 0)),
577 577 x.commitid or '', x.comment,
578 578 x.author, x.branch or '', x.date, x.branchpoints))
579 579
580 580 changesets = []
581 581 files = set()
582 582 c = None
583 583 for i, e in enumerate(log):
584 584
585 585 # Check if log entry belongs to the current changeset or not.
586 586
587 587 # Since CVS is file-centric, two different file revisions with
588 588 # different branchpoints should be treated as belonging to two
589 589 # different changesets (and the ordering is important and not
590 590 # honoured by cvsps at this point).
591 591 #
592 592 # Consider the following case:
593 593 # foo 1.1 branchpoints: [MYBRANCH]
594 594 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
595 595 #
596 596 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
597 597 # later version of foo may be in MYBRANCH2, so foo should be the
598 598 # first changeset and bar the next and MYBRANCH and MYBRANCH2
599 599 # should both start off of the bar changeset. No provisions are
600 600 # made to ensure that this is, in fact, what happens.
601 601 if not (c and e.branchpoints == c.branchpoints and
602 602 (# cvs commitids
603 603 (e.commitid is not None and e.commitid == c.commitid) or
604 604 (# no commitids, use fuzzy commit detection
605 605 (e.commitid is None or c.commitid is None) and
606 606 e.comment == c.comment and
607 607 e.author == c.author and
608 608 e.branch == c.branch and
609 609 ((c.date[0] + c.date[1]) <=
610 610 (e.date[0] + e.date[1]) <=
611 611 (c.date[0] + c.date[1]) + fuzz) and
612 612 e.file not in files))):
613 613 c = changeset(comment=e.comment, author=e.author,
614 614 branch=e.branch, date=e.date,
615 615 entries=[], mergepoint=e.mergepoint,
616 616 branchpoints=e.branchpoints, commitid=e.commitid)
617 617 changesets.append(c)
618 618
619 619 files = set()
620 620 if len(changesets) % 100 == 0:
621 621 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
622 622 ui.status(stringutil.ellipsis(t, 80) + '\n')
623 623
624 624 c.entries.append(e)
625 625 files.add(e.file)
626 626 c.date = e.date # changeset date is date of latest commit in it
627 627
628 628 # Mark synthetic changesets
629 629
630 630 for c in changesets:
631 631 # Synthetic revisions always get their own changeset, because
632 632 # the log message includes the filename. E.g. if you add file3
633 633 # and file4 on a branch, you get four log entries and three
634 634 # changesets:
635 635 # "File file3 was added on branch ..." (synthetic, 1 entry)
636 636 # "File file4 was added on branch ..." (synthetic, 1 entry)
637 637 # "Add file3 and file4 to fix ..." (real, 2 entries)
638 638 # Hence the check for 1 entry here.
639 639 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
640 640
641 641 # Sort files in each changeset
642 642
643 643 def entitycompare(l, r):
644 644 'Mimic cvsps sorting order'
645 645 l = l.file.split('/')
646 646 r = r.file.split('/')
647 647 nl = len(l)
648 648 nr = len(r)
649 649 n = min(nl, nr)
650 650 for i in range(n):
651 651 if i + 1 == nl and nl < nr:
652 652 return -1
653 653 elif i + 1 == nr and nl > nr:
654 654 return +1
655 655 elif l[i] < r[i]:
656 656 return -1
657 657 elif l[i] > r[i]:
658 658 return +1
659 659 return 0
660 660
661 661 for c in changesets:
662 662 c.entries.sort(key=functools.cmp_to_key(entitycompare))
663 663
664 664 # Sort changesets by date
665 665
666 666 odd = set()
667 667 def cscmp(l, r):
668 668 d = sum(l.date) - sum(r.date)
669 669 if d:
670 670 return d
671 671
672 672 # detect vendor branches and initial commits on a branch
673 673 le = {}
674 674 for e in l.entries:
675 675 le[e.rcs] = e.revision
676 676 re = {}
677 677 for e in r.entries:
678 678 re[e.rcs] = e.revision
679 679
680 680 d = 0
681 681 for e in l.entries:
682 682 if re.get(e.rcs, None) == e.parent:
683 683 assert not d
684 684 d = 1
685 685 break
686 686
687 687 for e in r.entries:
688 688 if le.get(e.rcs, None) == e.parent:
689 689 if d:
690 690 odd.add((l, r))
691 691 d = -1
692 692 break
693 693 # By this point, the changesets are sufficiently compared that
694 694 # we don't really care about ordering. However, this leaves
695 695 # some race conditions in the tests, so we compare on the
696 696 # number of files modified, the files contained in each
697 697 # changeset, and the branchpoints in the change to ensure test
698 698 # output remains stable.
699 699
700 700 # recommended replacement for cmp from
701 701 # https://docs.python.org/3.0/whatsnew/3.0.html
702 702 c = lambda x, y: (x > y) - (x < y)
703 703 # Sort bigger changes first.
704 704 if not d:
705 705 d = c(len(l.entries), len(r.entries))
706 706 # Try sorting by filename in the change.
707 707 if not d:
708 708 d = c([e.file for e in l.entries], [e.file for e in r.entries])
709 709 # Try and put changes without a branch point before ones with
710 710 # a branch point.
711 711 if not d:
712 712 d = c(len(l.branchpoints), len(r.branchpoints))
713 713 return d
714 714
715 715 changesets.sort(key=functools.cmp_to_key(cscmp))
716 716
717 717 # Collect tags
718 718
719 719 globaltags = {}
720 720 for c in changesets:
721 721 for e in c.entries:
722 722 for tag in e.tags:
723 723 # remember which is the latest changeset to have this tag
724 724 globaltags[tag] = c
725 725
726 726 for c in changesets:
727 727 tags = set()
728 728 for e in c.entries:
729 729 tags.update(e.tags)
730 730 # remember tags only if this is the latest changeset to have it
731 731 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
732 732
733 733 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
734 734 # by inserting dummy changesets with two parents, and handle
735 735 # {{mergefrombranch BRANCHNAME}} by setting two parents.
736 736
737 737 if mergeto is None:
738 738 mergeto = br'{{mergetobranch ([-\w]+)}}'
739 739 if mergeto:
740 740 mergeto = re.compile(mergeto)
741 741
742 742 if mergefrom is None:
743 743 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
744 744 if mergefrom:
745 745 mergefrom = re.compile(mergefrom)
746 746
747 747 versions = {} # changeset index where we saw any particular file version
748 748 branches = {} # changeset index where we saw a branch
749 749 n = len(changesets)
750 750 i = 0
751 751 while i < n:
752 752 c = changesets[i]
753 753
754 754 for f in c.entries:
755 755 versions[(f.rcs, f.revision)] = i
756 756
757 757 p = None
758 758 if c.branch in branches:
759 759 p = branches[c.branch]
760 760 else:
761 761 # first changeset on a new branch
762 762 # the parent is a changeset with the branch in its
763 763 # branchpoints such that it is the latest possible
764 764 # commit without any intervening, unrelated commits.
765 765
766 766 for candidate in pycompat.xrange(i):
767 767 if c.branch not in changesets[candidate].branchpoints:
768 768 if p is not None:
769 769 break
770 770 continue
771 771 p = candidate
772 772
773 773 c.parents = []
774 774 if p is not None:
775 775 p = changesets[p]
776 776
777 777 # Ensure no changeset has a synthetic changeset as a parent.
778 778 while p.synthetic:
779 779 assert len(p.parents) <= 1, \
780 780 _('synthetic changeset cannot have multiple parents')
781 781 if p.parents:
782 782 p = p.parents[0]
783 783 else:
784 784 p = None
785 785 break
786 786
787 787 if p is not None:
788 788 c.parents.append(p)
789 789
790 790 if c.mergepoint:
791 791 if c.mergepoint == 'HEAD':
792 792 c.mergepoint = None
793 793 c.parents.append(changesets[branches[c.mergepoint]])
794 794
795 795 if mergefrom:
796 796 m = mergefrom.search(c.comment)
797 797 if m:
798 798 m = m.group(1)
799 799 if m == 'HEAD':
800 800 m = None
801 801 try:
802 802 candidate = changesets[branches[m]]
803 803 except KeyError:
804 804 ui.warn(_("warning: CVS commit message references "
805 805 "non-existent branch %r:\n%s\n")
806 806 % (pycompat.bytestr(m), c.comment))
807 807 if m in branches and c.branch != m and not candidate.synthetic:
808 808 c.parents.append(candidate)
809 809
810 810 if mergeto:
811 811 m = mergeto.search(c.comment)
812 812 if m:
813 813 if m.groups():
814 814 m = m.group(1)
815 815 if m == 'HEAD':
816 816 m = None
817 817 else:
818 818 m = None # if no group found then merge to HEAD
819 819 if m in branches and c.branch != m:
820 820 # insert empty changeset for merge
821 821 cc = changeset(
822 822 author=c.author, branch=m, date=c.date,
823 823 comment='convert-repo: CVS merge from branch %s'
824 824 % c.branch,
825 825 entries=[], tags=[],
826 826 parents=[changesets[branches[m]], c])
827 827 changesets.insert(i + 1, cc)
828 828 branches[m] = i + 1
829 829
830 830 # adjust our loop counters now we have inserted a new entry
831 831 n += 1
832 832 i += 2
833 833 continue
834 834
835 835 branches[c.branch] = i
836 836 i += 1
837 837
838 838 # Drop synthetic changesets (safe now that we have ensured no other
839 839 # changesets can have them as parents).
840 840 i = 0
841 841 while i < len(changesets):
842 842 if changesets[i].synthetic:
843 843 del changesets[i]
844 844 else:
845 845 i += 1
846 846
847 847 # Number changesets
848 848
849 849 for i, c in enumerate(changesets):
850 850 c.id = i + 1
851 851
852 852 if odd:
853 853 for l, r in odd:
854 854 if l.id is not None and r.id is not None:
855 855 ui.warn(_('changeset %d is both before and after %d\n')
856 856 % (l.id, r.id))
857 857
858 858 ui.status(_('%d changeset entries\n') % len(changesets))
859 859
860 860 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
861 861
862 862 return changesets
863 863
864 864
def debugcvsps(ui, *args, **opts):
    '''Read CVS rlog for current directory or named path in
    repository, and convert the log to changesets based on matching
    commit log entries and dates.
    '''
    opts = pycompat.byteskwargs(opts)
    # Translate the mutually exclusive cache flags into the mode string
    # understood by createlog().
    if opts["new_cache"]:
        cache = "write"
    elif opts["update_cache"]:
        cache = "update"
    else:
        cache = None

    revisions = opts["revisions"]

    try:
        if args:
            log = []
            for d in args:
                log += createlog(ui, d, root=opts["root"], cache=cache)
        else:
            log = createlog(ui, root=opts["root"], cache=cache)
    except logerror as e:
        ui.write("%r\n"%e)
        return

    changesets = createchangeset(ui, log, opts["fuzz"])
    del log  # the raw rlog entries can be large; release them before printing

    # Print changesets (optionally filtered)

    # 'off' stays truthy while we are skipping changesets that come before
    # the first requested revision; it is cleared once the start tag or
    # changeset id is reached.
    off = len(revisions)
    branches = {} # latest version number in each branch
    ancestors = {} # parent branch
    for cs in changesets:

        if opts["ancestors"]:
            # record, once per branch, which (parent branch, changeset id)
            # the branch forked from
            if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
                                        cs.parents[0].id)
            branches[cs.branch] = cs.id

        # limit by branches
        if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            # bug-for-bug compatibility with cvsps.
            ui.write('---------------------\n')
            ui.write(('PatchSet %d \n' % cs.id))
            ui.write(('Date: %s\n' % dateutil.datestr(cs.date,
                                                      '%Y/%m/%d %H:%M:%S %1%2')))
            ui.write(('Author: %s\n' % cs.author))
            ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
            ui.write(('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
                                        ','.join(cs.tags) or '(none)')))
            if cs.branchpoints:
                ui.write(('Branchpoints: %s \n') %
                         ', '.join(sorted(cs.branchpoints)))
            if opts["parents"] and cs.parents:
                if len(cs.parents) > 1:
                    ui.write(('Parents: %s\n' %
                             (','.join([(b"%d" % p.id) for p in cs.parents]))))
                else:
                    ui.write(('Parent: %d\n' % cs.parents[0].id))

            if opts["ancestors"]:
                # walk up the branch parentage collected above, down to HEAD
                b = cs.branch
                r = []
                while b:
                    b, c = ancestors[b]
                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
                if r:
                    ui.write(('Ancestors: %s\n' % (','.join(r))))

            ui.write(('Log:\n'))
            ui.write('%s\n\n' % cs.comment)
            ui.write(('Members: \n'))
            for f in cs.entries:
                fn = f.file
                if fn.startswith(opts["prefix"]):
                    fn = fn[len(opts["prefix"]):]
                ui.write('\t%s:%s->%s%s \n' % (
                        fn,
                        '.'.join([b"%d" % x for x in f.parent]) or 'INITIAL',
                        '.'.join([(b"%d" % x) for x in f.revision]),
                        ['', '(DEAD)'][f.dead]))
            ui.write('\n')

        # have we seen the start tag?
        if revisions and off:
            if revisions[0] == (b"%d" % cs.id) or \
                revisions[0] in cs.tags:
                off = False

        # see if we reached the end tag
        if len(revisions) > 1 and not off:
            if revisions[1] == (b"%d" % cs.id) or \
                revisions[1] in cs.tags:
                break
@@ -1,3701 +1,3701 b''
1 1 # mq.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''manage a stack of patches
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use :hg:`help COMMAND` for more details)::
18 18
19 19 create new patch qnew
20 20 import existing patch qimport
21 21
22 22 print patch series qseries
23 23 print applied patches qapplied
24 24
25 25 add known patch to applied stack qpush
26 26 remove patch from applied stack qpop
27 27 refresh contents of top applied patch qrefresh
28 28
29 29 By default, mq will automatically use git patches when required to
30 30 avoid losing file mode changes, copy records, binary files or empty
31 31 files creations or deletions. This behavior can be configured with::
32 32
33 33 [mq]
34 34 git = auto/keep/yes/no
35 35
36 36 If set to 'keep', mq will obey the [diff] section configuration while
37 37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 38 'no', mq will override the [diff] section and always generate git or
39 39 regular patches, possibly losing data in the second case.
40 40
41 41 It may be desirable for mq changesets to be kept in the secret phase (see
42 42 :hg:`help phases`), which can be enabled with the following setting::
43 43
44 44 [mq]
45 45 secret = True
46 46
47 47 You will by default be managing a patch queue named "patches". You can
48 48 create other, independent patch queues with the :hg:`qqueue` command.
49 49
50 50 If the working directory contains uncommitted files, qpush, qpop and
51 51 qgoto abort immediately. If -f/--force is used, the changes are
52 52 discarded. Setting::
53 53
54 54 [mq]
55 55 keepchanges = True
56 56
57 57 make them behave as if --keep-changes were passed, and non-conflicting
58 58 local changes will be tolerated and preserved. If incompatible options
59 59 such as -f/--force or --exact are passed, this setting is ignored.
60 60
61 61 This extension used to provide a strip command. This command now lives
62 62 in the strip extension.
63 63 '''
64 64
65 65 from __future__ import absolute_import, print_function
66 66
67 67 import errno
68 68 import os
69 69 import re
70 70 import shutil
71 71 from mercurial.i18n import _
72 72 from mercurial.node import (
73 73 bin,
74 74 hex,
75 75 nullid,
76 76 nullrev,
77 77 short,
78 78 )
79 79 from mercurial import (
80 80 cmdutil,
81 81 commands,
82 82 dirstateguard,
83 83 encoding,
84 84 error,
85 85 extensions,
86 86 hg,
87 87 localrepo,
88 88 lock as lockmod,
89 89 logcmdutil,
90 90 patch as patchmod,
91 91 phases,
92 92 pycompat,
93 93 registrar,
94 94 revsetlang,
95 95 scmutil,
96 96 smartset,
97 97 subrepoutil,
98 98 util,
99 99 vfs as vfsmod,
100 100 )
101 101 from mercurial.utils import (
102 102 dateutil,
103 103 stringutil,
104 104 )
105 105
106 106 release = lockmod.release
107 107 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
108 108
109 109 cmdtable = {}
110 110 command = registrar.command(cmdtable)
111 111 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
112 112 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
113 113 # be specifying the version(s) of Mercurial they are tested with, or
114 114 # leave the attribute unspecified.
115 115 testedwith = 'ships-with-hg-core'
116 116
117 117 configtable = {}
118 118 configitem = registrar.configitem(configtable)
119 119
120 120 configitem('mq', 'git',
121 121 default='auto',
122 122 )
123 123 configitem('mq', 'keepchanges',
124 124 default=False,
125 125 )
126 126 configitem('mq', 'plain',
127 127 default=False,
128 128 )
129 129 configitem('mq', 'secret',
130 130 default=False,
131 131 )
132 132
133 133 # force load strip extension formerly included in mq and import some utility
134 134 try:
135 135 stripext = extensions.find('strip')
136 136 except KeyError:
137 137 # note: load is lazy so we could avoid the try-except,
138 138 # but I (marmoute) prefer this explicit code.
139 139 class dummyui(object):
140 140 def debug(self, msg):
141 141 pass
142 142 def log(self, event, msgfmt, *msgargs, **opts):
143 143 pass
144 144 stripext = extensions.load(dummyui(), 'strip', '')
145 145
146 146 strip = stripext.strip
147 147 checksubstate = stripext.checksubstate
148 148 checklocalchanges = stripext.checklocalchanges
149 149
150 150
151 151 # Patch names looks like unix-file names.
152 152 # They must be joinable with queue directory and result in the patch path.
153 153 normname = util.normpath
154 154
class statusentry(object):
    """One applied patch, as recorded in the mq status file.

    Pairs the patch *name* with the *node* (binary changeset id) it was
    applied as; serialized as '<hex node>:<name>'.
    """
    def __init__(self, node, name):
        self.node, self.name = node, name

    def __bytes__(self):
        # on-disk representation used by queue.savedirty()
        return hex(self.node) + ':' + self.name

    __str__ = encoding.strmethod(__bytes__)
    __repr__ = encoding.strmethod(__bytes__)
164 164
165 165 # The order of the headers in 'hg export' HG patches:
166 166 HGHEADERS = [
167 167 # '# HG changeset patch',
168 168 '# User ',
169 169 '# Date ',
170 170 '# ',
171 171 '# Branch ',
172 172 '# Node ID ',
173 173 '# Parent ', # can occur twice for merges - but that is not relevant for mq
174 174 ]
175 175 # The order of headers in plain 'mail style' patches:
176 176 PLAINHEADERS = {
177 177 'from': 0,
178 178 'date': 1,
179 179 'subject': 2,
180 180 }
181 181
def inserthgheader(lines, header, value):
    """Assuming lines contains a HG patch header, add a header line with value.
    >>> try: inserthgheader([], b'# Date ', b'z')
    ... except ValueError as inst: print("oops")
    oops
    >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
    ['# HG changeset patch', '# Date z']
    >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
    ['# HG changeset patch', '# Date z', '']
    >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
    ['# HG changeset patch', '# User y', '# Date z']
    >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
    ...                b'# User ', b'z')
    ['# HG changeset patch', '# Date x', '# User z']
    >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
    ['# HG changeset patch', '# Date z']
    >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
    ...                b'# Date ', b'z')
    ['# HG changeset patch', '# Date z', '', '# Date y']
    >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
    ...                b'# Date ', b'z')
    ['# HG changeset patch', '# Date z', '# Parent y']
    """
    # raises ValueError when there is no '# HG changeset patch' marker
    # (first doctest above); callers handle that and fall back
    start = lines.index('# HG changeset patch') + 1
    # target position of 'header' in the canonical HGHEADERS ordering
    newindex = HGHEADERS.index(header)
    bestpos = len(lines)
    for i in range(start, len(lines)):
        line = lines[i]
        if not line.startswith('# '):
            # end of the header section: insert no later than here
            bestpos = min(bestpos, i)
            break
        for lineindex, h in enumerate(HGHEADERS):
            if line.startswith(h):
                if lineindex == newindex:
                    # same header already present: replace it in place
                    lines[i] = header + value
                    return lines
                if lineindex > newindex:
                    # first header that should sort after ours
                    bestpos = min(bestpos, i)
                break # next line
    lines.insert(bestpos, header + value)
    return lines
223 223
def insertplainheader(lines, header, value):
    """For lines containing a plain patch header, add a header line with value.
    >>> insertplainheader([], b'Date', b'z')
    ['Date: z']
    >>> insertplainheader([b''], b'Date', b'z')
    ['Date: z', '']
    >>> insertplainheader([b'x'], b'Date', b'z')
    ['Date: z', '', 'x']
    >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
    ['From: y', 'Date: z', '', 'x']
    >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
    [' date : x', 'From: z', '']
    >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
    ['Date: z', '', 'Date: y']
    >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
    ['From: y', 'foo: bar', 'DATE: z', '', 'x']
    """
    # priority of the new header among the known plain (mail-style) headers
    newprio = PLAINHEADERS[header.lower()]
    bestpos = len(lines)
    for i, line in enumerate(lines):
        if ':' in line:
            lheader = line.split(':', 1)[0].strip().lower()
            # unknown header names sort after the one we are inserting
            lprio = PLAINHEADERS.get(lheader, newprio + 1)
            if lprio == newprio:
                # same header already present: replace it in place
                lines[i] = '%s: %s' % (header, value)
                return lines
            if lprio > newprio and i < bestpos:
                bestpos = i
        else:
            # first non-header line: the header section ends here; make
            # sure a blank separator line precedes the message body
            if line:
                lines.insert(i, '')
            if i < bestpos:
                bestpos = i
            break
    lines.insert(bestpos, '%s: %s' % (header, value))
    return lines
260 260
class patchheader(object):
    """Parsed header (message, user, date, ...) of a single patch file.

    Understands both 'hg export' style headers ('# HG changeset patch')
    and plain mail-style headers (From:/Date:/Subject:), and supports
    rewriting individual fields in place.
    """
    def __init__(self, pf, plainmode=False):
        # strip trailing diff-introducing lines from a line list
        def eatdiff(lines):
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        # strip trailing blank lines from a line list
        def eatempty(lines):
            while lines:
                if not lines[-1].strip():
                    del lines[-1]
                else:
                    break

        message = []
        comments = []
        user = None
        date = None
        parent = None
        format = None
        subject = None
        branch = None
        nodeid = None
        # 0 = no diff seen yet, 1 = saw '--- ', 2 = diff confirmed
        diffstart = 0

        for line in open(pf, 'rb'):
            line = line.rstrip()
            if (line.startswith('diff --git')
                or (diffstart and line.startswith('+++ '))):
                # start of the actual diff: header parsing is done
                diffstart = 2
                break
            diffstart = 0 # reset
            if line.startswith("--- "):
                # candidate diff start; confirmed if '+++ ' follows
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif line.startswith("# Parent "):
                    parent = line[9:].lstrip() # handle double trailing space
                elif line.startswith("# Branch "):
                    branch = line[9:]
                elif line.startswith("# Node ID "):
                    nodeid = line[10:]
                elif not line.startswith("# ") and line:
                    # first non-'#' line ends the hg header block
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                message = []
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("Date: ") or
                                           line.startswith("date: "))):
                date = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            # comments keeps every header line verbatim, including the
            # recognized fields, so the file can be rewritten faithfully
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        # Remember the exact starting line of the patch diffs before consuming
        # empty lines, for external use by TortoiseHg and others
        self.diffstartline = len(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, subject)

        self.message = message
        self.comments = comments
        self.user = user
        self.date = date
        self.parent = parent
        # nodeid and branch are for external use by TortoiseHg and others
        self.nodeid = nodeid
        self.branch = branch
        self.haspatch = diffstart > 1
        # plainmode: explicit request, or (no hg header AND mail-style
        # header lines present) -- note 'or' binds looser than 'and'
        self.plainmode = (plainmode or
                          '# HG changeset patch' not in self.comments and
                          any(c.startswith('Date: ') or
                              c.startswith('From: ')
                              for c in self.comments))

    def setuser(self, user):
        # prefer updating an existing hg header; fall back to the plain
        # or freshly-created hg header as appropriate
        try:
            inserthgheader(self.comments, '# User ', user)
        except ValueError:
            if self.plainmode:
                insertplainheader(self.comments, 'From', user)
            else:
                tmp = ['# HG changeset patch', '# User ' + user]
                self.comments = tmp + self.comments
        self.user = user

    def setdate(self, date):
        try:
            inserthgheader(self.comments, '# Date ', date)
        except ValueError:
            if self.plainmode:
                insertplainheader(self.comments, 'Date', date)
            else:
                tmp = ['# HG changeset patch', '# Date ' + date]
                self.comments = tmp + self.comments
        self.date = date

    def setparent(self, parent):
        # plain patches have no parent field; silently skip in that case
        try:
            inserthgheader(self.comments, '# Parent ', parent)
        except ValueError:
            if not self.plainmode:
                tmp = ['# HG changeset patch', '# Parent ' + parent]
                self.comments = tmp + self.comments
        self.parent = parent

    def setmessage(self, message):
        if self.comments:
            self._delmsg()
        self.message = [message]
        if message:
            if self.plainmode and self.comments and self.comments[-1]:
                # blank separator line between headers and message body
                self.comments.append('')
            self.comments.append(message)

    def __bytes__(self):
        s = '\n'.join(self.comments).rstrip()
        if not s:
            return ''
        return s + '\n\n'

    __str__ = encoding.strmethod(__bytes__)

    def _delmsg(self):
        '''Remove existing message, keeping the rest of the comments fields.
        If comments contains 'subject: ', message will prepend
        the field and a blank line.'''
        if self.message:
            subj = 'subject: ' + self.message[0].lower()
            for i in pycompat.xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    # drop the subject line and the following blank line
                    # from the message as well
                    self.message = self.message[2:]
                    break
        ci = 0
        # remove each remaining message line from comments, scanning
        # forward so duplicated lines are removed in order
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]
429 429
def newcommit(repo, phase, *args, **kwargs):
    """Create a changeset while honoring the mq.secret setting.

    Use this instead of repo.commit inside mq whenever a new changeset
    is created, so new mq changesets can be born in the secret phase.
    """
    repo = repo.unfiltered()
    # no explicit phase requested: fall back to mq.secret configuration
    if phase is None and repo.ui.configbool('mq', 'secret'):
        phase = phases.secret
    overrides = {('ui', 'allowemptycommit'): True}
    if phase is not None:
        overrides[('phases', 'new-commit')] = phase
    with repo.ui.configoverride(overrides, 'mq'):
        repo.ui.setconfig('ui', 'allowemptycommit', True)
        return repo.commit(*args, **kwargs)
446 446
class AbortNoCleanup(error.Abort):
    """Abort that tells callers to skip their usual cleanup handling."""
    pass
449 449
450 450 class queue(object):
    def __init__(self, ui, baseui, path, patchdir=None):
        """Initialize queue state rooted at *path* (normally .hg).

        *patchdir* overrides the patch directory; otherwise the active
        queue name stored in 'patches.queue' selects 'patches' or
        'patches-<name>'.
        """
        self.basepath = path
        try:
            with open(os.path.join(path, 'patches.queue'), r'rb') as fh:
                cur = fh.read().rstrip()

            if not cur:
                curpath = os.path.join(path, 'patches')
            else:
                curpath = os.path.join(path, 'patches-' + cur)
        except IOError:
            # no patches.queue file: use the default queue directory
            curpath = os.path.join(path, 'patches')
        self.path = patchdir or curpath
        self.opener = vfsmod.vfs(self.path)
        self.ui = ui
        self.baseui = baseui
        self.applieddirty = False
        self.seriesdirty = False
        self.added = []
        self.seriespath = "series"
        self.statuspath = "status"
        self.guardspath = "guards"
        self.activeguards = None
        self.guardsdirty = False
        # Handle mq.git as a bool with extended values
        gitmode = ui.config('mq', 'git').lower()
        boolmode = stringutil.parsebool(gitmode)
        if boolmode is not None:
            if boolmode:
                gitmode = 'yes'
            else:
                gitmode = 'no'
        self.gitmode = gitmode
        # deprecated config: mq.plain
        self.plainmode = ui.configbool('mq', 'plain')
        self.checkapplied = True
487 487
    @util.propertycache
    def applied(self):
        """List of statusentry objects parsed from the status file."""
        def parselines(lines):
            for l in lines:
                entry = l.split(':', 1)
                if len(entry) > 1:
                    n, name = entry
                    yield statusentry(bin(n), name)
                elif l.strip():
                    # non-empty line without the 'node:name' shape
                    self.ui.warn(_('malformated mq status line: %s\n') %
                                 stringutil.pprint(entry))
                # else we ignore empty lines
        try:
            lines = self.opener.read(self.statuspath).splitlines()
            return list(parselines(lines))
        except IOError as e:
            if e.errno == errno.ENOENT:
                # missing status file means no patches are applied
                return []
            raise
507 507
508 508 @util.propertycache
509 509 def fullseries(self):
510 510 try:
511 511 return self.opener.read(self.seriespath).splitlines()
512 512 except IOError as e:
513 513 if e.errno == errno.ENOENT:
514 514 return []
515 515 raise
516 516
    @util.propertycache
    def series(self):
        # parseseries() stores the computed list on the instance,
        # replacing this propertycache slot; later accesses are plain
        # attribute reads.
        self.parseseries()
        return self.series
521 521
    @util.propertycache
    def seriesguards(self):
        # filled as a side effect of parseseries(), parallel to self.series
        self.parseseries()
        return self.seriesguards
526 526
527 527 def invalidate(self):
528 528 for a in 'applied fullseries series seriesguards'.split():
529 529 if a in self.__dict__:
530 530 delattr(self, a)
531 531 self.applieddirty = False
532 532 self.seriesdirty = False
533 533 self.guardsdirty = False
534 534 self.activeguards = None
535 535
    def diffopts(self, opts=None, patchfn=None, plain=False):
        """Return diff options tweaked for this mq use, possibly upgrading to
        git format, and possibly plain and without lossy options."""
        diffopts = patchmod.difffeatureopts(self.ui, opts,
            git=True, whitespace=not plain, formatchanging=not plain)
        if self.gitmode == 'auto':
            # use git patches only when needed to avoid losing data
            diffopts.upgrade = True
        elif self.gitmode == 'keep':
            # preserve whatever format the existing patch uses
            pass
        elif self.gitmode in ('yes', 'no'):
            diffopts.git = self.gitmode == 'yes'
        else:
            raise error.Abort(_('mq.git option can be auto/keep/yes/no'
                                ' got %s') % self.gitmode)
        if patchfn:
            diffopts = self.patchopts(diffopts, patchfn)
        return diffopts
553 553
554 554 def patchopts(self, diffopts, *patches):
555 555 """Return a copy of input diff options with git set to true if
556 556 referenced patch is a git patch and should be preserved as such.
557 557 """
558 558 diffopts = diffopts.copy()
559 559 if not diffopts.git and self.gitmode == 'keep':
560 560 for patchfn in patches:
561 561 patchf = self.opener(patchfn, 'r')
562 562 # if the patch was a git patch, refresh it as a git patch
563 563 diffopts.git = any(line.startswith('diff --git')
564 564 for line in patchf)
565 565 patchf.close()
566 566 return diffopts
567 567
    def join(self, *p):
        # resolve path components below the queue's patch directory
        return os.path.join(self.path, *p)
570 570
571 571 def findseries(self, patch):
572 572 def matchpatch(l):
573 573 l = l.split('#', 1)[0]
574 574 return l.strip() == patch
575 575 for index, l in enumerate(self.fullseries):
576 576 if matchpatch(l):
577 577 return index
578 578 return None
579 579
    # matches ' #-guard' / ' #+guard' annotations appended to series lines
    guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

    def parseseries(self):
        """Split fullseries into patch names and their guard lists.

        Fills self.series (patch names in order) and self.seriesguards
        (parallel list of guard annotations per patch).  Comment-only
        lines are skipped; a duplicated patch name aborts.
        """
        self.series = []
        self.seriesguards = []
        for l in self.fullseries:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                # the whole line is a comment
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise error.Abort(_('%s appears more than once in %s') %
                                      (patch, self.join(self.seriespath)))
                self.series.append(patch)
                self.seriesguards.append(self.guard_re.findall(comment))
602 602
603 603 def checkguard(self, guard):
604 604 if not guard:
605 605 return _('guard cannot be an empty string')
606 606 bad_chars = '# \t\r\n\f'
607 607 first = guard[0]
608 608 if first in '-+':
609 609 return (_('guard %r starts with invalid character: %r') %
610 610 (guard, first))
611 611 for c in bad_chars:
612 612 if c in guard:
613 613 return _('invalid character in guard %r: %r') % (guard, c)
614 614
615 615 def setactive(self, guards):
616 616 for guard in guards:
617 617 bad = self.checkguard(guard)
618 618 if bad:
619 619 raise error.Abort(bad)
620 620 guards = sorted(set(guards))
621 621 self.ui.debug('active guards: %s\n' % ' '.join(guards))
622 622 self.activeguards = guards
623 623 self.guardsdirty = True
624 624
    def active(self):
        """Return the list of active guards, loading them lazily.

        Invalid guard names found in the guards file are reported to the
        user but otherwise skipped.
        """
        if self.activeguards is None:
            self.activeguards = []
            try:
                guards = self.opener.read(self.guardspath).split()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # missing guards file means no guards are active
                guards = []
            for i, guard in enumerate(guards):
                bad = self.checkguard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guardspath), i + 1, bad))
                else:
                    self.activeguards.append(guard)
        return self.activeguards
642 642
    def setguards(self, idx, guards):
        """Replace the guards of the series entry at index *idx*.

        Each guard must include its '+'/'-' prefix and its name must
        pass checkguard().
        """
        for g in guards:
            if len(g) < 2:
                raise error.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise error.Abort(_('guard %r starts with invalid char') % g)
            bad = self.checkguard(g[1:])
            if bad:
                raise error.Abort(bad)
        # strip existing guard annotations, then append the new ones
        drop = self.guard_re.sub('', self.fullseries[idx])
        self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
        self.parseseries()
        self.seriesdirty = True
656 656
    def pushable(self, idx):
        """Decide whether the patch at *idx* (index or name) may be pushed.

        Returns (pushable, why).  A negative guard matching an active
        guard blocks the push; positive guards require at least one
        active match.
        """
        if isinstance(idx, bytes):
            idx = self.series.index(idx)
        patchguards = self.seriesguards[idx]
        if not patchguards:
            # unguarded patches are always pushable
            return True, None
        guards = self.active()
        exactneg = [g for g in patchguards
                    if g.startswith('-') and g[1:] in guards]
        if exactneg:
            return False, stringutil.pprint(exactneg[0])
        pos = [g for g in patchguards if g.startswith('+')]
        exactpos = [g for g in pos if g[1:] in guards]
        if pos:
            if exactpos:
                return True, stringutil.pprint(exactpos[0])
            # positive guards exist but none is active
            return False, ' '.join([stringutil.pprint(p) for p in pos])
        return True, ''
675 675
    def explainpushable(self, idx, all_patches=False):
        """Print why the patch at idx is allowed or skipped.

        With all_patches=False the explanation is only emitted in verbose
        mode and goes to the warning stream; with all_patches=True it is
        always written to normal output.
        """
        if all_patches:
            write = self.ui.write
        else:
            write = self.ui.warn

        if all_patches or self.ui.verbose:
            if isinstance(idx, bytes):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    # why is None only for a patch with no guards at all
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %s\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %s\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])
704 704
    def savedirty(self):
        """Flush modified mq state (status, series, guards) to disk and
        register newly created patch files with the patch repository."""
        def writelist(items, path):
            # rewrite the whole control file, one entry per line
            fp = self.opener(path, 'wb')
            for i in items:
                fp.write("%s\n" % i)
            fp.close()
        if self.applieddirty:
            writelist(map(bytes, self.applied), self.statuspath)
            self.applieddirty = False
        if self.seriesdirty:
            writelist(self.fullseries, self.seriespath)
            self.seriesdirty = False
        if self.guardsdirty:
            writelist(self.activeguards, self.guardspath)
            self.guardsdirty = False
        if self.added:
            qrepo = self.qrepo()
            if qrepo:
                # 'add' any new patch files not yet tracked by the queue repo
                qrepo[None].add(f for f in self.added if f not in qrepo[None])
            self.added = []
725 725
726 726 def removeundo(self, repo):
727 727 undo = repo.sjoin('undo')
728 728 if not os.path.exists(undo):
729 729 return
730 730 try:
731 731 os.unlink(undo)
732 732 except OSError as inst:
733 733 self.ui.warn(_('error removing undo: %s\n') %
734 734 stringutil.forcebytestr(inst))
735 735
    def backup(self, repo, files, copy=False):
        """Save .orig copies of files about to be clobbered (--force case).

        With copy=True the working-directory file is preserved in place;
        otherwise it is renamed to its .orig location.
        """
        # backup local changes in --force case
        for f in sorted(files):
            absf = repo.wjoin(f)
            if os.path.lexists(absf):
                self.ui.note(_('saving current version of %s as %s\n') %
                             (f, scmutil.origpath(self.ui, repo, f)))

                absorig = scmutil.origpath(self.ui, repo, absf)
                if copy:
                    util.copyfile(absf, absorig)
                else:
                    util.rename(absf, absorig)
749 749
750 750 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
751 751 fp=None, changes=None, opts=None):
752 752 if opts is None:
753 753 opts = {}
754 754 stat = opts.get('stat')
755 755 m = scmutil.match(repo[node1], files, opts)
756 756 logcmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
757 757 changes, stat, fp)
758 758
    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        """Apply one patch from mergeq on top of head, merging if needed.

        First tries a strict apply; if that fails, the partial result is
        stripped, head is merged with the patch's revision from mergeq,
        and the patch file is regenerated from the merge result.

        Returns (err, node) with err == 0 on success.
        """
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise error.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        strip(self.ui, repo, [n], update=False, backup=False)

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise error.Abort(_("update returned %d") % ret)
        # commit the merge reusing the original description and user
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise error.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise error.Abort(_("unable to read %s") % patch)

        # regenerate the patch file from the merged result
        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = bytes(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
797 797
    def qparents(self, repo, rev=None):
        """return the mq handled parent or p1

        In some cases where mq gets itself into being the parent of a
        merge, the appropriate parent may be p2.
        (e.g. an in-progress merge started with mq disabled)

        If no parent is managed by mq, p1 is returned.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                # not a merge: p1 is the only candidate
                return p1
            if not self.applied:
                return None
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            # p2 is an applied patch, so it is the mq-managed parent
            return p2
        return p1
818 818
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Pull the named patches from mergeq, merging each into this queue.

        Returns (err, head); err is nonzero on the first failure.
        """
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                # guarded patches are skipped, not errors
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
857 857
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file

        Returns (success, files, fuzz) where files lists the touched
        files and fuzz reports whether the hunks applied with fuzz.'''
        files = set()
        try:
            fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
                                  files=files, eolmode=None)
            return (True, list(files), fuzz)
        except Exception as inst:
            # report the failure but keep going: caller decides what to do
            self.ui.note(stringutil.forcebytestr(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            self.ui.traceback()
            return (False, list(files), False)
872 872
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        """Apply patches under wlock/lock and a transaction; see _apply.

        The transaction is committed (and dirty mq state flushed) both on
        success and on AbortNoCleanup; any other exception aborts the
        transaction and invalidates cached queue state.
        """
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # keep the partially applied state and persist it
                tr.close()
                self.savedirty()
                raise
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
901 901
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                # guarded patch: explain and move on to the next one
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # back up local modifications to any file the patch
                    # is about to touch
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("conflicting local changes found"),
                            hint=_("did you forget to qrefresh?"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                # invert: self.patch returns success, we track failure
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                with repo.dirstate.parentchange():
                    for f in removed:
                        repo.dirstate.remove(f)
                    for f in merged:
                        repo.dirstate.merge(f)
                    p1 = repo.dirstate.p1()
                    repo.setparents(p1, merge)

            if all_files and '.hgsubstate' in all_files:
                wctx = repo[None]
                pctx = repo['.']
                overwrite = False
                mergedsubstate = subrepoutil.submerge(repo, pctx, wctx, wctx,
                                                      overwrite)
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo.changelog.tip()
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo.changelog.tip() == oldtip:
                raise error.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise error.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working "
                               "directory\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
1007 1007
    def _cleanup(self, patches, numrevs, keep=False):
        """Drop patches from the series/status after qdelete/qfinish.

        numrevs entries are removed from the applied (status) list; the
        named patches are removed from the series and, unless keep, their
        patch files are deleted. Returns the nodes of the finished
        entries.
        """
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError as inst:
                    # already-missing patch files are fine
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        # pair each patch with its index in fullseries (-1 if absent)
        sortedseries = []
        for p in patches:
            idx = self.findseries(p)
            if idx is None:
                sortedseries.append((-1, p))
            else:
                sortedseries.append((idx, p))

        # delete from the end so earlier indexes remain valid
        sortedseries.sort(reverse=True)
        for (i, p) in sortedseries:
            if i != -1:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise error.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
1056 1056
    def _revpatches(self, repo, revs):
        """Map a sorted list of revisions to their applied patch names.

        Aborts if a revision is not managed by mq or is not at the
        expected position at the bottom of the applied stack.
        """
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                raise error.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _('cannot delete revision %d above applied patches')
                raise error.Abort(msg % rev)

            patch = self.applied[i].name
            for fmt in ('[mq]: %s', 'imported patch %s'):
                # warn when finalizing a patch whose message was never edited
                if ctx.description() == fmt % patch:
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches
1080 1080
    def finish(self, repo, revs):
        """Move the given applied revisions out of mq's control (qfinish),
        advancing their phase if mq.secret is configured."""
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool('mq', 'secret'):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = phases.newcommitphase(repo.ui)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                with repo.transaction('qfinish') as tr:
                    phases.advanceboundary(repo, tr, tphase, qfinished)
1094 1094
    def delete(self, repo, patches, opts):
        """Remove patches (by name and/or via opts['rev']) from the queue.

        Applied patches cannot be deleted by name; revisions given with
        --rev must be at the bottom of the applied stack. With
        opts['keep'] the patch files are left on disk.
        """
        if not patches and not opts.get('rev'):
            raise error.Abort(_('qdelete requires at least one revision or '
                                'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise error.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise error.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise error.Abort(_('no patches applied'))
            revs = scmutil.revrange(repo, opts.get('rev'))
            revs.sort()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))
1122 1122
1123 1123 def checktoppatch(self, repo):
1124 1124 '''check that working directory is at qtip'''
1125 1125 if self.applied:
1126 1126 top = self.applied[-1].node
1127 1127 patch = self.applied[-1].name
1128 1128 if repo.dirstate.p1() != top:
1129 1129 raise error.Abort(_("working directory revision is not qtip"))
1130 1130 return top, patch
1131 1131 return None, None
1132 1132
1133 1133 def putsubstate2changes(self, substatestate, changes):
1134 1134 for files in changes[:3]:
1135 1135 if '.hgsubstate' in files:
1136 1136 return # already listed up
1137 1137 # not yet listed up
1138 1138 if substatestate in 'a?':
1139 1139 changes[1].append('.hgsubstate')
1140 1140 elif substatestate in 'r':
1141 1141 changes[2].append('.hgsubstate')
1142 1142 else: # modified
1143 1143 changes[0].append('.hgsubstate')
1144 1144
    def checklocalchanges(self, repo, force=False, refresh=True):
        """Abort if the working directory has uncommitted changes.

        Delegates to the module-level checklocalchanges(); with
        force=True changes are tolerated. refresh only selects the hint
        appended to the error message.
        """
        excsuffix = ''
        if refresh:
            excsuffix = ', qrefresh first'
            # plain versions for i18n tool to detect them
            _("local changes found, qrefresh first")
            _("local changed subrepos found, qrefresh first")
        return checklocalchanges(repo, force, excsuffix)
1153 1153
    # patch names that would collide with mq's own control files
    _reserved = ('series', 'status', 'guards', '.', '..')
    def checkreservedname(self, name):
        """Abort if name is not usable as a patch name.

        Rejects reserved names, leading/trailing whitespace, the '.hg'
        and '.mq' prefixes, and characters that would corrupt the series
        or status files.
        """
        if name in self._reserved:
            raise error.Abort(_('"%s" cannot be used as the name of a patch')
                              % name)
        if name != name.strip():
            # whitespace is stripped by parseseries()
            raise error.Abort(_('patch name cannot begin or end with '
                                'whitespace'))
        for prefix in ('.hg', '.mq'):
            if name.startswith(prefix):
                raise error.Abort(_('patch name cannot begin with "%s"')
                                  % prefix)
        for c in ('#', ':', '\r', '\n'):
            if c in name:
                raise error.Abort(_('%r cannot be used in the name of a patch')
                                  % pycompat.bytestr(c))
1171 1171
1172 1172 def checkpatchname(self, name, force=False):
1173 1173 self.checkreservedname(name)
1174 1174 if not force and os.path.exists(self.join(name)):
1175 1175 if os.path.isdir(self.join(name)):
1176 1176 raise error.Abort(_('"%s" already exists as a directory')
1177 1177 % name)
1178 1178 else:
1179 1179 raise error.Abort(_('patch "%s" already exists') % name)
1180 1180
1181 1181 def makepatchname(self, title, fallbackname):
1182 1182 """Return a suitable filename for title, adding a suffix to make
1183 1183 it unique in the existing list"""
1184 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
1184 namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_')
1185 1185 namebase = namebase[:75] # avoid too long name (issue5117)
1186 1186 if namebase:
1187 1187 try:
1188 1188 self.checkreservedname(namebase)
1189 1189 except error.Abort:
1190 1190 namebase = fallbackname
1191 1191 else:
1192 1192 namebase = fallbackname
1193 1193 name = namebase
1194 1194 i = 0
1195 1195 while True:
1196 1196 if name not in self.fullseries:
1197 1197 try:
1198 1198 self.checkpatchname(name)
1199 1199 break
1200 1200 except error.Abort:
1201 1201 pass
1202 1202 i += 1
1203 1203 name = '%s__%d' % (namebase, i)
1204 1204 return name
1205 1205
1206 1206 def checkkeepchanges(self, keepchanges, force):
1207 1207 if force and keepchanges:
1208 1208 raise error.Abort(_('cannot use both --force and --keep-changes'))
1209 1209
    def new(self, repo, patchfn, *pats, **opts):
        """Create a new patch (qnew) from the current working changes.

        options:
        msg: a string or a no-argument function returning a string
        """
        opts = pycompat.byteskwargs(opts)
        msg = opts.get('msg')
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qnew')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = dateutil.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = checksubstate(repo)
        if inclsubs:
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise error.Abort('%s: %s' % (f, msg))
            match = scmutil.match(repo[None], pats, opts, badfn=badfn)
            changes = repo.status(match=match)
        else:
            changes = self.checklocalchanges(repo, force=True)
        commitfiles = list(inclsubs)
        for files in changes[:3]:
            commitfiles.extend(files)
        match = scmutil.matchfiles(repo, commitfiles)
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot manage merge changesets'))
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        with repo.wlock():
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError as e:
                raise error.Abort(_('cannot write patch "%s": %s')
                                  % (patchfn, encoding.strtolocal(e.strerror)))
            try:
                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        # fall back to the default message if the user
                        # left the editor buffer empty
                        if desc.rstrip():
                            return desc
                        else:
                            return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    commitmsg = msg
                else:
                    commitmsg = msg or defaultmsg

                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True, editor=editor)
                if n is None:
                    raise error.Abort(_("repo commit failed"))
                try:
                    # record the new patch in series/status, then write the
                    # patch header and diff to the patch file
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    nctx = repo[n]
                    ph = patchheader(self.join(patchfn), self.plainmode)
                    if user:
                        ph.setuser(user)
                    if date:
                        ph.setdate('%d %d' % date)
                    ph.setparent(hex(nctx.p1().node()))
                    msg = nctx.description().strip()
                    if msg == defaultmsg.strip():
                        msg = ''
                    ph.setmessage(msg)
                    p.write(bytes(ph))
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        if inclsubs:
                            self.putsubstate2changes(substatestate, changes)
                        chunks = patchmod.diff(repo, node1=parent, node2=n,
                                               changes=changes, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except: # re-raises
                    repo.rollback()
                    raise
            except Exception:
                # creation failed: remove the half-written patch file
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except OSError:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
        self.removeundo(repo)
1315 1315
1316 1316 def isapplied(self, patch):
1317 1317 """returns (index, rev, patch)"""
1318 1318 for i, a in enumerate(self.applied):
1319 1319 if a.name == patch:
1320 1320 return (i, a.node, a.name)
1321 1321 return None
1322 1322
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number (as string) to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied patch identifier to a series entry;
        abort if nothing matches (see the comment above for the
        variations tried)."""
        def partialname(s):
            # resolve s as an exact name, a unique substring, or the
            # special names qtip/qbase
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.seriesend(True) - 1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch in self.series:
            return patch

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                # numeric index into the series (negatives allowed)
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

            if not strict:
                res = partialname(patch)
                if res:
                    return res
                # 'name-N': the patch N positions before name in the series
                minus = patch.rfind('-')
                if minus >= 0:
                    res = partialname(patch[:minus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[minus + 1:] or 1)
                        except (ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                # 'name+N': the patch N positions after name in the series
                plus = patch.rfind('+')
                if plus >= 0:
                    res = partialname(patch[:plus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[plus + 1:] or 1)
                        except (ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise error.Abort(_("patch %s not in series") % patch)
1389 1389
    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
             all=False, move=False, exact=False, nobackup=False,
             keepchanges=False):
        """Apply unapplied patches up to patch (or the next/all patches)
        onto the working directory (qpush).

        Returns 0 on success or no-op, 1 on error; aborts on invalid
        option combinations or conflicting local changes.
        """
        self.checkkeepchanges(keepchanges, force)
        diffopts = self.diffopts()
        with repo.wlock():
            heads = []
            for hs in repo.branchmap().itervalues():
                heads.extend(hs)
            if not heads:
                heads = [nullid]
            if repo.dirstate.p1() not in heads and not exact:
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                patch = self.lookup(patch)
                info = self.isapplied(patch)
                if info and info[0] >= len(self.applied) - 1:
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return 0

                pushable, reason = self.pushable(patch)
                if pushable:
                    if self.series.index(patch) < self.seriesend():
                        raise error.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                else:
                    if reason:
                        reason = _('guarded by %s') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.seriesend()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force and not keepchanges:
                self.checklocalchanges(repo, refresh=self.applied)

            if exact:
                if keepchanges:
                    raise error.Abort(
                        _("cannot use --exact and --keep-changes together"))
                if move:
                    raise error.Abort(_('cannot use --exact and --move '
                                        'together'))
                if self.applied:
                    raise error.Abort(_('cannot push --exact with applied '
                                        'patches'))
                root = self.series[start]
                target = patchheader(self.join(root), self.plainmode).parent
                if not target:
                    raise error.Abort(
                        _("%s does not have a parent recorded") % root)
                if not repo[target] == repo['.']:
                    hg.update(repo, target)

            if move:
                if not patch:
                    raise error.Abort(_("please specify the patch to move"))
                for fullstart, rpn in enumerate(self.fullseries):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                        break
                for i, rpn in enumerate(self.fullseries[fullstart:]):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == patch:
                        break
                index = fullstart + i
                assert index < len(self.fullseries)
                fullpatch = self.fullseries[index]
                del self.fullseries[index]
                # reinsert the moved patch at the next-to-push position
                self.fullseries.insert(fullstart, fullpatch)
                self.parseseries()
                self.seriesdirty = True

            self.applieddirty = True
            if start > 0:
                self.checktoppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            tobackup = set()
            if (not nobackup and force) or keepchanges:
                status = self.checklocalchanges(repo, force=True)
                if keepchanges:
                    tobackup.update(status.modified + status.added +
                                    status.removed + status.deleted)
                else:
                    tobackup.update(status.modified + status.added)

            s = self.series[start:end]
            all_files = set()
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s, diffopts)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files,
                                     tobackup=tobackup, keepchanges=keepchanges)
            except AbortNoCleanup:
                raise
            except: # re-raises
                self.ui.warn(_('cleaning up working directory...\n'))
                cmdutil.revert(self.ui, repo, repo['.'],
                               repo.dirstate.parents(), no_backup=True)
                # only remove unknown files that we know we touched or
                # created while patching
                for f in all_files:
                    if f not in repo.dirstate:
                        repo.wvfs.unlinkpath(f, ignoremissing=True)
                self.ui.warn(_('done\n'))
                raise

            if not self.applied:
                return ret[0]
            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                msg = _("errors during apply, please fix and qrefresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]
1536 1536
    def pop(self, repo, patch=None, force=False, update=True, all=False,
            nobackup=False, keepchanges=False):
        """Unapply patches until (and including) *patch*.

        all=True pops every applied patch; with neither patch nor all,
        only the topmost patch is popped.  force pops despite local
        changes (backing them up unless nobackup is set), while
        keepchanges aborts when local changes overlap the popped
        patches.  update=False skips the working directory update when
        the dirstate parents permit it.
        """
        self.checkkeepchanges(keepchanges, force)
        with repo.wlock():
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                    if not info:
                        raise error.Abort(_("patch %s is not applied") % patch)

            if not self.applied:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if all:
                start = 0
            elif patch:
                start = info[0] + 1
            else:
                start = len(self.applied) - 1

            if start >= len(self.applied):
                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                return

            if not update:
                # still force a dirstate update when a dirstate parent is
                # among the patches being popped
                parents = repo.dirstate.parents()
                rr = [x.node for x in self.applied]
                for p in parents:
                    if p in rr:
                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                        update = True
            else:
                # only update if a popped patch contains a dirstate parent
                parents = [p.node() for p in repo[None].parents()]
                update = any(entry.node in parents
                             for entry in self.applied[start:])

            tobackup = set()
            if update:
                s = self.checklocalchanges(repo, force=force or keepchanges)
                if force:
                    if not nobackup:
                        tobackup.update(s.modified + s.added)
                elif keepchanges:
                    tobackup.update(s.modified + s.added +
                                    s.removed + s.deleted)

            self.applieddirty = True
            end = len(self.applied)
            rev = self.applied[start].node

            try:
                heads = repo.changelog.heads(rev)
            except error.LookupError:
                node = short(rev)
                raise error.Abort(_('trying to pop unknown node %s') % node)

            if heads != [self.applied[-1].node]:
                raise error.Abort(_("popping would remove a revision not "
                                    "managed by this patch queue"))
            if not repo[self.applied[-1].node].mutable():
                raise error.Abort(
                    _("popping would remove a public revision"),
                    hint=_("see 'hg help phases' for details"))

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                ctx = repo[qp]
                m, a, r, d = repo.status(qp, '.')[:4]
                if d:
                    raise error.Abort(_("deletions found between repo revs"))

                tobackup = set(a + m + r) & tobackup
                if keepchanges and tobackup:
                    raise error.Abort(_("local changes found, qrefresh first"))
                self.backup(repo, tobackup)
                with repo.dirstate.parentchange():
                    for f in a:
                        repo.wvfs.unlinkpath(f, ignoremissing=True)
                        repo.dirstate.drop(f)
                    for f in m + r:
                        fctx = ctx[f]
                        repo.wwrite(f, fctx.data(), fctx.flags())
                        repo.dirstate.normal(f)
                    repo.setparents(qp, nullid)
            for patch in reversed(self.applied[start:end]):
                self.ui.status(_("popping %s\n") % patch.name)
            del self.applied[start:end]
            strip(self.ui, repo, [rev], update=False, backup=False)
            for s, state in repo['.'].substate.items():
                repo['.'].sub(s).get(state)
            if self.applied:
                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
            else:
                self.ui.write(_("patch queue now empty\n"))
1639 1639
1640 1640 def diff(self, repo, pats, opts):
1641 1641 top, patch = self.checktoppatch(repo)
1642 1642 if not top:
1643 1643 self.ui.write(_("no patches applied\n"))
1644 1644 return
1645 1645 qp = self.qparents(repo, top)
1646 1646 if opts.get('reverse'):
1647 1647 node1, node2 = None, qp
1648 1648 else:
1649 1649 node1, node2 = qp, None
1650 1650 diffopts = self.diffopts(opts, patch)
1651 1651 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1652 1652
    def refresh(self, repo, pats=None, **opts):
        """Recreate the topmost applied patch from the working directory.

        The qtip commit is stripped, the working directory changes (plus
        the files already contained in the patch) are committed anew,
        and the patch file is rewritten.  Returns 1 when no patches are
        applied.
        """
        opts = pycompat.byteskwargs(opts)
        if not self.applied:
            self.ui.write(_("no patches applied\n"))
            return 1
        msg = opts.get('msg', '').rstrip()
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qrefresh')
        newuser = opts.get('user')
        newdate = opts.get('date')
        if newdate:
            newdate = '%d %d' % dateutil.parsedate(newdate)
        wlock = repo.wlock()

        try:
            self.checktoppatch(repo)
            (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
            if repo.changelog.heads(top) != [top]:
                raise error.Abort(_("cannot qrefresh a revision with children"))
            if not repo[top].mutable():
                raise error.Abort(_("cannot qrefresh public revision"),
                                  hint=_("see 'hg help phases' for details"))

            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)

            inclsubs = checksubstate(repo, patchparent)
            if inclsubs:
                substatestate = repo.dirstate['.hgsubstate']

            ph = patchheader(self.join(patchfn), self.plainmode)
            diffopts = self.diffopts({'git': opts.get('git')}, patchfn,
                                     plain=True)
            if newuser:
                ph.setuser(newuser)
            if newdate:
                ph.setdate(newdate)
            ph.setparent(hex(patchparent))

            # only commit new patch when write is complete
            patchf = self.opener(patchfn, 'w', atomictemp=True)

            # update the dirstate in place, strip off the qtip commit
            # and then commit.
            #
            # this should really read:
            #   mm, dd, aa = repo.status(top, patchparent)[:3]
            # but we do it backwards to take advantage of manifest/changelog
            # caching against the next repo.status call
            mm, aa, dd = repo.status(patchparent, top)[:3]
            changes = repo.changelog.read(top)
            man = repo.manifestlog[changes[0]].read()
            aaa = aa[:]
            match1 = scmutil.match(repo[None], pats, opts)
            # in short mode, we only diff the files included in the
            # patch already plus specified files
            if opts.get('short'):
                # if amending a patch, we start with existing
                # files plus specified files - unfiltered
                match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
                # filter with include/exclude options
                match1 = scmutil.match(repo[None], opts=opts)
            else:
                match = scmutil.matchall(repo)
            m, a, r, d = repo.status(match=match)[:4]
            mm = set(mm)
            aa = set(aa)
            dd = set(dd)

            # we might end up with files that were added between
            # qtip and the dirstate parent, but then changed in the
            # local dirstate. in this case, we want them to only
            # show up in the added section
            for x in m:
                if x not in aa:
                    mm.add(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    dd.remove(x)
                    mm.add(x)
                else:
                    aa.add(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    aa.remove(x)
                    forget.append(x)
                    continue
                else:
                    mm.discard(x)
                dd.add(x)

            m = list(mm)
            r = list(dd)
            a = list(aa)

            # create 'match' that includes the files to be recommitted.
            # apply match1 via repo.status to ensure correct case handling.
            cm, ca, cr, cd = repo.status(patchparent, match=match1)[:4]
            allmatches = set(cm + ca + cr + cd)
            refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]

            files = set(inclsubs)
            for x in refreshchanges:
                files.update(x)
            match = scmutil.matchfiles(repo, files)

            bmlist = repo[top].bookmarks()

            dsguard = None
            try:
                dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
                if diffopts.git or diffopts.upgrade:
                    copies = {}
                    for dst in a:
                        src = repo.dirstate.copied(dst)
                        # during qfold, the source file for copies may
                        # be removed. Treat this as a simple add.
                        if src is not None and src in repo.dirstate:
                            copies.setdefault(src, []).append(dst)
                        repo.dirstate.add(dst)
                    # remember the copies between patchparent and qtip
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies.setdefault(src[0], []).extend(
                                copies.get(dst, []))
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                    for src, dsts in copies.iteritems():
                        for dst in dsts:
                            repo.dirstate.copy(src, dst)
                else:
                    for dst in a:
                        repo.dirstate.add(dst)
                    # Drop useless copy information
                    for f in list(repo.dirstate.copies()):
                        repo.dirstate.copy(None, f)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in pycompat.xrange(len(m) - 1, -1, -1):
                    if not match1(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.drop(f)

                user = ph.user or changes[1]

                oldphase = repo[top].phase()

                # assumes strip can roll itself back if interrupted
                repo.setparents(*cparents)
                self.applied.pop()
                self.applieddirty = True
                strip(self.ui, repo, [top], update=False, backup=False)
                dsguard.close()
            finally:
                release(dsguard)

            try:
                # might be nice to attempt to roll back strip after this

                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        if desc.rstrip():
                            ph.setmessage(desc)
                            return desc
                        return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    message = msg or "\n".join(ph.message)
                elif not msg:
                    if not ph.message:
                        message = defaultmsg
                    else:
                        message = "\n".join(ph.message)
                else:
                    message = msg
                    ph.setmessage(msg)

                # Ensure we create a new changeset in the same phase than
                # the old one.
                lock = tr = None
                try:
                    lock = repo.lock()
                    tr = repo.transaction('mq')
                    n = newcommit(repo, oldphase, message, user, ph.date,
                                  match=match, force=True, editor=editor)
                    # only write patch after a successful commit
                    c = [list(x) for x in refreshchanges]
                    if inclsubs:
                        self.putsubstate2changes(substatestate, c)
                    chunks = patchmod.diff(repo, patchparent,
                                           changes=c, opts=diffopts)
                    comments = bytes(ph)
                    if comments:
                        patchf.write(comments)
                    for chunk in chunks:
                        patchf.write(chunk)
                    patchf.close()

                    marks = repo._bookmarks
                    marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
                    tr.close()

                    self.applied.append(statusentry(n, patchfn))
                finally:
                    lockmod.release(tr, lock)
            except: # re-raises
                # the strip already happened; restore the dirstate to the
                # parent of the stripped revision and warn the user
                ctx = repo[cparents[0]]
                repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                self.savedirty()
                self.ui.warn(_('qrefresh interrupted while patch was popped! '
                               '(revert --all, qpush to recover)\n'))
                raise
        finally:
            wlock.release()
            self.removeundo(repo)
1893 1893
1894 1894 def init(self, repo, create=False):
1895 1895 if not create and os.path.isdir(self.path):
1896 1896 raise error.Abort(_("patch queue directory already exists"))
1897 1897 try:
1898 1898 os.mkdir(self.path)
1899 1899 except OSError as inst:
1900 1900 if inst.errno != errno.EEXIST or not create:
1901 1901 raise
1902 1902 if create:
1903 1903 return self.qrepo(create=True)
1904 1904
1905 1905 def unapplied(self, repo, patch=None):
1906 1906 if patch and patch not in self.series:
1907 1907 raise error.Abort(_("patch %s is not in series file") % patch)
1908 1908 if not patch:
1909 1909 start = self.seriesend()
1910 1910 else:
1911 1911 start = self.series.index(patch) + 1
1912 1912 unapplied = []
1913 1913 for i in pycompat.xrange(start, len(self.series)):
1914 1914 pushable, reason = self.pushable(i)
1915 1915 if pushable:
1916 1916 unapplied.append((i, self.series[i]))
1917 1917 self.explainpushable(i)
1918 1918 return unapplied
1919 1919
1920 1920 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1921 1921 summary=False):
1922 1922 def displayname(pfx, patchname, state):
1923 1923 if pfx:
1924 1924 self.ui.write(pfx)
1925 1925 if summary:
1926 1926 ph = patchheader(self.join(patchname), self.plainmode)
1927 1927 if ph.message:
1928 1928 msg = ph.message[0]
1929 1929 else:
1930 1930 msg = ''
1931 1931
1932 1932 if self.ui.formatted():
1933 1933 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1934 1934 if width > 0:
1935 1935 msg = stringutil.ellipsis(msg, width)
1936 1936 else:
1937 1937 msg = ''
1938 1938 self.ui.write(patchname, label='qseries.' + state)
1939 1939 self.ui.write(': ')
1940 1940 self.ui.write(msg, label='qseries.message.' + state)
1941 1941 else:
1942 1942 self.ui.write(patchname, label='qseries.' + state)
1943 1943 self.ui.write('\n')
1944 1944
1945 1945 applied = set([p.name for p in self.applied])
1946 1946 if length is None:
1947 1947 length = len(self.series) - start
1948 1948 if not missing:
1949 1949 if self.ui.verbose:
1950 1950 idxwidth = len("%d" % (start + length - 1))
1951 1951 for i in pycompat.xrange(start, start + length):
1952 1952 patch = self.series[i]
1953 1953 if patch in applied:
1954 1954 char, state = 'A', 'applied'
1955 1955 elif self.pushable(i)[0]:
1956 1956 char, state = 'U', 'unapplied'
1957 1957 else:
1958 1958 char, state = 'G', 'guarded'
1959 1959 pfx = ''
1960 1960 if self.ui.verbose:
1961 1961 pfx = '%*d %s ' % (idxwidth, i, char)
1962 1962 elif status and status != char:
1963 1963 continue
1964 1964 displayname(pfx, patch, state)
1965 1965 else:
1966 1966 msng_list = []
1967 1967 for root, dirs, files in os.walk(self.path):
1968 1968 d = root[len(self.path) + 1:]
1969 1969 for f in files:
1970 1970 fl = os.path.join(d, f)
1971 1971 if (fl not in self.series and
1972 1972 fl not in (self.statuspath, self.seriespath,
1973 1973 self.guardspath)
1974 1974 and not fl.startswith('.')):
1975 1975 msng_list.append(fl)
1976 1976 for x in sorted(msng_list):
1977 1977 pfx = self.ui.verbose and ('D ') or ''
1978 1978 displayname(pfx, x, 'missing')
1979 1979
1980 1980 def issaveline(self, l):
1981 1981 if l.name == '.hg.patches.save.line':
1982 1982 return True
1983 1983
1984 1984 def qrepo(self, create=False):
1985 1985 ui = self.baseui.copy()
1986 1986 # copy back attributes set by ui.pager()
1987 1987 if self.ui.pageractive and not ui.pageractive:
1988 1988 ui.pageractive = self.ui.pageractive
1989 1989 # internal config: ui.formatted
1990 1990 ui.setconfig('ui', 'formatted',
1991 1991 self.ui.config('ui', 'formatted'), 'mqpager')
1992 1992 ui.setconfig('ui', 'interactive',
1993 1993 self.ui.config('ui', 'interactive'), 'mqpager')
1994 1994 if create or os.path.isdir(self.join(".hg")):
1995 1995 return hg.repository(ui, path=self.path, create=create)
1996 1996
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Rebuild mq state from the qsave changeset *rev*.

        Parses the series/applied/dirstate data that save() encoded in
        the changeset description.  With delete, strip the save entry
        afterwards (if it has no children); with qupdate, also update
        the queue repository to the saved parents.
        """
        desc = repo[rev].description().strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i, line in enumerate(lines):
            if line == 'Patch Data:':
                datastart = i + 1
            elif line.startswith('Dirstate:'):
                l = line.rstrip()
                l = l[10:].split(' ')
                qpp = [bin(x) for x in l]
            elif datastart is not None:
                l = line.rstrip()
                # applied entries are 'node:name', series entries ':name'
                n, name = l.split(':', 1)
                if n:
                    applied.append(statusentry(bin(n), name))
                else:
                    series.append(l)
        if datastart is None:
            self.ui.warn(_("no saved patch data found\n"))
            return 1
        self.ui.warn(_("restoring status: %s\n") % lines[0])
        self.fullseries = series
        self.applied = applied
        self.parseseries()
        self.seriesdirty = True
        self.applieddirty = True
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn(_("save entry has children, leaving it alone\n"))
            else:
                self.ui.warn(_("removing save entry %s\n") % short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                strip(self.ui, repo, [rev], update=update, backup=False)
        if qpp:
            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                         (short(qpp[0]), short(qpp[1])))
            if qupdate:
                self.ui.status(_("updating queue directory\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn(_("unable to load queue repository\n"))
                    return 1
                hg.clean(r, qpp[0])
2050 2050
2051 2051 def save(self, repo, msg=None):
2052 2052 if not self.applied:
2053 2053 self.ui.warn(_("save: no patches applied, exiting\n"))
2054 2054 return 1
2055 2055 if self.issaveline(self.applied[-1]):
2056 2056 self.ui.warn(_("status is already saved\n"))
2057 2057 return 1
2058 2058
2059 2059 if not msg:
2060 2060 msg = _("hg patches saved state")
2061 2061 else:
2062 2062 msg = "hg patches: " + msg.rstrip('\r\n')
2063 2063 r = self.qrepo()
2064 2064 if r:
2065 2065 pp = r.dirstate.parents()
2066 2066 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2067 2067 msg += "\n\nPatch Data:\n"
2068 2068 msg += ''.join('%s\n' % x for x in self.applied)
2069 2069 msg += ''.join(':%s\n' % x for x in self.fullseries)
2070 2070 n = repo.commit(msg, force=True)
2071 2071 if not n:
2072 2072 self.ui.warn(_("repo commit failed\n"))
2073 2073 return 1
2074 2074 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2075 2075 self.applieddirty = True
2076 2076 self.removeundo(repo)
2077 2077
2078 2078 def fullseriesend(self):
2079 2079 if self.applied:
2080 2080 p = self.applied[-1].name
2081 2081 end = self.findseries(p)
2082 2082 if end is None:
2083 2083 return len(self.fullseries)
2084 2084 return end + 1
2085 2085 return 0
2086 2086
2087 2087 def seriesend(self, all_patches=False):
2088 2088 """If all_patches is False, return the index of the next pushable patch
2089 2089 in the series, or the series length. If all_patches is True, return the
2090 2090 index of the first patch past the last applied one.
2091 2091 """
2092 2092 end = 0
2093 2093 def nextpatch(start):
2094 2094 if all_patches or start >= len(self.series):
2095 2095 return start
2096 2096 for i in pycompat.xrange(start, len(self.series)):
2097 2097 p, reason = self.pushable(i)
2098 2098 if p:
2099 2099 return i
2100 2100 self.explainpushable(i)
2101 2101 return len(self.series)
2102 2102 if self.applied:
2103 2103 p = self.applied[-1].name
2104 2104 try:
2105 2105 end = self.series.index(p)
2106 2106 except ValueError:
2107 2107 return 0
2108 2108 return nextpatch(end + 1)
2109 2109 return nextpatch(end)
2110 2110
2111 2111 def appliedname(self, index):
2112 2112 pname = self.applied[index].name
2113 2113 if not self.ui.verbose:
2114 2114 p = pname
2115 2115 else:
2116 2116 p = ("%d" % self.series.index(pname)) + " " + pname
2117 2117 return p
2118 2118
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patch files or existing revisions into the queue.

        With rev, place existing changesets under mq control (prepended
        to the series); otherwise import the given patch files (appended
        after the applied patches).  existing registers files already in
        the patch directory, force overwrites/duplicates checks, git
        selects the git diff format for --rev imports.  Returns the list
        of imported patch names.
        """
        def checkseries(patchname):
            if patchname in self.series:
                raise error.Abort(_('patch %s is already in the series file')
                                  % patchname)

        if rev:
            if files:
                raise error.Abort(_('option "-r" not valid when importing '
                                    'files'))
            rev = scmutil.revrange(repo, rev)
            # import revisions from tip-most downward so each one is the
            # parent of the previously imported patch
            rev.sort(reverse=True)
        elif not files:
            raise error.Abort(_('no files or revisions specified'))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise error.Abort(_('option "-n" not valid when importing multiple '
                                'patches'))
        imported = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev.first()))
            if len(heads) > 1:
                raise error.Abort(_('revision %d is the root of more than one '
                                    'branch') % rev.last())
            if self.applied:
                base = repo.changelog.node(rev.first())
                if base in [n.node for n in self.applied]:
                    raise error.Abort(_('revision %d is already managed')
                                      % rev.first())
                if heads != [self.applied[-1].node]:
                    raise error.Abort(_('revision %d is not the parent of '
                                        'the queue') % rev.first())
                base = repo.changelog.rev(self.applied[0].node)
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev.first())]:
                    raise error.Abort(_('revision %d has unmanaged children')
                                      % rev.first())
                lastparent = None

            diffopts = self.diffopts({'git': git})
            with repo.transaction('qimport') as tr:
                for r in rev:
                    if not repo[r].mutable():
                        raise error.Abort(_('revision %d is not mutable') % r,
                                          hint=_("see 'hg help phases' "
                                                 'for details'))
                    p1, p2 = repo.changelog.parentrevs(r)
                    n = repo.changelog.node(r)
                    if p2 != nullrev:
                        raise error.Abort(_('cannot import merge revision %d')
                                          % r)
                    if lastparent and lastparent != r:
                        raise error.Abort(_('revision %d is not the parent of '
                                            '%d')
                                          % (r, lastparent))
                    lastparent = p1

                    if not patchname:
                        patchname = self.makepatchname(
                            repo[r].description().split('\n', 1)[0],
                            '%d.diff' % r)
                    checkseries(patchname)
                    self.checkpatchname(patchname, force)
                    self.fullseries.insert(0, patchname)

                    with self.opener(patchname, "w") as fp:
                        cmdutil.exportfile(repo, [n], fp, opts=diffopts)

                    se = statusentry(n, patchname)
                    self.applied.insert(0, se)

                    self.added.append(patchname)
                    imported.append(patchname)
                    patchname = None
                if rev and repo.ui.configbool('mq', 'secret'):
                    # if we added anything with --rev, move the secret root
                    phases.retractboundary(repo, tr, phases.secret, [n])
                self.parseseries()
                self.applieddirty = True
                self.seriesdirty = True

        for i, filename in enumerate(files):
            if existing:
                if filename == '-':
                    raise error.Abort(_('-e is incompatible with import from -')
                                      )
                filename = normname(filename)
                self.checkreservedname(filename)
                if util.url(filename).islocal():
                    originpath = self.join(filename)
                    if not os.path.isfile(originpath):
                        raise error.Abort(
                            _("patch %s does not exist") % filename)

                if patchname:
                    self.checkpatchname(patchname, force)

                    self.ui.write(_('renaming %s to %s\n')
                                  % (filename, patchname))
                    util.rename(originpath, self.join(patchname))
                else:
                    patchname = filename

            else:
                if filename == '-' and not patchname:
                    raise error.Abort(_('need --name to import a patch from -'))
                elif not patchname:
                    patchname = normname(os.path.basename(filename.rstrip('/')))
                self.checkpatchname(patchname, force)
                try:
                    if filename == '-':
                        text = self.ui.fin.read()
                    else:
                        fp = hg.openpath(self.ui, filename)
                        text = fp.read()
                        fp.close()
                except (OSError, IOError):
                    raise error.Abort(_("unable to read file %s") % filename)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
                patchf.close()
            if not force:
                checkseries(patchname)
            if patchname not in self.series:
                index = self.fullseriesend() + i
                self.fullseries[index:index] = [patchname]
            self.parseseries()
            self.seriesdirty = True
            self.ui.warn(_("adding %s to series file\n") % patchname)
            self.added.append(patchname)
            imported.append(patchname)
            patchname = None

        self.removeundo(repo)
        return imported
2258 2258
def fixkeepchangesopts(ui, opts):
    """Honor the mq.keepchanges config option.

    Returns opts unchanged when the option is off or when --force or
    --exact was given; otherwise returns a copy of opts with
    keep_changes enabled.
    """
    wantkeep = ui.configbool('mq', 'keepchanges')
    if not wantkeep or opts.get('force') or opts.get('exact'):
        return opts
    newopts = dict(opts)
    newopts['keep_changes'] = True
    return newopts
2266 2266
2267 2267 @command("qdelete|qremove|qrm",
2268 2268 [('k', 'keep', None, _('keep patch file')),
2269 2269 ('r', 'rev', [],
2270 2270 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2271 2271 _('hg qdelete [-k] [PATCH]...'),
2272 2272 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2273 2273 def delete(ui, repo, *patches, **opts):
2274 2274 """remove patches from queue
2275 2275
2276 2276 The patches must not be applied, and at least one patch is required. Exact
2277 2277 patch identifiers must be given. With -k/--keep, the patch files are
2278 2278 preserved in the patch directory.
2279 2279
2280 2280 To stop managing a patch and move it into permanent history,
2281 2281 use the :hg:`qfinish` command."""
2282 2282 q = repo.mq
2283 2283 q.delete(repo, patches, pycompat.byteskwargs(opts))
2284 2284 q.savedirty()
2285 2285 return 0
2286 2286
2287 2287 @command("qapplied",
2288 2288 [('1', 'last', None, _('show only the preceding applied patch'))
2289 2289 ] + seriesopts,
2290 2290 _('hg qapplied [-1] [-s] [PATCH]'),
2291 2291 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2292 2292 def applied(ui, repo, patch=None, **opts):
2293 2293 """print the patches already applied
2294 2294
2295 2295 Returns 0 on success."""
2296 2296
2297 2297 q = repo.mq
2298 2298 opts = pycompat.byteskwargs(opts)
2299 2299
2300 2300 if patch:
2301 2301 if patch not in q.series:
2302 2302 raise error.Abort(_("patch %s is not in series file") % patch)
2303 2303 end = q.series.index(patch) + 1
2304 2304 else:
2305 2305 end = q.seriesend(True)
2306 2306
2307 2307 if opts.get('last') and not end:
2308 2308 ui.write(_("no patches applied\n"))
2309 2309 return 1
2310 2310 elif opts.get('last') and end == 1:
2311 2311 ui.write(_("only one patch applied\n"))
2312 2312 return 1
2313 2313 elif opts.get('last'):
2314 2314 start = end - 2
2315 2315 end = 1
2316 2316 else:
2317 2317 start = 0
2318 2318
2319 2319 q.qseries(repo, length=end, start=start, status='A',
2320 2320 summary=opts.get('summary'))
2321 2321
2322 2322
2323 2323 @command("qunapplied",
2324 2324 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2325 2325 _('hg qunapplied [-1] [-s] [PATCH]'),
2326 2326 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2327 2327 def unapplied(ui, repo, patch=None, **opts):
2328 2328 """print the patches not yet applied
2329 2329
2330 2330 Returns 0 on success."""
2331 2331
2332 2332 q = repo.mq
2333 2333 opts = pycompat.byteskwargs(opts)
2334 2334 if patch:
2335 2335 if patch not in q.series:
2336 2336 raise error.Abort(_("patch %s is not in series file") % patch)
2337 2337 start = q.series.index(patch) + 1
2338 2338 else:
2339 2339 start = q.seriesend(True)
2340 2340
2341 2341 if start == len(q.series) and opts.get('first'):
2342 2342 ui.write(_("all patches applied\n"))
2343 2343 return 1
2344 2344
2345 2345 if opts.get('first'):
2346 2346 length = 1
2347 2347 else:
2348 2348 length = None
2349 2349 q.qseries(repo, start=start, length=length, status='U',
2350 2350 summary=opts.get('summary'))
2351 2351
2352 2352 @command("qimport",
2353 2353 [('e', 'existing', None, _('import file in patch directory')),
2354 2354 ('n', 'name', '',
2355 2355 _('name of patch file'), _('NAME')),
2356 2356 ('f', 'force', None, _('overwrite existing files')),
2357 2357 ('r', 'rev', [],
2358 2358 _('place existing revisions under mq control'), _('REV')),
2359 2359 ('g', 'git', None, _('use git extended diff format')),
2360 2360 ('P', 'push', None, _('qpush after importing'))],
2361 2361 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
2362 2362 helpcategory=command.CATEGORY_IMPORT_EXPORT)
2363 2363 def qimport(ui, repo, *filename, **opts):
2364 2364 """import a patch or existing changeset
2365 2365
2366 2366 The patch is inserted into the series after the last applied
2367 2367 patch. If no patches have been applied, qimport prepends the patch
2368 2368 to the series.
2369 2369
2370 2370 The patch will have the same name as its source file unless you
2371 2371 give it a new one with -n/--name.
2372 2372
2373 2373 You can register an existing patch inside the patch directory with
2374 2374 the -e/--existing flag.
2375 2375
2376 2376 With -f/--force, an existing patch of the same name will be
2377 2377 overwritten.
2378 2378
2379 2379 An existing changeset may be placed under mq control with -r/--rev
2380 2380 (e.g. qimport --rev . -n patch will place the current revision
2381 2381 under mq control). With -g/--git, patches imported with --rev will
2382 2382 use the git diff format. See the diffs help topic for information
2383 2383 on why this is important for preserving rename/copy information
2384 2384 and permission changes. Use :hg:`qfinish` to remove changesets
2385 2385 from mq control.
2386 2386
2387 2387 To import a patch from standard input, pass - as the patch file.
2388 2388 When importing from standard input, a patch name must be specified
2389 2389 using the --name flag.
2390 2390
2391 2391 To import an existing patch while renaming it::
2392 2392
2393 2393 hg qimport -e existing-patch -n new-name
2394 2394
2395 2395 Returns 0 if import succeeded.
2396 2396 """
2397 2397 opts = pycompat.byteskwargs(opts)
2398 2398 with repo.lock(): # cause this may move phase
2399 2399 q = repo.mq
2400 2400 try:
2401 2401 imported = q.qimport(
2402 2402 repo, filename, patchname=opts.get('name'),
2403 2403 existing=opts.get('existing'), force=opts.get('force'),
2404 2404 rev=opts.get('rev'), git=opts.get('git'))
2405 2405 finally:
2406 2406 q.savedirty()
2407 2407
2408 2408 if imported and opts.get('push') and not opts.get('rev'):
2409 2409 return q.push(repo, imported[-1])
2410 2410 return 0
2411 2411
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    r = q.init(repo, create)
    q.savedirty()
    if not r:
        return 0
    # a versioned queue repository was created: seed and track its
    # .hgignore and series files
    if not os.path.exists(r.wjoin('.hgignore')):
        fp = r.wvfs('.hgignore', 'w')
        fp.write('^\\.hg\n'
                 '^\\.mq\n'
                 'syntax: glob\n'
                 'status\n'
                 'guards\n')
        fp.close()
    if not os.path.exists(r.wjoin('series')):
        r.wvfs('series', 'w').close()
    r[None].add(['.hgignore', 'series'])
    commands.add(ui, r)
    return 0
2437 2437
2438 2438 @command("qinit",
2439 2439 [('c', 'create-repo', None, _('create queue repository'))],
2440 2440 _('hg qinit [-c]'),
2441 2441 helpcategory=command.CATEGORY_REPO_CREATION,
2442 2442 helpbasic=True)
2443 2443 def init(ui, repo, **opts):
2444 2444 """init a new queue repository (DEPRECATED)
2445 2445
2446 2446 The queue repository is unversioned by default. If
2447 2447 -c/--create-repo is specified, qinit will create a separate nested
2448 2448 repository for patches (qinit -c may also be run later to convert
2449 2449 an unversioned patch repository into a versioned one). You can use
2450 2450 qcommit to commit changes to this queue repository.
2451 2451
2452 2452 This command is deprecated. Without -c, it's implied by other relevant
2453 2453 commands. With -c, use :hg:`init --mq` instead."""
2454 2454 return qinit(ui, repo, create=opts.get(r'create_repo'))
2455 2455
2456 2456 @command("qclone",
2457 2457 [('', 'pull', None, _('use pull protocol to copy metadata')),
2458 2458 ('U', 'noupdate', None,
2459 2459 _('do not update the new working directories')),
2460 2460 ('', 'uncompressed', None,
2461 2461 _('use uncompressed transfer (fast over LAN)')),
2462 2462 ('p', 'patches', '',
2463 2463 _('location of source patch repository'), _('REPO')),
2464 2464 ] + cmdutil.remoteopts,
2465 2465 _('hg qclone [OPTION]... SOURCE [DEST]'),
2466 2466 helpcategory=command.CATEGORY_REPO_CREATION,
2467 2467 norepo=True)
2468 2468 def clone(ui, source, dest=None, **opts):
2469 2469 '''clone main and patch repository at same time
2470 2470
2471 2471 If source is local, destination will have no patches applied. If
2472 2472 source is remote, this command can not check if patches are
2473 2473 applied in source, so cannot guarantee that patches are not
2474 2474 applied in destination. If you clone remote repository, be sure
2475 2475 before that it has no patches applied.
2476 2476
2477 2477 Source patch repository is looked for in <src>/.hg/patches by
2478 2478 default. Use -p <url> to change.
2479 2479
2480 2480 The patch directory must be a nested Mercurial repository, as
2481 2481 would be created by :hg:`init --mq`.
2482 2482
2483 2483 Return 0 on success.
2484 2484 '''
2485 2485 opts = pycompat.byteskwargs(opts)
2486 2486 def patchdir(repo):
2487 2487 """compute a patch repo url from a repo object"""
2488 2488 url = repo.url()
2489 2489 if url.endswith('/'):
2490 2490 url = url[:-1]
2491 2491 return url + '/.hg/patches'
2492 2492
2493 2493 # main repo (destination and sources)
2494 2494 if dest is None:
2495 2495 dest = hg.defaultdest(source)
2496 2496 sr = hg.peer(ui, opts, ui.expandpath(source))
2497 2497
2498 2498 # patches repo (source only)
2499 2499 if opts.get('patches'):
2500 2500 patchespath = ui.expandpath(opts.get('patches'))
2501 2501 else:
2502 2502 patchespath = patchdir(sr)
2503 2503 try:
2504 2504 hg.peer(ui, opts, patchespath)
2505 2505 except error.RepoError:
2506 2506 raise error.Abort(_('versioned patch repository not found'
2507 2507 ' (see init --mq)'))
2508 2508 qbase, destrev = None, None
2509 2509 if sr.local():
2510 2510 repo = sr.local()
2511 2511 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2512 2512 qbase = repo.mq.applied[0].node
2513 2513 if not hg.islocal(dest):
2514 2514 heads = set(repo.heads())
2515 2515 destrev = list(heads.difference(repo.heads(qbase)))
2516 2516 destrev.append(repo.changelog.parents(qbase)[0])
2517 2517 elif sr.capable('lookup'):
2518 2518 try:
2519 2519 qbase = sr.lookup('qbase')
2520 2520 except error.RepoError:
2521 2521 pass
2522 2522
2523 2523 ui.note(_('cloning main repository\n'))
2524 2524 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2525 2525 pull=opts.get('pull'),
2526 2526 revs=destrev,
2527 2527 update=False,
2528 2528 stream=opts.get('uncompressed'))
2529 2529
2530 2530 ui.note(_('cloning patch repository\n'))
2531 2531 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2532 2532 pull=opts.get('pull'), update=not opts.get('noupdate'),
2533 2533 stream=opts.get('uncompressed'))
2534 2534
2535 2535 if dr.local():
2536 2536 repo = dr.local()
2537 2537 if qbase:
2538 2538 ui.note(_('stripping applied patches from destination '
2539 2539 'repository\n'))
2540 2540 strip(ui, repo, [qbase], update=False, backup=None)
2541 2541 if not opts.get('noupdate'):
2542 2542 ui.note(_('updating destination repository\n'))
2543 2543 hg.update(repo, repo.changelog.tip())
2544 2544
2545 2545 @command("qcommit|qci",
2546 2546 commands.table["commit|ci"][1],
2547 2547 _('hg qcommit [OPTION]... [FILE]...'),
2548 2548 helpcategory=command.CATEGORY_COMMITTING,
2549 2549 inferrepo=True)
2550 2550 def commit(ui, repo, *pats, **opts):
2551 2551 """commit changes in the queue repository (DEPRECATED)
2552 2552
2553 2553 This command is deprecated; use :hg:`commit --mq` instead."""
2554 2554 q = repo.mq
2555 2555 r = q.qrepo()
2556 2556 if not r:
2557 2557 raise error.Abort('no queue repository')
2558 2558 commands.commit(r.ui, r, *pats, **opts)
2559 2559
2560 2560 @command("qseries",
2561 2561 [('m', 'missing', None, _('print patches not in series')),
2562 2562 ] + seriesopts,
2563 2563 _('hg qseries [-ms]'),
2564 2564 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2565 2565 def series(ui, repo, **opts):
2566 2566 """print the entire series file
2567 2567
2568 2568 Returns 0 on success."""
2569 2569 repo.mq.qseries(repo, missing=opts.get(r'missing'),
2570 2570 summary=opts.get(r'summary'))
2571 2571 return 0
2572 2572
2573 2573 @command("qtop", seriesopts, _('hg qtop [-s]'),
2574 2574 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2575 2575 def top(ui, repo, **opts):
2576 2576 """print the name of the current patch
2577 2577
2578 2578 Returns 0 on success."""
2579 2579 q = repo.mq
2580 2580 if q.applied:
2581 2581 t = q.seriesend(True)
2582 2582 else:
2583 2583 t = 0
2584 2584
2585 2585 if t:
2586 2586 q.qseries(repo, start=t - 1, length=1, status='A',
2587 2587 summary=opts.get(r'summary'))
2588 2588 else:
2589 2589 ui.write(_("no patches applied\n"))
2590 2590 return 1
2591 2591
2592 2592 @command("qnext", seriesopts, _('hg qnext [-s]'),
2593 2593 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2594 2594 def next(ui, repo, **opts):
2595 2595 """print the name of the next pushable patch
2596 2596
2597 2597 Returns 0 on success."""
2598 2598 q = repo.mq
2599 2599 end = q.seriesend()
2600 2600 if end == len(q.series):
2601 2601 ui.write(_("all patches applied\n"))
2602 2602 return 1
2603 2603 q.qseries(repo, start=end, length=1, summary=opts.get(r'summary'))
2604 2604
2605 2605 @command("qprev", seriesopts, _('hg qprev [-s]'),
2606 2606 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2607 2607 def prev(ui, repo, **opts):
2608 2608 """print the name of the preceding applied patch
2609 2609
2610 2610 Returns 0 on success."""
2611 2611 q = repo.mq
2612 2612 l = len(q.applied)
2613 2613 if l == 1:
2614 2614 ui.write(_("only one patch applied\n"))
2615 2615 return 1
2616 2616 if not l:
2617 2617 ui.write(_("no patches applied\n"))
2618 2618 return 1
2619 2619 idx = q.series.index(q.applied[-2].name)
2620 2620 q.qseries(repo, start=idx, length=1, status='A',
2621 2621 summary=opts.get(r'summary'))
2622 2622
def setupheaderopts(ui, opts):
    """Derive 'user' and 'date' in opts from --currentuser/--currentdate.

    Mutates opts in place; an explicit --user/--date always wins over
    the --currentuser/--currentdate flags."""
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % dateutil.makedate()
2628 2628
2629 2629 @command("qnew",
2630 2630 [('e', 'edit', None, _('invoke editor on commit messages')),
2631 2631 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2632 2632 ('g', 'git', None, _('use git extended diff format')),
2633 2633 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2634 2634 ('u', 'user', '',
2635 2635 _('add "From: <USER>" to patch'), _('USER')),
2636 2636 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2637 2637 ('d', 'date', '',
2638 2638 _('add "Date: <DATE>" to patch'), _('DATE'))
2639 2639 ] + cmdutil.walkopts + cmdutil.commitopts,
2640 2640 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2641 2641 helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
2642 2642 inferrepo=True)
2643 2643 def new(ui, repo, patch, *args, **opts):
2644 2644 """create a new patch
2645 2645
2646 2646 qnew creates a new patch on top of the currently-applied patch (if
2647 2647 any). The patch will be initialized with any outstanding changes
2648 2648 in the working directory. You may also use -I/--include,
2649 2649 -X/--exclude, and/or a list of files after the patch name to add
2650 2650 only changes to matching files to the new patch, leaving the rest
2651 2651 as uncommitted modifications.
2652 2652
2653 2653 -u/--user and -d/--date can be used to set the (given) user and
2654 2654 date, respectively. -U/--currentuser and -D/--currentdate set user
2655 2655 to current user and date to current date.
2656 2656
2657 2657 -e/--edit, -m/--message or -l/--logfile set the patch header as
2658 2658 well as the commit message. If none is specified, the header is
2659 2659 empty and the commit message is '[mq]: PATCH'.
2660 2660
2661 2661 Use the -g/--git option to keep the patch in the git extended diff
2662 2662 format. Read the diffs help topic for more information on why this
2663 2663 is important for preserving permission changes and copy/rename
2664 2664 information.
2665 2665
2666 2666 Returns 0 on successful creation of a new patch.
2667 2667 """
2668 2668 opts = pycompat.byteskwargs(opts)
2669 2669 msg = cmdutil.logmessage(ui, opts)
2670 2670 q = repo.mq
2671 2671 opts['msg'] = msg
2672 2672 setupheaderopts(ui, opts)
2673 2673 q.new(repo, patch, *args, **pycompat.strkwargs(opts))
2674 2674 q.savedirty()
2675 2675 return 0
2676 2676
2677 2677 @command("qrefresh",
2678 2678 [('e', 'edit', None, _('invoke editor on commit messages')),
2679 2679 ('g', 'git', None, _('use git extended diff format')),
2680 2680 ('s', 'short', None,
2681 2681 _('refresh only files already in the patch and specified files')),
2682 2682 ('U', 'currentuser', None,
2683 2683 _('add/update author field in patch with current user')),
2684 2684 ('u', 'user', '',
2685 2685 _('add/update author field in patch with given user'), _('USER')),
2686 2686 ('D', 'currentdate', None,
2687 2687 _('add/update date field in patch with current date')),
2688 2688 ('d', 'date', '',
2689 2689 _('add/update date field in patch with given date'), _('DATE'))
2690 2690 ] + cmdutil.walkopts + cmdutil.commitopts,
2691 2691 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2692 2692 helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
2693 2693 inferrepo=True)
2694 2694 def refresh(ui, repo, *pats, **opts):
2695 2695 """update the current patch
2696 2696
2697 2697 If any file patterns are provided, the refreshed patch will
2698 2698 contain only the modifications that match those patterns; the
2699 2699 remaining modifications will remain in the working directory.
2700 2700
2701 2701 If -s/--short is specified, files currently included in the patch
2702 2702 will be refreshed just like matched files and remain in the patch.
2703 2703
2704 2704 If -e/--edit is specified, Mercurial will start your configured editor for
2705 2705 you to enter a message. In case qrefresh fails, you will find a backup of
2706 2706 your message in ``.hg/last-message.txt``.
2707 2707
2708 2708 hg add/remove/copy/rename work as usual, though you might want to
2709 2709 use git-style patches (-g/--git or [diff] git=1) to track copies
2710 2710 and renames. See the diffs help topic for more information on the
2711 2711 git diff format.
2712 2712
2713 2713 Returns 0 on success.
2714 2714 """
2715 2715 opts = pycompat.byteskwargs(opts)
2716 2716 q = repo.mq
2717 2717 message = cmdutil.logmessage(ui, opts)
2718 2718 setupheaderopts(ui, opts)
2719 2719 with repo.wlock():
2720 2720 ret = q.refresh(repo, pats, msg=message, **pycompat.strkwargs(opts))
2721 2721 q.savedirty()
2722 2722 return ret
2723 2723
2724 2724 @command("qdiff",
2725 2725 cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
2726 2726 _('hg qdiff [OPTION]... [FILE]...'),
2727 2727 helpcategory=command.CATEGORY_FILE_CONTENTS, helpbasic=True,
2728 2728 inferrepo=True)
2729 2729 def diff(ui, repo, *pats, **opts):
2730 2730 """diff of the current patch and subsequent modifications
2731 2731
2732 2732 Shows a diff which includes the current patch as well as any
2733 2733 changes which have been made in the working directory since the
2734 2734 last refresh (thus showing what the current patch would become
2735 2735 after a qrefresh).
2736 2736
2737 2737 Use :hg:`diff` if you only want to see the changes made since the
2738 2738 last qrefresh, or :hg:`export qtip` if you want to see changes
2739 2739 made by the current patch without including changes made since the
2740 2740 qrefresh.
2741 2741
2742 2742 Returns 0 on success.
2743 2743 """
2744 2744 ui.pager('qdiff')
2745 2745 repo.mq.diff(repo, pats, pycompat.byteskwargs(opts))
2746 2746 return 0
2747 2747
@command('qfold',
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + cmdutil.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
         helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    opts = pycompat.byteskwargs(opts)
    q = repo.mq
    if not files:
        raise error.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise error.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup('qtip')
    patches = []
    messages = []
    # resolve and validate each named patch before touching anything
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise error.Abort(_('qfold cannot fold already applied patch %s')
                              % p)
        # honor the "skipping" warning above: previously a duplicate was
        # appended anyway, so the same patch was applied twice and the
        # second application failed with 'error folding patch'
        if p not in patches:
            patches.append(p)

    for p in patches:
        if not message:
            # no -m/-l given: collect each folded patch's header so it
            # can be merged into the refreshed patch's message below
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise error.Abort(_('error folding patch %s') % p)

    if not message:
        # start from the current patch's header and append each folded
        # patch's message, separated by '* * *' lines
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append('* * *')
                message.extend(msg)
        message = '\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    with repo.wlock():
        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
                  editform='mq.qfold')
        q.delete(repo, patches, opts)
        q.savedirty()
2816 2816
2817 2817 @command("qgoto",
2818 2818 [('', 'keep-changes', None,
2819 2819 _('tolerate non-conflicting local changes')),
2820 2820 ('f', 'force', None, _('overwrite any local changes')),
2821 2821 ('', 'no-backup', None, _('do not save backup copies of files'))],
2822 2822 _('hg qgoto [OPTION]... PATCH'),
2823 2823 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2824 2824 def goto(ui, repo, patch, **opts):
2825 2825 '''push or pop patches until named patch is at top of stack
2826 2826
2827 2827 Returns 0 on success.'''
2828 2828 opts = pycompat.byteskwargs(opts)
2829 2829 opts = fixkeepchangesopts(ui, opts)
2830 2830 q = repo.mq
2831 2831 patch = q.lookup(patch)
2832 2832 nobackup = opts.get('no_backup')
2833 2833 keepchanges = opts.get('keep_changes')
2834 2834 if q.isapplied(patch):
2835 2835 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2836 2836 keepchanges=keepchanges)
2837 2837 else:
2838 2838 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2839 2839 keepchanges=keepchanges)
2840 2840 q.savedirty()
2841 2841 return ret
2842 2842
2843 2843 @command("qguard",
2844 2844 [('l', 'list', None, _('list all patches and guards')),
2845 2845 ('n', 'none', None, _('drop all guards'))],
2846 2846 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
2847 2847 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2848 2848 def guard(ui, repo, *args, **opts):
2849 2849 '''set or print guards for a patch
2850 2850
2851 2851 Guards control whether a patch can be pushed. A patch with no
2852 2852 guards is always pushed. A patch with a positive guard ("+foo") is
2853 2853 pushed only if the :hg:`qselect` command has activated it. A patch with
2854 2854 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2855 2855 has activated it.
2856 2856
2857 2857 With no arguments, print the currently active guards.
2858 2858 With arguments, set guards for the named patch.
2859 2859
2860 2860 .. note::
2861 2861
2862 2862 Specifying negative guards now requires '--'.
2863 2863
2864 2864 To set guards on another patch::
2865 2865
2866 2866 hg qguard other.patch -- +2.6.17 -stable
2867 2867
2868 2868 Returns 0 on success.
2869 2869 '''
2870 2870 def status(idx):
2871 2871 guards = q.seriesguards[idx] or ['unguarded']
2872 2872 if q.series[idx] in applied:
2873 2873 state = 'applied'
2874 2874 elif q.pushable(idx)[0]:
2875 2875 state = 'unapplied'
2876 2876 else:
2877 2877 state = 'guarded'
2878 2878 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2879 2879 ui.write('%s: ' % ui.label(q.series[idx], label))
2880 2880
2881 2881 for i, guard in enumerate(guards):
2882 2882 if guard.startswith('+'):
2883 2883 ui.write(guard, label='qguard.positive')
2884 2884 elif guard.startswith('-'):
2885 2885 ui.write(guard, label='qguard.negative')
2886 2886 else:
2887 2887 ui.write(guard, label='qguard.unguarded')
2888 2888 if i != len(guards) - 1:
2889 2889 ui.write(' ')
2890 2890 ui.write('\n')
2891 2891 q = repo.mq
2892 2892 applied = set(p.name for p in q.applied)
2893 2893 patch = None
2894 2894 args = list(args)
2895 2895 if opts.get(r'list'):
2896 2896 if args or opts.get(r'none'):
2897 2897 raise error.Abort(_('cannot mix -l/--list with options or '
2898 2898 'arguments'))
2899 2899 for i in pycompat.xrange(len(q.series)):
2900 2900 status(i)
2901 2901 return
2902 2902 if not args or args[0][0:1] in '-+':
2903 2903 if not q.applied:
2904 2904 raise error.Abort(_('no patches applied'))
2905 2905 patch = q.applied[-1].name
2906 2906 if patch is None and args[0][0:1] not in '-+':
2907 2907 patch = args.pop(0)
2908 2908 if patch is None:
2909 2909 raise error.Abort(_('no patch to work with'))
2910 2910 if args or opts.get(r'none'):
2911 2911 idx = q.findseries(patch)
2912 2912 if idx is None:
2913 2913 raise error.Abort(_('no patch named %s') % patch)
2914 2914 q.setguards(idx, args)
2915 2915 q.savedirty()
2916 2916 else:
2917 2917 status(q.series.index(q.lookup(patch)))
2918 2918
2919 2919 @command("qheader", [], _('hg qheader [PATCH]'),
2920 2920 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
2921 2921 def header(ui, repo, patch=None):
2922 2922 """print the header of the topmost or specified patch
2923 2923
2924 2924 Returns 0 on success."""
2925 2925 q = repo.mq
2926 2926
2927 2927 if patch:
2928 2928 patch = q.lookup(patch)
2929 2929 else:
2930 2930 if not q.applied:
2931 2931 ui.write(_('no patches applied\n'))
2932 2932 return 1
2933 2933 patch = q.lookup('qtip')
2934 2934 ph = patchheader(q.join(patch), q.plainmode)
2935 2935
2936 2936 ui.write('\n'.join(ph.message) + '\n')
2937 2937
def lastsavename(path):
    """Find the most recent save file for ``path``.

    Save files are named ``<path>.<N>`` with a decimal index N (see
    savename()). Returns a ``(filename, index)`` tuple for the
    highest-numbered save file, or ``(None, None)`` if none exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # use a raw string, escape the separating dot, and escape the base
    # name: the old pattern used a bare '.', which matched *any*
    # character, so e.g. 'patchesX3' was mistaken for a save file of
    # 'patches'
    namere = re.compile(r"%s\.([0-9]+)" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2954 2954
def savename(path):
    """Return the name the next save file for ``path`` should use."""
    last, index = lastsavename(path)
    if last is None:
        # no previous save file: numbering starts at 1
        index = 0
    return path + ".%d" % (index + 1)
2961 2961
2962 2962 @command("qpush",
2963 2963 [('', 'keep-changes', None,
2964 2964 _('tolerate non-conflicting local changes')),
2965 2965 ('f', 'force', None, _('apply on top of local changes')),
2966 2966 ('e', 'exact', None,
2967 2967 _('apply the target patch to its recorded parent')),
2968 2968 ('l', 'list', None, _('list patch name in commit text')),
2969 2969 ('a', 'all', None, _('apply all patches')),
2970 2970 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2971 2971 ('n', 'name', '',
2972 2972 _('merge queue name (DEPRECATED)'), _('NAME')),
2973 2973 ('', 'move', None,
2974 2974 _('reorder patch series and apply only the patch')),
2975 2975 ('', 'no-backup', None, _('do not save backup copies of files'))],
2976 2976 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
2977 2977 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
2978 2978 helpbasic=True)
2979 2979 def push(ui, repo, patch=None, **opts):
2980 2980 """push the next patch onto the stack
2981 2981
2982 2982 By default, abort if the working directory contains uncommitted
2983 2983 changes. With --keep-changes, abort only if the uncommitted files
2984 2984 overlap with patched files. With -f/--force, backup and patch over
2985 2985 uncommitted changes.
2986 2986
2987 2987 Return 0 on success.
2988 2988 """
2989 2989 q = repo.mq
2990 2990 mergeq = None
2991 2991
2992 2992 opts = pycompat.byteskwargs(opts)
2993 2993 opts = fixkeepchangesopts(ui, opts)
2994 2994 if opts.get('merge'):
2995 2995 if opts.get('name'):
2996 2996 newpath = repo.vfs.join(opts.get('name'))
2997 2997 else:
2998 2998 newpath, i = lastsavename(q.path)
2999 2999 if not newpath:
3000 3000 ui.warn(_("no saved queues found, please use -n\n"))
3001 3001 return 1
3002 3002 mergeq = queue(ui, repo.baseui, repo.path, newpath)
3003 3003 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
3004 3004 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
3005 3005 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
3006 3006 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
3007 3007 keepchanges=opts.get('keep_changes'))
3008 3008 return ret
3009 3009
3010 3010 @command("qpop",
3011 3011 [('a', 'all', None, _('pop all patches')),
3012 3012 ('n', 'name', '',
3013 3013 _('queue name to pop (DEPRECATED)'), _('NAME')),
3014 3014 ('', 'keep-changes', None,
3015 3015 _('tolerate non-conflicting local changes')),
3016 3016 ('f', 'force', None, _('forget any local changes to patched files')),
3017 3017 ('', 'no-backup', None, _('do not save backup copies of files'))],
3018 3018 _('hg qpop [-a] [-f] [PATCH | INDEX]'),
3019 3019 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3020 3020 helpbasic=True)
3021 3021 def pop(ui, repo, patch=None, **opts):
3022 3022 """pop the current patch off the stack
3023 3023
3024 3024 Without argument, pops off the top of the patch stack. If given a
3025 3025 patch name, keeps popping off patches until the named patch is at
3026 3026 the top of the stack.
3027 3027
3028 3028 By default, abort if the working directory contains uncommitted
3029 3029 changes. With --keep-changes, abort only if the uncommitted files
3030 3030 overlap with patched files. With -f/--force, backup and discard
3031 3031 changes made to such files.
3032 3032
3033 3033 Return 0 on success.
3034 3034 """
3035 3035 opts = pycompat.byteskwargs(opts)
3036 3036 opts = fixkeepchangesopts(ui, opts)
3037 3037 localupdate = True
3038 3038 if opts.get('name'):
3039 3039 q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get('name')))
3040 3040 ui.warn(_('using patch queue: %s\n') % q.path)
3041 3041 localupdate = False
3042 3042 else:
3043 3043 q = repo.mq
3044 3044 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
3045 3045 all=opts.get('all'), nobackup=opts.get('no_backup'),
3046 3046 keepchanges=opts.get('keep_changes'))
3047 3047 q.savedirty()
3048 3048 return ret
3049 3049
3050 3050 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'),
3051 3051 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
3052 3052 def rename(ui, repo, patch, name=None, **opts):
3053 3053 """rename a patch
3054 3054
3055 3055 With one argument, renames the current patch to PATCH1.
3056 3056 With two arguments, renames PATCH1 to PATCH2.
3057 3057
3058 3058 Returns 0 on success."""
3059 3059 q = repo.mq
3060 3060 if not name:
3061 3061 name = patch
3062 3062 patch = None
3063 3063
3064 3064 if patch:
3065 3065 patch = q.lookup(patch)
3066 3066 else:
3067 3067 if not q.applied:
3068 3068 ui.write(_('no patches applied\n'))
3069 3069 return
3070 3070 patch = q.lookup('qtip')
3071 3071 absdest = q.join(name)
3072 3072 if os.path.isdir(absdest):
3073 3073 name = normname(os.path.join(name, os.path.basename(patch)))
3074 3074 absdest = q.join(name)
3075 3075 q.checkpatchname(name)
3076 3076
3077 3077 ui.note(_('renaming %s to %s\n') % (patch, name))
3078 3078 i = q.findseries(patch)
3079 3079 guards = q.guard_re.findall(q.fullseries[i])
3080 3080 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
3081 3081 q.parseseries()
3082 3082 q.seriesdirty = True
3083 3083
3084 3084 info = q.isapplied(patch)
3085 3085 if info:
3086 3086 q.applied[info[0]] = statusentry(info[1], name)
3087 3087 q.applieddirty = True
3088 3088
3089 3089 destdir = os.path.dirname(absdest)
3090 3090 if not os.path.isdir(destdir):
3091 3091 os.makedirs(destdir)
3092 3092 util.rename(q.join(patch), absdest)
3093 3093 r = q.qrepo()
3094 3094 if r and patch in r.dirstate:
3095 3095 wctx = r[None]
3096 3096 with r.wlock():
3097 3097 if r.dirstate[patch] == 'a':
3098 3098 r.dirstate.drop(patch)
3099 3099 r.dirstate.add(name)
3100 3100 else:
3101 3101 wctx.copy(patch, name)
3102 3102 wctx.forget([patch])
3103 3103
3104 3104 q.savedirty()
3105 3105
3106 3106 @command("qrestore",
3107 3107 [('d', 'delete', None, _('delete save entry')),
3108 3108 ('u', 'update', None, _('update queue working directory'))],
3109 3109 _('hg qrestore [-d] [-u] REV'),
3110 3110 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
3111 3111 def restore(ui, repo, rev, **opts):
3112 3112 """restore the queue state saved by a revision (DEPRECATED)
3113 3113
3114 3114 This command is deprecated, use :hg:`rebase` instead."""
3115 3115 rev = repo.lookup(rev)
3116 3116 q = repo.mq
3117 3117 q.restore(repo, rev, delete=opts.get(r'delete'),
3118 3118 qupdate=opts.get(r'update'))
3119 3119 q.savedirty()
3120 3120 return 0
3121 3121
3122 3122 @command("qsave",
3123 3123 [('c', 'copy', None, _('copy patch directory')),
3124 3124 ('n', 'name', '',
3125 3125 _('copy directory name'), _('NAME')),
3126 3126 ('e', 'empty', None, _('clear queue status file')),
3127 3127 ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
3128 3128 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
3129 3129 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
3130 3130 def save(ui, repo, **opts):
3131 3131 """save current queue state (DEPRECATED)
3132 3132
3133 3133 This command is deprecated, use :hg:`rebase` instead."""
3134 3134 q = repo.mq
3135 3135 opts = pycompat.byteskwargs(opts)
3136 3136 message = cmdutil.logmessage(ui, opts)
3137 3137 ret = q.save(repo, msg=message)
3138 3138 if ret:
3139 3139 return ret
3140 3140 q.savedirty() # save to .hg/patches before copying
3141 3141 if opts.get('copy'):
3142 3142 path = q.path
3143 3143 if opts.get('name'):
3144 3144 newpath = os.path.join(q.basepath, opts.get('name'))
3145 3145 if os.path.exists(newpath):
3146 3146 if not os.path.isdir(newpath):
3147 3147 raise error.Abort(_('destination %s exists and is not '
3148 3148 'a directory') % newpath)
3149 3149 if not opts.get('force'):
3150 3150 raise error.Abort(_('destination %s exists, '
3151 3151 'use -f to force') % newpath)
3152 3152 else:
3153 3153 newpath = savename(path)
3154 3154 ui.warn(_("copy %s to %s\n") % (path, newpath))
3155 3155 util.copyfiles(path, newpath)
3156 3156 if opts.get('empty'):
3157 3157 del q.applied[:]
3158 3158 q.applieddirty = True
3159 3159 q.savedirty()
3160 3160 return 0
3161 3161
3162 3162
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable    (negative guard)
        qguard bar.patch    +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    opts = pycompat.byteskwargs(opts)
    guards = q.active()
    # pushable(i) is True when the i-th applied patch is not blocked by the
    # currently active guards.
    pushable = lambda i: q.pushable(q.applied[i].name)[0]
    if args or opts.get('none'):
        # Changing (or clearing) the active guard set: remember the guarded
        # state before the change so we can report what changed.
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in pycompat.xrange(len(q.applied))
                       if not pushable(i)]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in pycompat.xrange(len(q.applied))
                       if not pushable(i)]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: tally how many series entries use each guard.
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = list(guards.items())
        # Sort by guard name, skipping the leading '+'/'-' sign.
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # No arguments and no mode option: just print the active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # Remember the current top patch so --reapply can push back to it later.
    reapply = opts.get('reapply') and q.applied and q.applied[-1].name
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # Pop down to just below the first applied patch that is now guarded.
        for i in pycompat.xrange(len(q.applied)):
            if not pushable(i):
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, q.applied[i - 1].name)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # Persist queue state even if the push above aborts midway.
            q.savedirty()
3275 3275
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    if not opts.get(r'applied') and not revrange:
        raise error.Abort(_('no revisions specified'))
    elif opts.get(r'applied'):
        # --applied selects the whole applied stack (qbase through qtip).
        revrange = ('qbase::qtip',) + revrange

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may changes phases but leave the responsibility to lock the
    # repo to the caller to avoid deadlock with wlock. This command code is
    # responsibility for this locking.
    with repo.lock():
        q.finish(repo, revs)
        q.savedirty()
    return 0
3318 3318
@command("qqueue",
         [('l', 'list', False, _('list all available queues')),
          ('', 'active', False, _('print name of active queue')),
          ('c', 'create', False, _('create new queue')),
          ('', 'rename', False, _('rename active queue')),
          ('', 'delete', False, _('delete reference to queue')),
          ('', 'purge', False, _('delete queue, and remove patch dir')),
         ],
         _('[OPTION] [QUEUE]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def qqueue(ui, repo, name=None, **opts):
    '''manage multiple patch queues

    Supports switching between different patch queues, as well as creating
    new patch queues and deleting existing ones.

    Omitting a queue name or specifying -l/--list will show you the registered
    queues - by default the "normal" patches queue is registered. The currently
    active queue will be marked with "(active)". Specifying --active will print
    only the name of the active queue.

    To create a new queue, use -c/--create. The queue is automatically made
    active, except in the case where there are applied patches from the
    currently active queue in the repository. Then the queue will only be
    created and switching will fail.

    To delete an existing queue, use --delete. You cannot delete the currently
    active queue.

    Returns 0 on success.
    '''
    q = repo.mq
    # Bookkeeping files under .hg: 'patches.queues' lists all known queues and
    # 'patches.queue' names the active one.  Queue directories are 'patches'
    # for the default queue and 'patches-<name>' for named queues.
    _defaultqueue = 'patches'
    _allqueues = 'patches.queues'
    _activequeue = 'patches.queue'

    def _getcurrent():
        # Name of the active queue, derived from the queue directory path.
        cur = os.path.basename(q.path)
        if cur.startswith('patches-'):
            cur = cur[8:]
        return cur

    def _noqueues():
        # True when the queue registry file does not exist yet.
        try:
            fh = repo.vfs(_allqueues, 'r')
            fh.close()
        except IOError:
            return True

        return False

    def _getqueues():
        # Sorted list of registered queue names, always including the
        # currently active one (which may predate the registry file).
        current = _getcurrent()

        try:
            fh = repo.vfs(_allqueues, 'r')
            queues = [queue.strip() for queue in fh if queue.strip()]
            fh.close()
            if current not in queues:
                queues.append(current)
        except IOError:
            queues = [_defaultqueue]

        return sorted(queues)

    def _setactive(name):
        # Make 'name' active; refuse while patches are applied, since
        # switching queues under applied patches would corrupt state.
        if q.applied:
            raise error.Abort(_('new queue created, but cannot make active '
                                'as patches are applied'))
        _setactivenocheck(name)

    def _setactivenocheck(name):
        # Record the active queue; an empty file denotes the default queue.
        fh = repo.vfs(_activequeue, 'w')
        if name != 'patches':
            fh.write(name)
        fh.close()

    def _addqueue(name):
        # Append 'name' to the queue registry file.
        fh = repo.vfs(_allqueues, 'a')
        fh.write('%s\n' % (name,))
        fh.close()

    def _queuedir(name):
        # Directory holding the named queue's patches.
        if name == 'patches':
            return repo.vfs.join('patches')
        else:
            return repo.vfs.join('patches-' + name)

    def _validname(name):
        # Queue names may not contain path separators, ':' or '.'.
        for n in name:
            if n in ':\\/.':
                return False
        return True

    def _delete(name):
        # Drop 'name' from the registry; the patch directory itself is kept
        # (--purge removes it separately).
        if name not in existing:
            raise error.Abort(_('cannot delete queue that does not exist'))

        current = _getcurrent()

        if name == current:
            raise error.Abort(_('cannot delete currently active queue'))

        # Rewrite the registry via a temporary file, then rename into place.
        fh = repo.vfs('patches.queues.new', 'w')
        for queue in existing:
            if queue == name:
                continue
            fh.write('%s\n' % (queue,))
        fh.close()
        repo.vfs.rename('patches.queues.new', _allqueues)

    opts = pycompat.byteskwargs(opts)
    if not name or opts.get('list') or opts.get('active'):
        # Query modes: print the active queue, or list all of them.
        current = _getcurrent()
        if opts.get('active'):
            ui.write('%s\n' % (current,))
            return
        for queue in _getqueues():
            ui.write('%s' % (queue,))
            if queue == current and not ui.quiet:
                ui.write(_(' (active)\n'))
            else:
                ui.write('\n')
        return

    if not _validname(name):
        raise error.Abort(
            _('invalid queue name, may not contain the characters ":\\/."'))

    with repo.wlock():
        existing = _getqueues()

        if opts.get('create'):
            if name in existing:
                raise error.Abort(_('queue "%s" already exists') % name)
            if _noqueues():
                # First named queue: register the default queue as well.
                _addqueue(_defaultqueue)
            _addqueue(name)
            _setactive(name)
        elif opts.get('rename'):
            current = _getcurrent()
            if name == current:
                raise error.Abort(_('can\'t rename "%s" to its current name')
                                  % name)
            if name in existing:
                raise error.Abort(_('queue "%s" already exists') % name)

            olddir = _queuedir(current)
            newdir = _queuedir(name)

            if os.path.exists(newdir):
                raise error.Abort(_('non-queue directory "%s" already exists') %
                                  newdir)

            # Rewrite the registry with the new name and move the patch
            # directory while we are at the renamed entry.
            fh = repo.vfs('patches.queues.new', 'w')
            for queue in existing:
                if queue == current:
                    fh.write('%s\n' % (name,))
                    if os.path.exists(olddir):
                        util.rename(olddir, newdir)
                else:
                    fh.write('%s\n' % (queue,))
            fh.close()
            repo.vfs.rename('patches.queues.new', _allqueues)
            _setactivenocheck(name)
        elif opts.get('delete'):
            _delete(name)
        elif opts.get('purge'):
            if name in existing:
                _delete(name)
            qdir = _queuedir(name)
            if os.path.exists(qdir):
                shutil.rmtree(qdir)
        else:
            # Bare queue name: switch to an existing queue.
            if name not in existing:
                raise error.Abort(_('use --create to create a new queue'))
            _setactive(name)
3496 3496
def mqphasedefaults(repo, roots):
    """callback used to set mq changeset as secret when no phase data exists"""
    applied = repo.mq.applied
    if applied:
        # mq.secret decides whether mq-managed changesets default to the
        # secret or the draft phase.
        secret = repo.ui.configbool('mq', 'secret')
        mqphase = phases.secret if secret else phases.draft
        # Everything from the first applied patch upward is mq-managed.
        qbase = repo[applied[0].node]
        roots[mqphase].add(qbase.node())
    return roots
3507 3507
def reposetup(ui, repo):
    """Replace the repo class with an mq-aware subclass (local repos only)."""
    class mqrepo(repo.__class__):
        @localrepo.unfilteredpropertycache
        def mq(self):
            # Lazily created queue object, cached on the unfiltered repo.
            return queue(self.ui, self.baseui, self.path)

        def invalidateall(self):
            super(mqrepo, self).invalidateall()
            if localrepo.hasunfilteredcache(self, r'mq'):
                # recreate mq in case queue path was changed
                delattr(self.unfiltered(), r'mq')

        def abortifwdirpatched(self, errmsg, force=False):
            # Abort with errmsg when a working directory parent is an applied
            # mq patch, unless force is set or patch checking is disabled.
            if self.mq.applied and self.mq.checkapplied and not force:
                parents = self.dirstate.parents()
                patches = [s.node for s in self.mq.applied]
                if any(p in patches for p in parents):
                    raise error.Abort(errmsg)

        def commit(self, text="", user=None, date=None, match=None,
                   force=False, editor=False, extra=None):
            if extra is None:
                extra = {}
            # Committing on top of an applied patch would orphan the patch.
            self.abortifwdirpatched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(text, user, date, match, force,
                                              editor, extra)

        def checkpush(self, pushop):
            # Refuse to push non-secret mq patches unless --force is given.
            if self.mq.applied and self.mq.checkapplied and not pushop.force:
                outapplied = [e.node for e in self.mq.applied]
                if pushop.revs:
                    # Assume applied patches have no non-patch descendants and
                    # are not on remote already. Filtering any changeset not
                    # pushed.
                    heads = set(pushop.revs)
                    for node in reversed(outapplied):
                        if node in heads:
                            break
                        else:
                            outapplied.pop()
                # looking for pushed and shared changeset
                for node in outapplied:
                    if self[node].phase() < phases.secret:
                        raise error.Abort(_('source has mq patches applied'))
                # no non-secret patches pushed
            super(mqrepo, self).checkpush(pushop)

        def _findtags(self):
            '''augment tags from base class with patch tags'''
            result = super(mqrepo, self)._findtags()

            q = self.mq
            if not q.applied:
                return result

            mqtags = [(patch.node, patch.name) for patch in q.applied]

            try:
                # for now ignore filtering business
                self.unfiltered().changelog.rev(mqtags[-1][0])
            except error.LookupError:
                # Stale status file: warn and fall back to plain tags.
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(mqtags[-1][0]))
                return result

            # do not add fake tags for filtered revisions
            included = self.changelog.hasnode
            mqtags = [mqt for mqt in mqtags if included(mqt[0])]
            if not mqtags:
                return result

            # Synthetic tags marking the stack boundaries.
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            tags = result[0]
            for patch in mqtags:
                if patch[1] in tags:
                    # Real tags win over patch-derived ones.
                    self.ui.warn(_('tag %s overrides mq patch of the same '
                                   'name\n') % patch[1])
                else:
                    tags[patch[1]] = patch[0]

            return result

    if repo.local():
        repo.__class__ = mqrepo

        repo._phasedefaults.append(mqphasedefaults)
3599 3599
def mqimport(orig, ui, repo, *args, **kwargs):
    """Wrap :hg:`import` to refuse importing over an applied mq patch."""
    ismq = util.safehasattr(repo, 'abortifwdirpatched')
    if ismq and not kwargs.get(r'no_commit', False):
        repo.abortifwdirpatched(_('cannot import over an applied patch'),
                                kwargs.get(r'force'))
    return orig(ui, repo, *args, **kwargs)
3606 3606
def mqinit(orig, ui, *args, **kwargs):
    """Wrap :hg:`init` so that --mq initializes the patch queue repository."""
    usemq = kwargs.pop(r'mq', None)
    if not usemq:
        # Plain init: defer to the wrapped command unchanged.
        return orig(ui, *args, **kwargs)

    if not args:
        # No path argument: locate the enclosing repository from the cwd.
        path = cmdutil.findrepo(encoding.getcwd())
        if not path:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
    else:
        path = args[0]
        if not hg.islocal(path):
            raise error.Abort(_('only a local queue repository '
                                'may be initialized'))
    return qinit(ui, hg.repository(ui, path), True)
3625 3625
def mqcommand(orig, ui, repo, *args, **kwargs):
    """Add --mq option to operate on patch repository instead of main"""
    # Strip the option unconditionally; some commands reject unknown options.
    usemq = kwargs.pop(r'mq', None)
    if not usemq:
        return orig(ui, repo, *args, **kwargs)

    # Redirect the command at the queue repository (.hg/patches).
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise error.Abort(_('no queue repository'))
    return orig(qrepo.ui, qrepo, *args, **kwargs)
3640 3640
def summaryhook(ui, repo):
    """Contribute an "mq:" line to :hg:`summary` output."""
    q = repo.mq
    napplied = len(q.applied)
    nunapplied = len(q.unapplied(repo))
    parts = []
    if napplied:
        parts.append(ui.label(_("%d applied"), 'qseries.applied') % napplied)
    if nunapplied:
        parts.append(
            ui.label(_("%d unapplied"), 'qseries.unapplied') % nunapplied)
    if not parts:
        # i18n: column positioning for "hg summary"
        ui.note(_("mq: (empty queue)\n"))
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_("mq: %s\n") % ', '.join(parts))
3655 3655
revsetpredicate = registrar.revsetpredicate()

@revsetpredicate('mq()')
def revsetmq(repo, subset, x):
    """Changesets managed by MQ.
    """
    # mq() takes no arguments; getargs aborts with the message otherwise.
    revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
    # Revision numbers of every applied patch.
    applied = set([repo[r.node].rev() for r in repo.mq.applied])
    return smartset.baseset([r for r in subset if r in applied])

# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsetmq]
3668 3668
def extsetup(ui):
    # Ensure mq wrappers are called first, regardless of extension load order by
    # NOT wrapping in uisetup() and instead deferring to init stage two here.
    mqopt = [('', 'mq', None, _("operate on patch repository"))]

    extensions.wrapcommand(commands.table, 'import', mqimport)
    cmdutil.summaryhooks.add('mq', summaryhook)

    # 'init' needs special handling: it can create the queue repo itself.
    entry = extensions.wrapcommand(commands.table, 'init', mqinit)
    entry[1].extend(mqopt)

    def dotable(cmdtable):
        # Add --mq to, and wrap, every repo-using command in the table.
        for cmd, entry in cmdtable.iteritems():
            cmd = cmdutil.parsealiases(cmd)[0]
            func = entry[0]
            if func.norepo:
                continue
            entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
            entry[1].extend(mqopt)

    dotable(commands.table)

    # Also wrap the command tables of every other loaded extension.
    for extname, extmodule in extensions.extensions():
        if extmodule.__file__ != __file__:
            dotable(getattr(extmodule, 'cmdtable', {}))
3694 3694
# Default label -> effect mapping consumed by the color extension.
colortable = {'qguard.negative': 'red',
              'qguard.positive': 'yellow',
              'qguard.unguarded': 'green',
              'qseries.applied': 'blue bold underline',
              'qseries.guarded': 'black bold',
              'qseries.missing': 'red bold',
              'qseries.unapplied': 'black bold'}
@@ -1,1012 +1,1012 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changeset from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import contextlib
45 45 import itertools
46 46 import json
47 47 import operator
48 48 import re
49 49
50 50 from mercurial.node import bin, nullid
51 51 from mercurial.i18n import _
52 52 from mercurial import (
53 53 cmdutil,
54 54 context,
55 55 encoding,
56 56 error,
57 57 httpconnection as httpconnectionmod,
58 58 mdiff,
59 59 obsutil,
60 60 parser,
61 61 patch,
62 62 phases,
63 63 registrar,
64 64 scmutil,
65 65 smartset,
66 66 tags,
67 67 templateutil,
68 68 url as urlmod,
69 69 util,
70 70 )
71 71 from mercurial.utils import (
72 72 procutil,
73 73 stringutil,
74 74 )
75 75
76 76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 77 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 78 # be specifying the version(s) of Mercurial they are tested with, or
79 79 # leave the attribute unspecified.
80 80 testedwith = 'ships-with-hg-core'
81 81
82 82 cmdtable = {}
83 83 command = registrar.command(cmdtable)
84 84
85 85 configtable = {}
86 86 configitem = registrar.configitem(configtable)
87 87
88 88 # developer config: phabricator.batchsize
89 89 configitem(b'phabricator', b'batchsize',
90 90 default=12,
91 91 )
92 92 configitem(b'phabricator', b'callsign',
93 93 default=None,
94 94 )
95 95 configitem(b'phabricator', b'curlcmd',
96 96 default=None,
97 97 )
98 98 # developer config: phabricator.repophid
99 99 configitem(b'phabricator', b'repophid',
100 100 default=None,
101 101 )
102 102 configitem(b'phabricator', b'url',
103 103 default=None,
104 104 )
105 105 configitem(b'phabsend', b'confirm',
106 106 default=False,
107 107 )
108 108
109 109 colortable = {
110 110 b'phabricator.action.created': b'green',
111 111 b'phabricator.action.skipped': b'magenta',
112 112 b'phabricator.action.updated': b'magenta',
113 113 b'phabricator.desc': b'',
114 114 b'phabricator.drev': b'bold',
115 115 b'phabricator.node': b'',
116 116 }
117 117
118 118 _VCR_FLAGS = [
119 119 (b'', b'test-vcr', b'',
120 120 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
121 121 b', otherwise will mock all http requests using the specified vcr file.'
122 122 b' (ADVANCED)'
123 123 )),
124 124 ]
125 125
def vcrcommand(name, flags, spec, helpcategory=None):
    """Like ``command``, but add the hidden ``--test-vcr`` option.

    When ``--test-vcr PATH`` is supplied, all HTTP traffic made by the
    command is recorded to (or, if PATH exists, replayed from) a vcr
    cassette, with urlmod's connection classes monkey-patched for the
    duration of the call.
    """
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = kwargs.pop(r'test_vcr', None)
            if cassette:
                # Import vcr lazily and outside demandimport, which does not
                # cope with vcr's own import machinery.
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
                            (urlmod, 'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        # Preserve the wrapped function's identity for help/dispatch.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
    return decorate
150 150
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        # Booleans are rendered the way PHP's http_build_query renders them.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Only exact list/dict types recurse; everything else is a leaf.
        if type(value) is list:
            pairs = enumerate(value)
        elif type(value) is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                _flatten(b'%s[%s]' % (prefix, key), item)
            else:
                _flatten(key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flat)
173 173
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    ui = repo.ui
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    token = None
    # Look up credentials for this URL in the [auth] section.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token
200 200
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
    # Copy before mutating so the caller's dict is untouched.
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the configured curl command, feeding the form data on
        # stdin; useful for transport options the builtin library lacks.
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default path: builtin urllib-based opener with [auth] credentials.
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(url, data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = json.loads(body)
    if parsed.get(r'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[r'error_code'], parsed[r'error_info']))
        raise error.Abort(msg)
    return parsed[r'result']
228 228
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # Relay the request from stdin to Conduit verbatim.
    request = json.loads(ui.fin.read())
    response = callconduit(repo, name, request)
    rendered = json.dumps(response, sort_keys=True, indent=2,
                          separators=(b',', b': '))
    ui.write(b'%s\n' % rendered)
240 240
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Ask Phabricator which repository carries this callsign.
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if not query[r'data']:
        return None
    phid = encoding.strtolocal(query[r'data'][0][r'phid'])
    # Cache the answer in config so later calls skip the round trip.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
257 257
258 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
258 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
259 259 _differentialrevisiondescre = re.compile(
260 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
260 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
261 261
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: tag association still needs confirming.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: the commit message is an explicit association.
            toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        # Node recorded in a diff's hg:meta, or None when absent/empty.
        getnode = lambda d: bin(encoding.unitolocal(
            getdiffmeta(d).get(r'node', b''))) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[r'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the bogus local tag (tag to nullid == removal).
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[r'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
339 339
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate raw diff chunks; the ui color labels are irrelevant here.
    chunks = patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(), None,
                          opts=diffopts)
    buf = util.stringio()
    for piece, _unusedlabel in chunks:
        buf.write(piece)
    return buf.getvalue()
347 347
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # A huge context makes the diff effectively full-file; git form keeps
    # renames and binary changes intact.
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, diffopts)}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
360 360
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    repo = ctx.repo()
    diffid = diff[r'id']

    # "hg:meta": enough information to reconstruct the changeset exactly.
    hgmeta = json.dumps({
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    })
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': hgmeta,
    })

    # "local:commits": per-commit author info shown by Phabricator.
    localcommits = json.dumps({
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': ctx.date()[0],
        },
    })
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': localcommits,
    })
387 387
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair of conduit dicts. Aborts if the
    "differential.revision.edit" call returned nothing.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare plain-text diffs (huge context, no headers) to decide
        # whether a new raw diff upload is actually needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[r'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%s' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[r'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
450 450
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts listing the unknown names if any of them cannot be resolved by
    the "user.search" conduit API (an unknown username is not an API-level
    error, so it has to be detected here).
    """
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[r'data']
    resolved = {entry[r'fields'][r'username'] for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(unresolved)))
    return [entry[r'phid'] for entry in data]
464 464
@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[r'object'][r'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(b'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        # One status line per changeset, colorized via ui labels.
        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[r'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                newdesc = encoding.unitolocal(newdesc)
                # Make sure commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
612 612
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output. (The trailing space in b'Parent '
# matches the "# Parent " header emitted by export.)
_metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
                              (r'node', b'Node ID'), (r'parent', b'Parent ')])
617 617
def _confirmbeforesend(repo, revs, oldmap):
    """interactively confirm the changesets about to be sent

    Prints one line per revision, labelled with the Differential Revision
    it would update (or NEW), then prompts the user. Returns False when
    the user declines, True otherwise.
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(_(b'%s - %s: %s\n')
                 % (drevdesc,
                    ui.label(bytes(ctx), b'phabricator.node'),
                    ui.label(desc, b'phabricator.desc')))

    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                         b'$$ &Yes $$ &No') % url):
        return False

    return True
640 640
# Normalized Differential Revision status names (as produced by
# _getstatusname) recognized by the DREVSPEC query language.
_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}
643 643
def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    # e.g. "Needs Review" -> b'needsreview'
    raw = drev[r'statusName']
    return raw.replace(b' ', b'').lower()
647 647
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Parser table consumed by mercurial.parser.parser().
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
662 662
def _tokenize(text):
    """tokenize a DREVSPEC bytestring

    Yields (token-type, token-value, position) tuples and ends with an
    (b'end', None, pos) sentinel. Spaces are skipped; any run of characters
    outside b'():+-& ' forms a single b'symbol' token.
    """
    view = memoryview(text) # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # NOTE(review): joining the takewhile output relies on iterating a
        # memoryview yielding one-byte strings (Python 2 behavior) — confirm
        # for Python 3, where iteration yields ints.
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              view[pos:]))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)
679 679
def _parse(text):
    """parse a DREVSPEC bytestring into a prefix tree

    Raises ParseError when the whole input is not consumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
685 685
def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    # Strip an optional leading 'D'; whatever remains must be all digits.
    text = symbol[1:] if symbol.startswith(b'D') else symbol
    if text.isdigit():
        return int(text)
    return None
692 692
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        sub, subancestors = _prefetchdrevs(tree[1])
        drevs |= sub
        # any drev under an ancestors operator is itself an ancestor root
        ancestordrevs |= sub | subancestors
    else:
        for subtree in tree[1:]:
            sub, subancestors = _prefetchdrevs(subtree)
            drevs |= sub
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
713 713
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        # params carries either numeric ids or phids; cache under both keys.
        key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[r'phid']] = drev
            prefetched[int(drev[r'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # DFS over "phabricator:depends-on" links, then reverse so the
        # bottom of the stack comes first.
        visited = set()
        result = []
        queue = [{r'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[r'id'] in visited:
                continue
            visited.add(drev[r'id'])
            result.append(int(drev[r'id']))
            auxiliary = drev.get(r'auxiliary', {})
            depends = auxiliary.get(r'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # speculatively fetch a window of ids below each ancestor root so
        # getstack() mostly hits the cache
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({r'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status name: filter the prefetched candidates
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # NOTE(review): getattr with a bytes name works on Python 2
            # only — confirm Python 3 handling.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
831 831
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    # Assemble the non-empty pieces, separated by blank lines.
    parts = []
    title = drev[r'title']
    if title:
        parts.append(title)
    summary = drev[r'summary'].rstrip()
    if summary:
        parts.append(summary)
    testplan = drev[r'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[r'uri'])
    return b'\n\n'.join(parts)
845 845
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    if not meta and props.get(r'local:commits'):
        # NOTE(review): sorting dict values works on Python 2 only when
        # "local:commits" has more than one entry (dicts are unorderable on
        # Python 3) — confirm only single-commit diffs reach this path.
        commit = sorted(props[r'local:commits'].values())[0]
        meta = {
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        if len(commit.get(r'parents', ())) >= 1:
            meta[r'parent'] = commit[r'parents'][0]
    return meta or {}
895 895
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[r'id'])

        # only the latest diff of each revision is imported
        diffid = max(int(v) for v in drev[r'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(encoding.unitolocal(content))
926 926
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        # --stack simply wraps the spec in the ancestors operator
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
954 954
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    # the status flags are mutually exclusive
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # --comment applies to the last revision of the selection only
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[r'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)
984 984
985 985 templatekeyword = registrar.templatekeyword()
986 986
@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # NOTE(review): group names are passed as bytes here; confirm this works
    # for the Python 3 port (re group lookup requires str names there).
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
            # %-formatting instead of .format(): bytes has no format()
            # method on Python 3 (PEP 461 added % to bytes instead)
            b'id': b'D%s' % m.group(b'id'),
        })
    else:
        # Fall back to a local "D123" tag recorded by phabsend without
        # --amend; rebuild the URL from the configured Phabricator base.
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
@@ -1,641 +1,641 b''
1 1 # Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
2 2 #
3 3 # This software may be used and distributed according to the terms of the
4 4 # GNU General Public License version 2 or any later version.
5 5
6 6 """generate release notes from commit messages (EXPERIMENTAL)
7 7
8 8 It is common to maintain files detailing changes in a project between
9 9 releases. Maintaining these files can be difficult and time consuming.
10 10 The :hg:`releasenotes` command provided by this extension makes the
11 11 process simpler by automating it.
12 12 """
13 13
14 14 from __future__ import absolute_import
15 15
16 16 import difflib
17 17 import errno
18 18 import re
19 19
20 20 from mercurial.i18n import _
21 21 from mercurial import (
22 22 config,
23 23 error,
24 24 minirst,
25 25 node,
26 26 pycompat,
27 27 registrar,
28 28 scmutil,
29 29 util,
30 30 )
31 31 from mercurial.utils import (
32 32 stringutil,
33 33 )
34 34
35 35 cmdtable = {}
36 36 command = registrar.command(cmdtable)
37 37
38 38 try:
39 39 import fuzzywuzzy.fuzz as fuzz
40 40 fuzz.token_set_ratio
41 41 except ImportError:
42 42 fuzz = None
43 43
44 44 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 46 # be specifying the version(s) of Mercurial they are tested with, or
47 47 # leave the attribute unspecified.
48 48 testedwith = 'ships-with-hg-core'
49 49
# Default (section-name, section-title) pairs recognized in commit message
# admonitions; may be extended per-repository via a .hgreleasenotes file
# (see getcustomadmonitions).
DEFAULT_SECTIONS = [
    ('feature', _('New Features')),
    ('bc', _('Backwards Compatibility Changes')),
    ('fix', _('Bug Fixes')),
    ('perf', _('Performance Improvements')),
    ('api', _('API Changes')),
]
# Matches a directive line such as ".. feature:: Title"; group 1 is the
# admonition name, group 2 the optional title text.
RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
# Matches issue references like "issue1234" / "issue 123456" (4-6 digits).
RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'

# Section title used for entries that carry no admonition directive.
BULLET_SECTION = _('Other Changes')
62 62
class parsedreleasenotes(object):
    """In-memory model of a parsed release notes document.

    Notes are grouped by section name; each section holds a list of titled
    entries (sub-sections) and a list of non-titled (bulleted) entries.
    """
    def __init__(self):
        # section name -> ([(title, paragraphs)], [paragraphs])
        self.sections = {}

    def __contains__(self, section):
        return section in self.sections

    def __iter__(self):
        return iter(sorted(self.sections))

    def addtitleditem(self, section, title, paragraphs):
        """Add a titled release note entry."""
        self.sections.setdefault(section, ([], []))
        self.sections[section][0].append((title, paragraphs))

    def addnontitleditem(self, section, paragraphs):
        """Adds a non-titled release note entry.

        Will be rendered as a bullet point.
        """
        self.sections.setdefault(section, ([], []))
        self.sections[section][1].append(paragraphs)

    def titledforsection(self, section):
        """Returns titled entries in a section.

        Returns a list of (title, paragraphs) tuples describing sub-sections.
        """
        return self.sections.get(section, ([], []))[0]

    def nontitledforsection(self, section):
        """Returns non-titled, bulleted paragraphs in a section."""
        return self.sections.get(section, ([], []))[1]

    def hastitledinsection(self, section, title):
        # True when the section already contains an entry with this title
        return any(t[0] == title for t in self.titledforsection(section))

    def merge(self, ui, other):
        """Merge another instance into this one.

        This is used to combine multiple sources of release notes together.
        Duplicate titles, already-mentioned issue numbers, and (when
        fuzzywuzzy is available) sufficiently similar entries are skipped.
        """
        if not fuzz:
            ui.warn(_("module 'fuzzywuzzy' not found, merging of similar "
                      "releasenotes is disabled\n"))

        for section in other:
            # flattened strings of everything already in this section, used
            # for issue-number and similarity de-duplication below
            existingnotes = converttitled(self.titledforsection(section)) + \
                convertnontitled(self.nontitledforsection(section))
            for title, paragraphs in other.titledforsection(section):
                if self.hastitledinsection(section, title):
                    # TODO prompt for resolution if different and running in
                    # interactive mode.
                    ui.write(_('%s already exists in %s section; ignoring\n') %
                             (title, section))
                    continue

                incoming_str = converttitled([(title, paragraphs)])[0]
                if section == 'fix':
                    issue = getissuenum(incoming_str)
                    if issue:
                        if findissue(ui, existingnotes, issue):
                            continue

                if similar(ui, existingnotes, incoming_str):
                    continue

                self.addtitleditem(section, title, paragraphs)

            for paragraphs in other.nontitledforsection(section):
                if paragraphs in self.nontitledforsection(section):
                    continue

                incoming_str = convertnontitled([paragraphs])[0]
                if section == 'fix':
                    issue = getissuenum(incoming_str)
                    if issue:
                        if findissue(ui, existingnotes, issue):
                            continue

                if similar(ui, existingnotes, incoming_str):
                    continue

                self.addnontitleditem(section, paragraphs)
147 147
class releasenotessections(object):
    """Ordered collection of (section-name, section-title) pairs.

    Combines DEFAULT_SECTIONS with any custom admonitions declared in the
    repository's .hgreleasenotes file; custom entries override defaults.
    """
    def __init__(self, ui, repo=None):
        if repo:
            sections = util.sortdict(DEFAULT_SECTIONS)
            custom_sections = getcustomadmonitions(repo)
            if custom_sections:
                sections.update(custom_sections)
            # NOTE(review): iteritems() is Python 2 only — confirm this
            # module is not yet expected to run under Python 3.
            self._sections = list(sections.iteritems())
        else:
            self._sections = list(DEFAULT_SECTIONS)

    def __iter__(self):
        return iter(self._sections)

    def names(self):
        # section identifiers, e.g. 'feature', 'fix'
        return [t[0] for t in self._sections]

    def sectionfromtitle(self, title):
        # reverse lookup: rendered title -> section name, or None
        for name, value in self._sections:
            if value == title:
                return name

        return None
171 171
def converttitled(titledparagraphs):
    """
    Convert titled paragraphs to strings
    """
    # One space-joined string per titled entry; titles themselves are not
    # included, matching the original behavior.
    return [' '.join(line for para in paragraphs for line in para)
            for _title, paragraphs in titledparagraphs]
183 183
def convertnontitled(nontitledparagraphs):
    """
    Convert non-titled bullets to strings
    """
    # One space-joined string per bulleted entry.
    return [' '.join(line for para in paragraphs for line in para)
            for paragraphs in nontitledparagraphs]
195 195
def getissuenum(incoming_str):
    """
    Returns issue number from the incoming string if it exists
    """
    # RE_ISSUE matches references like "issue1234"; None when absent.
    match = re.search(RE_ISSUE, incoming_str, re.IGNORECASE)
    return match.group() if match else None
204 204
def findissue(ui, existing, issue):
    """
    Returns true if issue number already exists in notes.
    """
    for note in existing:
        if issue in note:
            ui.write(_('"%s" already exists in notes; ignoring\n') % issue)
            return True
    return False
214 214
def similar(ui, existing, incoming_str):
    """
    Returns true if similar note found in existing notes.
    """
    # Very short fragments are never treated as duplicates.
    if len(incoming_str.split()) <= 10:
        return False
    if similaritycheck(incoming_str, existing):
        return False
    ui.write(_('"%s" already exists in notes file; ignoring\n')
             % incoming_str)
    return True
229 229
def similaritycheck(incoming_str, existingnotes):
    """
    Returns false when note fragment can be merged to existing notes.

    Uses fuzzywuzzy's token-set ratio; a score above 75 against any
    existing note counts as a match. When fuzzywuzzy is unavailable the
    fragment is always treated as new (returns True).
    """
    if not fuzz:
        # fuzzywuzzy not present
        return True
    return not any(fuzz.token_set_ratio(incoming_str, note) > 75
                   for note in existingnotes)
245 245
def getcustomadmonitions(repo):
    """Return custom admonitions defined in the repo's ``.hgreleasenotes``.

    The file is read from the working directory's parent changeset and
    parsed as a Mercurial config file; its ``sections`` config section
    holds the custom admonition definitions.
    """
    ctx = repo['.']
    p = config.config()

    # ``read`` doubles as the include callback passed to ``p.parse`` so
    # that %include directives resolve against the changeset contents
    # rather than the filesystem.
    def read(f, sections=None, remap=None):
        if f in ctx:
            data = ctx[f].data()
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_(".hgreleasenotes file \'%s\' not found") %
                              repo.pathto(f))

    if '.hgreleasenotes' in ctx:
        read('.hgreleasenotes')
    return p['sections']
261 261
def checkadmonitions(ui, repo, directives, revs):
    """
    Checks the commit messages for admonitions and their validity.

    .. abcd::

       First paragraph under this admonition

    For this commit message, using `hg releasenotes -r . --check`
    returns: Invalid admonition 'abcd' present in changeset 3ea92981e103

    As admonition 'abcd' is neither present in default nor custom admonitions
    """
    for rev in revs:
        ctx = repo[rev]
        # Only the first directive found in the description is checked.
        admonition = re.search(RE_DIRECTIVE, ctx.description())
        if admonition:
            if admonition.group(1) in directives:
                continue
            else:
                ui.write(_("Invalid admonition '%s' present in changeset %s"
                           "\n") % (admonition.group(1), ctx.hex()[:12]))
                # Suggest likely typo corrections: known directives whose
                # SequenceMatcher similarity to the bad name exceeds 0.6.
                # NOTE(review): ``similar`` here shadows the module-level
                # function of the same name.
                sim = lambda x: difflib.SequenceMatcher(None,
                    admonition.group(1), x).ratio()

                similar = [s for s in directives if sim(s) > 0.6]
                if len(similar) == 1:
                    ui.write(_("(did you mean %s?)\n") % similar[0])
                elif similar:
                    ss = ", ".join(sorted(similar))
                    ui.write(_("(did you mean one of %s?)\n") % ss)
293 293
294 294 def _getadmonitionlist(ui, sections):
295 295 for section in sections:
296 296 ui.write("%s: %s\n" % (section[0], section[1]))
297 297
def parsenotesfromrevisions(repo, directives, revs):
    """Extract release-notes fragments from commit messages of ``revs``.

    ``directives`` is the set of recognized admonition names. Each commit
    description is parsed as minimal reST; every admonition block plus
    the indented paragraphs that follow it becomes one titled or
    non-titled item in the returned ``parsedreleasenotes`` object.
    """
    notes = parsedreleasenotes()

    for rev in revs:
        ctx = repo[rev]

        blocks, pruned = minirst.parse(ctx.description(),
                                       admonitions=directives)

        for i, block in enumerate(blocks):
            if block['type'] != 'admonition':
                continue

            directive = block['admonitiontitle']
            # Short-form directives have no title line.
            title = block['lines'][0].strip() if block['lines'] else None

            # A directive as the very last block cannot have any content.
            if i + 1 == len(blocks):
                raise error.Abort(_('changeset %s: release notes directive %s '
                                    'lacks content') % (ctx, directive))

            # Now search ahead and find all paragraphs attached to this
            # admonition.
            paragraphs = []
            for j in range(i + 1, len(blocks)):
                pblock = blocks[j]

                # Margin blocks may appear between paragraphs. Ignore them.
                if pblock['type'] == 'margin':
                    continue

                if pblock['type'] == 'admonition':
                    break

                if pblock['type'] != 'paragraph':
                    repo.ui.warn(_('changeset %s: unexpected block in release '
                                   'notes directive %s\n') % (ctx, directive))

                # Only indented blocks belong to this admonition; the
                # first non-indented block terminates it.
                if pblock['indent'] > 0:
                    paragraphs.append(pblock['lines'])
                else:
                    break

            # TODO consider using title as paragraph for more concise notes.
            if not paragraphs:
                repo.ui.warn(_("error parsing releasenotes for revision: "
                               "'%s'\n") % node.hex(ctx.node()))
            if title:
                notes.addtitleditem(directive, title, paragraphs)
            else:
                notes.addnontitleditem(directive, paragraphs)

    return notes
350 350
def parsereleasenotesfile(sections, text):
    """Parse text content containing generated release notes.

    ``text`` is expected to follow the layout produced by
    ``serializenotes()``: ``=``-underlined main sections, optionally
    containing ``-``-underlined sub-sections and bullet lists. Returns a
    ``parsedreleasenotes`` instance.
    """
    notes = parsedreleasenotes()

    blocks = minirst.parse(text)[0]

    def gatherparagraphsbullets(offset, title=False):
        # Collect the note fragments that follow the section block at
        # ``offset``: bullet items, and (when ``title`` is True) the
        # plain paragraphs belonging to a titled sub-section.
        notefragment = []

        for i in range(offset + 1, len(blocks)):
            block = blocks[i]

            if block['type'] == 'margin':
                continue
            elif block['type'] == 'section':
                break
            elif block['type'] == 'bullet':
                if block['indent'] != 0:
                    raise error.Abort(_('indented bullet lists not supported'))
                if title:
                    # Inside a titled sub-section a bullet is one note;
                    # strip the leading bullet character from each line.
                    lines = [l[1:].strip() for l in block['lines']]
                    notefragment.append(lines)
                    continue
                else:
                    # A non-titled bullet may be continued by following
                    # paragraph blocks, gathered here.
                    lines = [[l[1:].strip() for l in block['lines']]]

                    # NOTE(review): this inner loop rebinds ``block``;
                    # the outer loop still indexes ``blocks`` by ``i``,
                    # so behavior is unaffected, but it is easy to
                    # misread.
                    for block in blocks[i + 1:]:
                        if block['type'] in ('bullet', 'section'):
                            break
                        if block['type'] == 'paragraph':
                            lines.append(block['lines'])
                    notefragment.append(lines)
                    continue
            elif block['type'] != 'paragraph':
                raise error.Abort(_('unexpected block type in release notes: '
                                    '%s') % block['type'])
            if title:
                notefragment.append(block['lines'])

        return notefragment

    currentsection = None
    for i, block in enumerate(blocks):
        if block['type'] != 'section':
            continue

        title = block['lines'][0]

        # TODO the parsing around paragraphs and bullet points needs some
        # work.
        if block['underline'] == '=': # main section
            name = sections.sectionfromtitle(title)
            if not name:
                raise error.Abort(_('unknown release notes section: %s') %
                                  title)

            currentsection = name
            bullet_points = gatherparagraphsbullets(i)
            if bullet_points:
                for para in bullet_points:
                    notes.addnontitleditem(currentsection, para)

        elif block['underline'] == '-': # sub-section
            if title == BULLET_SECTION:
                bullet_points = gatherparagraphsbullets(i)
                for para in bullet_points:
                    notes.addnontitleditem(currentsection, para)
            else:
                paragraphs = gatherparagraphsbullets(i, True)
                notes.addtitleditem(currentsection, title, paragraphs)
        else:
            raise error.Abort(_('unsupported section type for %s') % title)

    return notes
425 425
def serializenotes(sections, notes):
    """Serialize release notes from parsed fragments and notes.

    This function essentially takes the output of ``parsenotesfromrevisions()``
    and ``parserelnotesfile()`` and produces output combining the 2.

    Sections appear in the order given by ``sections``; titled items are
    emitted as ``-``-underlined sub-sections, non-titled items as a
    wrapped bullet list.
    """
    lines = []

    for sectionname, sectiontitle in sections:
        if sectionname not in notes:
            continue

        lines.append(sectiontitle)
        lines.append('=' * len(sectiontitle))
        lines.append('')

        # First pass to emit sub-sections.
        for title, paragraphs in notes.titledforsection(sectionname):
            lines.append(title)
            lines.append('-' * len(title))
            lines.append('')

            for i, para in enumerate(paragraphs):
                if i:
                    lines.append('')
                lines.extend(stringutil.wrap(' '.join(para),
                                             width=78).splitlines())

            lines.append('')

        # Second pass to emit bullet list items.

        # If the section has titled and non-titled items, we can't
        # simply emit the bullet list because it would appear to come
        # from the last title/section. So, we emit a new sub-section
        # for the non-titled items.
        nontitled = notes.nontitledforsection(sectionname)
        if notes.titledforsection(sectionname) and nontitled:
            # TODO make configurable.
            lines.append(BULLET_SECTION)
            lines.append('-' * len(BULLET_SECTION))
            lines.append('')

        for paragraphs in nontitled:
            # The first paragraph carries the bullet marker...
            lines.extend(stringutil.wrap(' '.join(paragraphs[0]),
                                         width=78,
                                         initindent='* ',
                                         hangindent='  ').splitlines())

            # ...subsequent paragraphs are indented continuations.
            for para in paragraphs[1:]:
                lines.append('')
                lines.extend(stringutil.wrap(' '.join(para),
                                             width=78,
                                             initindent='  ',
                                             hangindent='  ').splitlines())

            lines.append('')

    if lines and lines[-1]:
        lines.append('')

    return '\n'.join(lines)
488 488
@command('releasenotes',
    [('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
    ('c', 'check', False, _('checks for validity of admonitions (if any)'),
     _('REV')),
    ('l', 'list', False, _('list the available admonitions with their title'),
     None)],
    _('hg releasenotes [-r REV] [-c] FILE'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
def releasenotes(ui, repo, file_=None, **opts):
    """parse release notes from commit messages into an output file

    Given an output file and set of revisions, this command will parse commit
    messages for release notes then add them to the output file.

    Release notes are defined in commit messages as ReStructuredText
    directives. These have the form::

       .. directive:: title

          content

    Each ``directive`` maps to an output section in a generated release notes
    file, which itself is ReStructuredText. For example, the ``.. feature::``
    directive would map to a ``New Features`` section.

    Release note directives can be either short-form or long-form. In short-
    form, ``title`` is omitted and the release note is rendered as a bullet
    list. In long form, a sub-section with the title ``title`` is added to the
    section.

    The ``FILE`` argument controls the output file to write gathered release
    notes to. The format of the file is::

       Section 1
       =========

       ...

       Section 2
       =========

       ...

    Only sections with defined release notes are emitted.

    If a section only has short-form notes, it will consist of bullet list::

       Section
       =======

       * Release note 1
       * Release note 2

    If a section has long-form notes, sub-sections will be emitted::

       Section
       =======

       Note 1 Title
       ------------

       Description of the first long-form note.

       Note 2 Title
       ------------

       Description of the second long-form note.

    If the ``FILE`` argument points to an existing file, that file will be
    parsed for release notes having the format that would be generated by this
    command. The notes from the processed commit messages will be *merged*
    into this parsed set.

    During release notes merging:

    * Duplicate items are automatically ignored
    * Items that are different are automatically ignored if the similarity is
      greater than a threshold.

    This means that the release notes file can be updated independently from
    this command and changes should not be lost when running this command on
    that file. A particular use case for this is to tweak the wording of a
    release note after it has been added to the release notes file.

    The -c/--check option checks the commit message for invalid admonitions.

    The -l/--list option, presents the user with a list of existing available
    admonitions along with their title. This also includes the custom
    admonitions (if any).
    """

    opts = pycompat.byteskwargs(opts)
    sections = releasenotessections(ui, repo)

    listflag = opts.get('list')

    # --list is exclusive with the other modes of operation.
    if listflag and opts.get('rev'):
        raise error.Abort(_('cannot use both \'--list\' and \'--rev\''))
    if listflag and opts.get('check'):
        raise error.Abort(_('cannot use both \'--list\' and \'--check\''))

    if listflag:
        return _getadmonitionlist(ui, sections)

    # Default to all non-public changesets when no revision is given.
    rev = opts.get('rev')
    revs = scmutil.revrange(repo, [rev or 'not public()'])
    if opts.get('check'):
        return checkadmonitions(ui, repo, sections.names(), revs)

    incoming = parsenotesfromrevisions(repo, sections.names(), revs)

    if file_ is None:
        # No output file: render the gathered notes to the pager.
        ui.pager('releasenotes')
        return ui.write(serializenotes(sections, incoming))

    try:
        with open(file_, 'rb') as fh:
            notes = parsereleasenotesfile(sections, fh.read())
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise

        # A missing output file just means we start from scratch.
        notes = parsedreleasenotes()

    notes.merge(ui, incoming)

    with open(file_, 'wb') as fh:
        fh.write(serializenotes(sections, notes))
617 617
@command('debugparsereleasenotes', norepo=True)
def debugparsereleasenotes(ui, path, repo=None):
    """parse release notes and print resulting data structure"""
    # '-' reads the notes file from stdin instead of a path.
    if path == '-':
        text = pycompat.stdin.read()
    else:
        with open(path, 'rb') as fh:
            text = fh.read()

    sections = releasenotessections(ui, repo)

    notes = parsereleasenotesfile(sections, text)

    # Dump the parsed structure: sections, then titled sub-sections,
    # then non-titled bullet points.
    for section in notes:
        ui.write(_('section: %s\n') % section)
        for title, paragraphs in notes.titledforsection(section):
            ui.write(_(' subsection: %s\n') % title)
            for para in paragraphs:
                ui.write(_('  paragraph: %s\n') % ' '.join(para))

        for paragraphs in notes.nontitledforsection(section):
            ui.write(_(' bullet point:\n'))
            for para in paragraphs:
                ui.write(_('  paragraph: %s\n') % ' '.join(para))
@@ -1,533 +1,533 b''
1 1 # utility for color output for Mercurial commands
2 2 #
3 3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> and other
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import re
11 11
12 12 from .i18n import _
13 13
14 14 from . import (
15 15 encoding,
16 16 pycompat,
17 17 )
18 18
19 19 from .utils import (
20 20 stringutil,
21 21 )
22 22
23 23 try:
24 24 import curses
25 25 # Mapping from effect name to terminfo attribute name (or raw code) or
26 26 # color number. This will also force-load the curses module.
27 27 _baseterminfoparams = {
28 28 'none': (True, 'sgr0', ''),
29 29 'standout': (True, 'smso', ''),
30 30 'underline': (True, 'smul', ''),
31 31 'reverse': (True, 'rev', ''),
32 32 'inverse': (True, 'rev', ''),
33 33 'blink': (True, 'blink', ''),
34 34 'dim': (True, 'dim', ''),
35 35 'bold': (True, 'bold', ''),
36 36 'invisible': (True, 'invis', ''),
37 37 'italic': (True, 'sitm', ''),
38 38 'black': (False, curses.COLOR_BLACK, ''),
39 39 'red': (False, curses.COLOR_RED, ''),
40 40 'green': (False, curses.COLOR_GREEN, ''),
41 41 'yellow': (False, curses.COLOR_YELLOW, ''),
42 42 'blue': (False, curses.COLOR_BLUE, ''),
43 43 'magenta': (False, curses.COLOR_MAGENTA, ''),
44 44 'cyan': (False, curses.COLOR_CYAN, ''),
45 45 'white': (False, curses.COLOR_WHITE, ''),
46 46 }
47 47 except ImportError:
48 48 curses = None
49 49 _baseterminfoparams = {}
50 50
51 51 # start and stop parameters for effects
52 52 _effects = {
53 53 'none': 0,
54 54 'black': 30,
55 55 'red': 31,
56 56 'green': 32,
57 57 'yellow': 33,
58 58 'blue': 34,
59 59 'magenta': 35,
60 60 'cyan': 36,
61 61 'white': 37,
62 62 'bold': 1,
63 63 'italic': 3,
64 64 'underline': 4,
65 65 'inverse': 7,
66 66 'dim': 2,
67 67 'black_background': 40,
68 68 'red_background': 41,
69 69 'green_background': 42,
70 70 'yellow_background': 43,
71 71 'blue_background': 44,
72 72 'purple_background': 45,
73 73 'cyan_background': 46,
74 74 'white_background': 47,
75 75 }
76 76
77 77 _defaultstyles = {
78 78 'grep.match': 'red bold',
79 79 'grep.linenumber': 'green',
80 80 'grep.rev': 'green',
81 81 'grep.change': 'green',
82 82 'grep.sep': 'cyan',
83 83 'grep.filename': 'magenta',
84 84 'grep.user': 'magenta',
85 85 'grep.date': 'magenta',
86 86 'bookmarks.active': 'green',
87 87 'branches.active': 'none',
88 88 'branches.closed': 'black bold',
89 89 'branches.current': 'green',
90 90 'branches.inactive': 'none',
91 91 'diff.changed': 'white',
92 92 'diff.deleted': 'red',
93 93 'diff.deleted.changed': 'red bold underline',
94 94 'diff.deleted.unchanged': 'red',
95 95 'diff.diffline': 'bold',
96 96 'diff.extended': 'cyan bold',
97 97 'diff.file_a': 'red bold',
98 98 'diff.file_b': 'green bold',
99 99 'diff.hunk': 'magenta',
100 100 'diff.inserted': 'green',
101 101 'diff.inserted.changed': 'green bold underline',
102 102 'diff.inserted.unchanged': 'green',
103 103 'diff.tab': '',
104 104 'diff.trailingwhitespace': 'bold red_background',
105 105 'changeset.public': '',
106 106 'changeset.draft': '',
107 107 'changeset.secret': '',
108 108 'diffstat.deleted': 'red',
109 109 'diffstat.inserted': 'green',
110 110 'formatvariant.name.mismatchconfig': 'red',
111 111 'formatvariant.name.mismatchdefault': 'yellow',
112 112 'formatvariant.name.uptodate': 'green',
113 113 'formatvariant.repo.mismatchconfig': 'red',
114 114 'formatvariant.repo.mismatchdefault': 'yellow',
115 115 'formatvariant.repo.uptodate': 'green',
116 116 'formatvariant.config.special': 'yellow',
117 117 'formatvariant.config.default': 'green',
118 118 'formatvariant.default': '',
119 119 'histedit.remaining': 'red bold',
120 120 'ui.addremove.added': 'green',
121 121 'ui.addremove.removed': 'red',
122 122 'ui.error': 'red',
123 123 'ui.prompt': 'yellow',
124 124 'log.changeset': 'yellow',
125 125 'patchbomb.finalsummary': '',
126 126 'patchbomb.from': 'magenta',
127 127 'patchbomb.to': 'cyan',
128 128 'patchbomb.subject': 'green',
129 129 'patchbomb.diffstats': '',
130 130 'rebase.rebased': 'blue',
131 131 'rebase.remaining': 'red bold',
132 132 'resolve.resolved': 'green bold',
133 133 'resolve.unresolved': 'red bold',
134 134 'shelve.age': 'cyan',
135 135 'shelve.newest': 'green bold',
136 136 'shelve.name': 'blue bold',
137 137 'status.added': 'green bold',
138 138 'status.clean': 'none',
139 139 'status.copied': 'none',
140 140 'status.deleted': 'cyan bold underline',
141 141 'status.ignored': 'black bold',
142 142 'status.modified': 'blue bold',
143 143 'status.removed': 'red bold',
144 144 'status.unknown': 'magenta bold underline',
145 145 'tags.normal': 'green',
146 146 'tags.local': 'black bold',
147 147 }
148 148
def loadcolortable(ui, extname, colortable):
    """Extension hook: merge ``colortable`` into the default styles.

    ``ui`` and ``extname`` are accepted for API symmetry with other
    extension loader hooks but are not used here.
    """
    _defaultstyles.update(colortable)
151 151
def _terminfosetup(ui, mode, formatted):
    '''Initialize terminfo data and the terminal if we're in terminfo mode.

    Populates ``ui._terminfoparams`` from the built-in defaults and the
    ``[color]`` configuration; clears it again (disabling terminfo mode)
    when the terminal cannot be set up or lacks setaf/setab entries.
    '''

    # If we failed to load curses, we go ahead and return.
    if curses is None:
        return
    # Otherwise, see what the config file says.
    if mode not in ('auto', 'terminfo'):
        return
    ui._terminfoparams.update(_baseterminfoparams)

    for key, val in ui.configitems('color'):
        if key.startswith('color.'):
            # color.NAME = <number> maps NAME to a terminfo color number.
            newval = (False, int(val), '')
            ui._terminfoparams[key[6:]] = newval
        elif key.startswith('terminfo.'):
            # terminfo.NAME = <code> supplies a raw escape sequence;
            # '\E' is the conventional terminfo spelling of ESC.
            newval = (True, '', val.replace('\\E', '\x1b'))
            ui._terminfoparams[key[9:]] = newval
    try:
        curses.setupterm()
    except curses.error:
        # Terminal setup failed: disable terminfo mode entirely.
        ui._terminfoparams.clear()
        return

    for key, (b, e, c) in ui._terminfoparams.copy().items():
        if not b:
            continue
        if not c and not curses.tigetstr(pycompat.sysstr(e)):
            # Most terminals don't support dim, invis, etc, so don't be
            # noisy and use ui.debug().
            ui.debug("no terminfo entry for %s\n" % e)
            del ui._terminfoparams[key]
    if not curses.tigetstr(r'setaf') or not curses.tigetstr(r'setab'):
        # Only warn about missing terminfo entries if we explicitly asked for
        # terminfo mode and we're in a formatted terminal.
        if mode == "terminfo" and formatted:
            ui.warn(_("no terminfo entry for setab/setaf: reverting to "
                      "ECMA-48 color\n"))
        ui._terminfoparams.clear()
191 191
def setup(ui):
    """configure color on a ui

    This function both sets the colormode for the ui object and reads
    the configuration looking for custom colors and effect definitions."""
    mode = _modesetup(ui)
    ui._colormode = mode
    # 'debug' mode annotates output textually and needs no style table.
    if mode and mode != 'debug':
        configstyles(ui)
201 201
def _modesetup(ui):
    """Determine the effective color mode for ``ui``.

    Returns 'debug', 'ansi', 'win32' or 'terminfo', or None when color
    should be disabled. Considers HGPLAIN, the ``ui.color`` and
    ``color.mode``/``color.pagermode`` settings, the platform, and
    whether output goes to a formatted terminal.
    """
    if ui.plain('color'):
        return None
    config = ui.config('ui', 'color')
    if config == 'debug':
        return 'debug'

    auto = (config == 'auto')
    always = False
    if not auto and stringutil.parsebool(config):
        # We want the config to behave like a boolean, "on" is actually auto,
        # but "always" value is treated as a special case to reduce confusion.
        if ui.configsource('ui', 'color') == '--color' or config == 'always':
            always = True
        else:
            auto = True

    if not always and not auto:
        return None

    formatted = (always or (encoding.environ.get('TERM') != 'dumb'
                            and ui.formatted()))

    mode = ui.config('color', 'mode')

    # If pager is active, color.pagermode overrides color.mode.
    if getattr(ui, 'pageractive', False):
        mode = ui.config('color', 'pagermode', mode)

    realmode = mode
    if pycompat.iswindows:
        from . import win32

        term = encoding.environ.get('TERM')
        # TERM won't be defined in a vanilla cmd.exe environment.

        # UNIX-like environments on Windows such as Cygwin and MSYS will
        # set TERM. They appear to make a best effort attempt at setting it
        # to something appropriate. However, not all environments with TERM
        # defined support ANSI.
        ansienviron = term and 'xterm' in term

        if mode == 'auto':
            # Since "ansi" could result in terminal gibberish, we error on the
            # side of selecting "win32". However, if w32effects is not defined,
            # we almost certainly don't support "win32", so don't even try.
            # w32effects is not populated when stdout is redirected, so checking
            # it first avoids win32 calls in a state known to error out.
            if ansienviron or not w32effects or win32.enablevtmode():
                realmode = 'ansi'
            else:
                realmode = 'win32'
        # An empty w32effects is a clue that stdout is redirected, and thus
        # cannot enable VT mode.
        elif mode == 'ansi' and w32effects and not ansienviron:
            win32.enablevtmode()
    elif mode == 'auto':
        realmode = 'ansi'

    def modewarn():
        # only warn if color.mode was explicitly set and we're in
        # a formatted terminal
        if mode == realmode and formatted:
            ui.warn(_('warning: failed to set color mode to %s\n') % mode)

    if realmode == 'win32':
        ui._terminfoparams.clear()
        if not w32effects:
            modewarn()
            return None
    elif realmode == 'ansi':
        ui._terminfoparams.clear()
    elif realmode == 'terminfo':
        _terminfosetup(ui, mode, formatted)
        if not ui._terminfoparams:
            ## FIXME Shouldn't we return None in this case too?
            modewarn()
            realmode = 'ansi'
    else:
        return None

    if always or (auto and formatted):
        return realmode
    return None
286 286
def configstyles(ui):
    """Load default and user-configured label styles into ``ui._styles``.

    Invalid effects found in the configuration are warned about and
    dropped rather than aborting.
    """
    ui._styles.update(_defaultstyles)
    for status, cfgeffects in ui.configitems('color'):
        # 'color.*' and 'terminfo.*' keys define color codes, not label
        # styles; skip them here.
        if '.' not in status or status.startswith(('color.', 'terminfo.')):
            continue
        cfgeffects = ui.configlist('color', status)
        if cfgeffects:
            good = []
            for e in cfgeffects:
                if valideffect(ui, e):
                    good.append(e)
                else:
                    ui.warn(_("ignoring unknown color/effect %s "
                              "(configured in color.%s)\n")
                            % (stringutil.pprint(e), status))
            ui._styles[status] = ' '.join(good)
303 303
304 304 def _activeeffects(ui):
305 305 '''Return the effects map for the color mode set on the ui.'''
306 306 if ui._colormode == 'win32':
307 307 return w32effects
308 308 elif ui._colormode is not None:
309 309 return _effects
310 310 return {}
311 311
def valideffect(ui, effect):
    'Determine if the effect is valid or not.'
    params = ui._terminfoparams
    if not params:
        # No terminfo data loaded: validate against the active mode's
        # effect table instead.
        return effect in _activeeffects(ui)
    # In terminfo mode, accept both plain effects and '<name>_background'.
    return effect in params or effect[:-11] in params
317 317
318 318 def _effect_str(ui, effect):
319 319 '''Helper function for render_effects().'''
320 320
321 321 bg = False
322 322 if effect.endswith('_background'):
323 323 bg = True
324 324 effect = effect[:-11]
325 325 try:
326 326 attr, val, termcode = ui._terminfoparams[effect]
327 327 except KeyError:
328 328 return ''
329 329 if attr:
330 330 if termcode:
331 331 return termcode
332 332 else:
333 333 return curses.tigetstr(pycompat.sysstr(val))
334 334 elif bg:
335 335 return curses.tparm(curses.tigetstr(r'setab'), val)
336 336 else:
337 337 return curses.tparm(curses.tigetstr(r'setaf'), val)
338 338
339 339 def _mergeeffects(text, start, stop):
340 340 """Insert start sequence at every occurrence of stop sequence
341 341
342 342 >>> s = _mergeeffects(b'cyan', b'[C]', b'|')
343 343 >>> s = _mergeeffects(s + b'yellow', b'[Y]', b'|')
344 344 >>> s = _mergeeffects(b'ma' + s + b'genta', b'[M]', b'|')
345 345 >>> s = _mergeeffects(b'red' + s, b'[R]', b'|')
346 346 >>> s
347 347 '[R]red[M]ma[Y][C]cyan|[R][M][Y]yellow|[R][M]genta|'
348 348 """
349 349 parts = []
350 350 for t in text.split(stop):
351 351 if not t:
352 352 continue
353 353 parts.extend([start, t, stop])
354 354 return ''.join(parts)
355 355
def _render_effects(ui, text, effects):
    'Wrap text in commands to turn on each effect.'
    if not text:
        return text
    if ui._terminfoparams:
        # terminfo mode: concatenate the terminfo sequences; 'none' is
        # always emitted first to clear previously active attributes.
        start = ''.join(_effect_str(ui, effect)
                        for effect in ['none'] + effects.split())
        stop = _effect_str(ui, 'none')
    else:
        # ECMA-48 / win32 mode: build an SGR escape sequence from the
        # numeric effect codes.
        activeeffects = _activeeffects(ui)
        start = [pycompat.bytestr(activeeffects[e])
                 for e in ['none'] + effects.split()]
        start = '\033[' + ';'.join(start) + 'm'
        stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
    return _mergeeffects(text, start, stop)
371 371
# Matches a complete ANSI SGR escape sequence, e.g. b'\x1b[31;1m'.
_ansieffectre = re.compile(br'\x1b\[[0-9;]*m')

def stripeffects(text):
    """Strip ANSI control codes which could be inserted by colorlabel()

    ``text`` is a byte string, matching the bytes pattern above.
    """
    # Use an explicitly-bytes replacement: the pattern is bytes, and
    # mixing a str replacement with a bytes pattern fails on Python 3.
    return _ansieffectre.sub(b'', text)
377 377
def colorlabel(ui, msg, label):
    """add color control code according to the mode"""
    if ui._colormode == 'debug':
        # Debug mode: annotate textually as "[label|msg]" instead of
        # emitting control codes.
        if label and msg:
            if msg.endswith('\n'):
                msg = "[%s|%s]\n" % (label, msg[:-1])
            else:
                msg = "[%s|%s]" % (label, msg)
    elif ui._colormode is not None:
        # Collect effects from the configured styles; a label with no
        # configured style may itself name a valid effect.
        effects = []
        for l in label.split():
            s = ui._styles.get(l, '')
            if s:
                effects.append(s)
            elif valideffect(ui, l):
                effects.append(l)
        effects = ' '.join(effects)
        if effects:
            # Apply the effects to each line separately.
            msg = '\n'.join([_render_effects(ui, line, effects)
                             for line in msg.split('\n')])
    return msg
399 399
w32effects = None
if pycompat.iswindows:
    import ctypes

    _kernel32 = ctypes.windll.kernel32

    _WORD = ctypes.c_ushort

    _INVALID_HANDLE_VALUE = -1

    # ctypes mirrors of the Win32 console structures used by
    # GetConsoleScreenBufferInfo.
    class _COORD(ctypes.Structure):
        _fields_ = [(r'X', ctypes.c_short),
                    (r'Y', ctypes.c_short)]

    class _SMALL_RECT(ctypes.Structure):
        _fields_ = [(r'Left', ctypes.c_short),
                    (r'Top', ctypes.c_short),
                    (r'Right', ctypes.c_short),
                    (r'Bottom', ctypes.c_short)]

    class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
        _fields_ = [(r'dwSize', _COORD),
                    (r'dwCursorPosition', _COORD),
                    (r'wAttributes', _WORD),
                    (r'srWindow', _SMALL_RECT),
                    (r'dwMaximumWindowSize', _COORD)]

    _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
    _STD_ERROR_HANDLE = 0xfffffff4 # (DWORD)-12

    _FOREGROUND_BLUE = 0x0001
    _FOREGROUND_GREEN = 0x0002
    _FOREGROUND_RED = 0x0004
    _FOREGROUND_INTENSITY = 0x0008

    _BACKGROUND_BLUE = 0x0010
    _BACKGROUND_GREEN = 0x0020
    _BACKGROUND_RED = 0x0040
    _BACKGROUND_INTENSITY = 0x0080

    _COMMON_LVB_REVERSE_VIDEO = 0x4000
    _COMMON_LVB_UNDERSCORE = 0x8000

    # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
    w32effects = {
        'none': -1,
        'black': 0,
        'red': _FOREGROUND_RED,
        'green': _FOREGROUND_GREEN,
        'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
        'blue': _FOREGROUND_BLUE,
        'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
        'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
        'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
        'bold': _FOREGROUND_INTENSITY,
        'black_background': 0x100,                      # unused value > 0x0f
        'red_background': _BACKGROUND_RED,
        'green_background': _BACKGROUND_GREEN,
        'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
        'blue_background': _BACKGROUND_BLUE,
        'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
        'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
        'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
                             _BACKGROUND_BLUE),
        'bold_background': _BACKGROUND_INTENSITY,
        'underline': _COMMON_LVB_UNDERSCORE,  # double-byte charsets only
        'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
    }

    # Attribute bits that are OR-ed into the current attributes rather
    # than replacing the color nibbles (see mapcolor below).
    passthrough = {_FOREGROUND_INTENSITY,
                   _BACKGROUND_INTENSITY,
                   _COMMON_LVB_UNDERSCORE,
                   _COMMON_LVB_REVERSE_VIDEO}

    stdout = _kernel32.GetStdHandle(
                  _STD_OUTPUT_HANDLE)  # don't close the handle returned
    if stdout is None or stdout == _INVALID_HANDLE_VALUE:
        w32effects = None
    else:
        csbi = _CONSOLE_SCREEN_BUFFER_INFO()
        if not _kernel32.GetConsoleScreenBufferInfo(
                    stdout, ctypes.byref(csbi)):
            # stdout may not support GetConsoleScreenBufferInfo()
            # when called from subprocess or redirected
            w32effects = None
        else:
            origattr = csbi.wAttributes
            ansire = re.compile(br'\033\[([^m]*)m([^\033]*)(.*)',
                                re.MULTILINE | re.DOTALL)

            def win32print(ui, writefunc, text, **opts):
                # Translate embedded ANSI-like sequences in ``text`` into
                # SetConsoleTextAttribute calls around ``writefunc``.
                label = opts.get(r'label', '')
                attr = origattr

                def mapcolor(val, attr):
                    # -1 resets to the console's original attributes.
                    if val == -1:
                        return origattr
                    elif val in passthrough:
                        return attr | val
                    elif val > 0x0f:
                        # background color: replace the background nibble,
                        # keep the rest.
                        return (val & 0x70) | (attr & 0x8f)
                    else:
                        # foreground color: replace the foreground nibble,
                        # keep the rest.
                        return (val & 0x07) | (attr & 0xf8)

                # determine console attributes based on labels
                for l in label.split():
                    style = ui._styles.get(l, '')
                    for effect in style.split():
                        try:
                            attr = mapcolor(w32effects[effect], attr)
                        except KeyError:
                            # w32effects could not have certain attributes so
                            # we skip them if not found
                            pass
                # hack to ensure regexp finds data
                if not text.startswith(b'\033['):
                    text = b'\033[m' + text

                # Look for ANSI-like codes embedded in text
                m = re.match(ansire, text)

                try:
                    while m:
                        for sattr in m.group(1).split(b';'):
                            if sattr:
                                attr = mapcolor(int(sattr), attr)
                        ui.flush()
                        _kernel32.SetConsoleTextAttribute(stdout, attr)
                        writefunc(m.group(2))
                        m = re.match(ansire, m.group(3))
                finally:
                    # Explicitly reset original attributes
                    ui.flush()
                    _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,2862 +1,2862 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import, print_function
10 10
11 11 import collections
12 12 import contextlib
13 13 import copy
14 14 import email
15 15 import errno
16 16 import hashlib
17 17 import os
18 18 import posixpath
19 19 import re
20 20 import shutil
21 21 import zlib
22 22
23 23 from .i18n import _
24 24 from .node import (
25 25 hex,
26 26 short,
27 27 )
28 28 from . import (
29 29 copies,
30 30 diffhelper,
31 31 diffutil,
32 32 encoding,
33 33 error,
34 34 mail,
35 35 match as matchmod,
36 36 mdiff,
37 37 pathutil,
38 38 pycompat,
39 39 scmutil,
40 40 similar,
41 41 util,
42 42 vfs as vfsmod,
43 43 )
44 44 from .utils import (
45 45 dateutil,
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
stringio = util.stringio

# Precompiled patterns shared across this module.
# gitre matches a "diff --git a/... b/..." line, capturing both names.
# tabsplitter/wordsplitter tokenize diff text (presumably for intra-line
# diff highlighting; their consumers are outside this chunk).
gitre = re.compile(br'diff --git a/(.*) b/(.*)')
tabsplitter = re.compile(br'(\t+|[^\t]+)')
wordsplitter = re.compile(br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|'
                          b'[^ \ta-zA-Z0-9_\x80-\xff])')

# re-exported so callers can catch patch errors without importing error
PatchError = error.PatchError
58 58
59 59 # public functions
60 60
def split(stream):
    '''return an iterator of individual patches from a stream'''

    def isheader(line, inheader):
        # Return True if line looks like an RFC 2822 "Key: value" header
        # (or a continuation line while already inside a header block).
        if inheader and line.startswith((' ', '\t')):
            # continuation
            return True
        if line.startswith((' ', '-', '+')):
            # diff line - don't check for header pattern in there
            return False
        l = line.split(': ', 1)
        return len(l) == 2 and ' ' not in l[0]

    def chunk(lines):
        # wrap the accumulated lines in a file-like object
        return stringio(''.join(lines))

    def hgsplit(stream, cur):
        # Split on "# HG changeset patch" markers (hg export output).
        inheader = True

        for line in stream:
            if not line.strip():
                inheader = False
            if not inheader and line.startswith('# HG changeset patch'):
                yield chunk(cur)
                cur = []
                inheader = True

            cur.append(line)

        if cur:
            yield chunk(cur)

    def mboxsplit(stream, cur):
        # Split an mbox: each "From " line starts a new message; recurse
        # into split() on each message body (dropping the "From " line).
        for line in stream:
            if line.startswith('From '):
                for c in split(chunk(cur[1:])):
                    yield c
                cur = []

            cur.append(line)

        if cur:
            for c in split(chunk(cur[1:])):
                yield c

    def mimesplit(stream, cur):
        # Let the email parser walk MIME parts, yielding only the parts
        # whose content type can plausibly carry a patch.
        def msgfp(m):
            # serialize a message object back into a file-like object
            fp = stringio()
            g = email.Generator.Generator(fp, mangle_from_=False)
            g.flatten(m)
            fp.seek(0)
            return fp

        for line in stream:
            cur.append(line)
        c = chunk(cur)

        m = mail.parse(c)
        if not m.is_multipart():
            yield msgfp(m)
        else:
            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
            for part in m.walk():
                ct = part.get_content_type()
                if ct not in ok_types:
                    continue
                yield msgfp(part)

    def headersplit(stream, cur):
        # Split on bare header blocks separating concatenated patches.
        inheader = False

        for line in stream:
            if not inheader and isheader(line, inheader):
                yield chunk(cur)
                cur = []
                inheader = True
            if inheader and not isheader(line, inheader):
                inheader = False

            cur.append(line)

        if cur:
            yield chunk(cur)

    def remainder(cur):
        # no recognizable structure: treat everything as one patch
        yield chunk(cur)

    class fiter(object):
        # Adapt objects that only offer readline() to the iterator
        # protocol (supports both py2 next() and py3 __next__()).
        def __init__(self, fp):
            self.fp = fp

        def __iter__(self):
            return self

        def next(self):
            l = self.fp.readline()
            if not l:
                raise StopIteration
            return l

        __next__ = next

    inheader = False
    cur = []

    mimeheaders = ['content-type']

    if not util.safehasattr(stream, 'next'):
        # http responses, for example, have readline but not next
        stream = fiter(stream)

    # Peek at lines until the overall format is recognized, then hand the
    # stream (plus the lines already consumed) to the matching splitter.
    for line in stream:
        cur.append(line)
        if line.startswith('# HG changeset patch'):
            return hgsplit(stream, cur)
        elif line.startswith('From '):
            return mboxsplit(stream, cur)
        elif isheader(line, inheader):
            inheader = True
            if line.split(':', 1)[0].lower() in mimeheaders:
                # let email parser handle this
                return mimesplit(stream, cur)
        elif line.startswith('--- ') and inheader:
            # No evil headers seen by diff start, split by hand
            return headersplit(stream, cur)
        # Not enough info, keep reading

    # if we are here, we have a very plain patch
    return remainder(cur)
189 189
## Some facility for extensible patch parsing:
# list of pairs ("header to match", "data key")
# A line "# <header> <value>" in an hg patch header stores <value> under
# the corresponding key of the data dict built by _extract().
patchheadermap = [('Date', 'date'),
                  ('Branch', 'branch'),
                  ('Node ID', 'nodeid'),
                  ]
196 196
@contextlib.contextmanager
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return a dictionary. Standard keys are:
      - filename,
      - message,
      - user,
      - date,
      - branch,
      - node,
      - p1,
      - p2.
    Any item can be missing from the dictionary. If filename is missing,
    fileobj did not contain a patch.

    The temporary file backing "filename" lives only for the duration of
    the context: it is unlinked on exit, whether or not an exception was
    raised.
    '''
    handle, patchname = pycompat.mkstemp(prefix='hg-patch-')
    patchfp = os.fdopen(handle, r'wb')
    try:
        yield _extract(ui, fileobj, patchname, patchfp)
    finally:
        # always release the temporary file, even on error
        patchfp.close()
        os.unlink(patchname)
222 222
def _extract(ui, fileobj, tmpname, tmpfp):
    """Worker for extract(): parse fileobj (an email or raw patch),
    write the diff body to tmpfp and return the metadata dictionary
    described in extract()'s docstring.
    """

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
                        br'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        br'---[ \t].*?^\+\+\+[ \t]|'
                        br'\*\*\*[ \t].*?^---[ \t])',
                        re.MULTILINE | re.DOTALL)

    data = {}

    msg = mail.parse(fileobj)

    subject = msg[r'Subject'] and mail.headdecode(msg[r'Subject'])
    data['user'] = msg[r'From'] and mail.headdecode(msg[r'From'])
    if not subject and not data['user']:
        # Not an email, restore parsed headers if any
        subject = '\n'.join(': '.join(map(encoding.strtolocal, h))
                            for h in msg.items()) + '\n'

    # should try to parse msg['Date']
    parents = []

    if subject:
        # strip a leading "[PATCH n/m]" style tag from the subject
        if subject.startswith('[PATCH'):
            pend = subject.find(']')
            if pend >= 0:
                subject = subject[pend + 1:].lstrip()
        subject = re.sub(br'\n[ \t]+', ' ', subject)
        ui.debug('Subject: %s\n' % subject)
    if data['user']:
        ui.debug('From: %s\n' % data['user'])
    diffs_seen = 0
    ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
    message = ''
    for part in msg.walk():
        content_type = pycompat.bytestr(part.get_content_type())
        ui.debug('Content-Type: %s\n' % content_type)
        if content_type not in ok_types:
            continue
        payload = part.get_payload(decode=True)
        m = diffre.search(payload)
        if m:
            # this part contains a diff; everything before the diff is
            # candidate commit message / hg patch header material
            hgpatch = False
            hgpatchheader = False
            ignoretext = False

            ui.debug('found patch at byte %d\n' % m.start(0))
            diffs_seen += 1
            cfp = stringio()
            for line in payload[:m.start(0)].splitlines():
                if line.startswith('# HG changeset patch') and not hgpatch:
                    ui.debug('patch generated by hg export\n')
                    hgpatch = True
                    hgpatchheader = True
                    # drop earlier commit message content
                    cfp.seek(0)
                    cfp.truncate()
                    subject = None
                elif hgpatchheader:
                    # "# ..." lines following the export marker carry
                    # structured metadata (user, parents, patchheadermap)
                    if line.startswith('# User '):
                        data['user'] = line[7:]
                        ui.debug('From: %s\n' % data['user'])
                    elif line.startswith("# Parent "):
                        parents.append(line[9:].lstrip())
                    elif line.startswith("# "):
                        for header, key in patchheadermap:
                            prefix = '# %s ' % header
                            if line.startswith(prefix):
                                data[key] = line[len(prefix):]
                    else:
                        hgpatchheader = False
                elif line == '---':
                    # conventional end-of-commit-message marker
                    ignoretext = True
                if not hgpatchheader and not ignoretext:
                    cfp.write(line)
                    cfp.write('\n')
            message = cfp.getvalue()
            if tmpfp:
                tmpfp.write(payload)
                if not payload.endswith('\n'):
                    tmpfp.write('\n')
        elif not diffs_seen and message and content_type == 'text/plain':
            # plain-text parts before any diff extend the message
            message += '\n' + payload

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    data['message'] = message
    tmpfp.close()
    if parents:
        data['p1'] = parents.pop(0)
    if parents:
        data['p2'] = parents.pop(0)

    if diffs_seen:
        data['filename'] = tmpname

    return data
322 322
class patchmeta(object):
    """Metadata describing a single patched file.

    ``op`` is one of ADD, DELETE, RENAME, MODIFY or COPY and defaults to
    MODIFY.  ``path`` is the patched file's path.  ``oldpath`` names the
    source file for COPY and RENAME operations and is None otherwise.
    ``mode`` is None unless the file mode changed, in which case it is
    an (islink, isexec) pair of flag bits.  ``binary`` is True for GIT
    binary patches.
    """

    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = 'MODIFY'
        self.binary = False

    def setmode(self, mode):
        # split the octal file mode into (islink, isexec) flag bits
        self.mode = (mode & 0o20000, mode & 0o100)

    def copy(self):
        dup = patchmeta(self.path)
        dup.oldpath = self.oldpath
        dup.mode = self.mode
        dup.op = self.op
        dup.binary = self.binary
        return dup

    def _ispatchinga(self, afile):
        # the null file on the "a" side means this patch creates the file
        if afile == '/dev/null':
            return self.op == 'ADD'
        return afile == 'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        # the null file on the "b" side means this patch deletes the file
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return bfile == 'b/' + self.path

    def ispatching(self, afile, bfile):
        """Return True if this metadata matches the given a/b file pair."""
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
368 368
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Scan the stream for git extended headers, accumulating one
    # patchmeta per "diff --git" section.
    results = []
    current = None
    for line in lr:
        line = line.rstrip(' \r\n')
        if line.startswith('diff --git a/'):
            m = gitre.match(line)
            if m:
                if current:
                    results.append(current)
                # the "b/" name is the destination path
                current = patchmeta(m.group(2))
        elif current:
            if line.startswith('--- '):
                # start of the unified diff body ends this header section
                results.append(current)
                current = None
                continue
            if line.startswith('rename from '):
                current.op = 'RENAME'
                current.oldpath = line[12:]
            elif line.startswith('rename to '):
                current.path = line[10:]
            elif line.startswith('copy from '):
                current.op = 'COPY'
                current.oldpath = line[10:]
            elif line.startswith('copy to '):
                current.path = line[8:]
            elif line.startswith('deleted file'):
                current.op = 'DELETE'
            elif line.startswith('new file mode '):
                current.op = 'ADD'
                current.setmode(int(line[-6:], 8))
            elif line.startswith('new mode '):
                current.setmode(int(line[-6:], 8))
            elif line.startswith('GIT binary patch'):
                current.binary = True
    if current:
        results.append(current)

    return results
412 412
class linereader(object):
    """Wrap a file object so lines can be pushed back onto the input.

    Pushed-back lines are returned (oldest first) before any further
    reading from the underlying file.
    """

    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # queue *line* for re-delivery; None is silently ignored
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if not self.buf:
            return self.fp.readline()
        return self.buf.pop(0)

    def __iter__(self):
        # iterate until readline() reports end of input
        return iter(self.readline, '')
432 432
class abstractbackend(object):
    """Abstract destination for applying a patch.

    Concrete subclasses direct file reads/writes to the working
    directory, a repository context, or an in-memory store.
    """
    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. failed is the number of hunks
        which failed to apply and total the total number of hunks for this
        file.
        """
        # deliberately a no-op by default: not every backend keeps rejects

    def exists(self, fname):
        """Return True if the target file exists."""
        raise NotImplementedError

    def close(self):
        """Finalize the backend; the return value is backend-specific."""
        raise NotImplementedError
466 466
class fsbackend(abstractbackend):
    """Patch backend reading and writing files in a filesystem tree."""

    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = vfsmod.vfs(basedir)

    def getfile(self, fname):
        """Return (data, (islink, isexec)); (None, None) if missing."""
        if self.opener.islink(fname):
            # for symlinks, the data is the link target
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
        try:
            return (self.opener.read(fname), (False, isexec))
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # content unchanged: only the flags need updating
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        rmdir = self.ui.configbool('experimental', 'removeemptydirs')
        self.opener.unlinkpath(fname, ignoremissing=True, rmdir=rmdir)

    def writerej(self, fname, failed, total, lines):
        rejname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, rejname))
        rejfp = self.opener(rejname, 'w')
        rejfp.writelines(lines)
        rejfp.close()

    def exists(self, fname):
        return self.opener.lexists(fname)
516 516
class workingbackend(fsbackend):
    """Filesystem backend that also records changes in the dirstate."""

    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # refuse to touch files that exist but are unknown to the dirstate
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        """Record copies/removals in the dirstate; return changed files."""
        wctx = self.repo[None]
        touched = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
        for f in self.removed:
            if f not in self.repo.dirstate:
                # File was deleted and no longer belongs to the
                # dirstate, it was probably marked added then
                # deleted, and should not be considered by
                # marktouched().
                touched.discard(f)
        if touched:
            scmutil.marktouched(self.repo, touched, self.similarity)
        return sorted(self.changed)
560 560
class filestore(object):
    """Store file contents, spilling to a temp directory past *maxsize*.

    Payloads are kept in memory until the cumulative in-memory size
    would exceed ``maxsize`` (default 4 MiB, unlimited if negative);
    later files are written to a temporary directory that close()
    removes.
    """

    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        self.maxsize = 4 * (2 ** 20) if maxsize is None else maxsize
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            # still within the in-memory budget
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        if self.opener is None:
            root = pycompat.mkdtemp(prefix='hg-patch-')
            self.opener = vfsmod.vfs(root)
        # Avoid filename issues with these simple names
        fn = '%d' % self.created
        self.opener.write(fn, data)
        self.created += 1
        self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        """Return (data, mode, copied); (None, None, None) if unknown."""
        if fname in self.data:
            return self.data[fname]
        if self.opener and fname in self.files:
            fn, mode, copied = self.files[fname]
            return self.opener.read(fn), mode, copied
        return None, None, None

    def close(self):
        # remove the spill directory, if one was ever created
        if self.opener:
            shutil.rmtree(self.opener.base)
597 597
class repobackend(abstractbackend):
    """Backend reading from a changectx and writing into a filestore."""

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        # only files present in the source context may be patched
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            return None, None
        flags = fctx.flags()
        return fctx.data(), ('l' in flags, 'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # mode-only change: carry the existing content forward
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        """Return the set of all files this backend touched."""
        return self.changed | self.removed
639 639
640 640 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
641 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
642 contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
641 unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
642 contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
643 643 eolmodes = ['strict', 'crlf', 'lf', 'auto']
644 644
class patchfile(object):
    """State and logic for applying the hunks of one patch to one file.

    Loads the target file's content from ``backend`` (or from ``store``
    when the file is a copy/rename), applies hunks one at a time via
    apply() -- collecting failed hunks in ``self.rej`` -- and writes the
    patched result (and any rejects) back in close().
    """
    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        # copies/renames read their base content from the store, not
        # from the target tree
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
            self.ui.warn(_("(use '--prefix' to apply patch relative to the "
                           "current directory)\n"))

        self.hash = {}
        self.dirty = 0
        self.offset = 0
        self.skew = 0
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        """Write lines through the backend, restoring the requested EOLs."""
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l.endswith('\n'):
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        # announce the file being patched at most once; as a warning if
        # requested, otherwise only in verbose mode
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1:] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply one hunk.

        Returns -1 if the hunk was rejected, 0 on a clean application,
        or the fuzz level that was needed to make it apply.
        """
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                              h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace the whole content, no fuzzing
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if self.skew == 0 and diffhelper.testhunk(old, self.lines, oldstart):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in pycompat.xrange(self.ui.configint("patch", "fuzz") + 1):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelper.testhunk(old, self.lines, l):
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        """Flush patched content and rejects; return the reject count."""
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
860 860
class header(object):
    """The header of a single file diff: everything before the hunks."""

    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
    diff_re = re.compile('diff -r .* (.*)$')
    allhunks_re = re.compile('(?:index|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|deleted|copy|rename) ')
    newfile_re = re.compile('(?:new file)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        # an "index " line marks a binary diff
        return any(h.startswith('index ') for h in self.header)

    def pretty(self, fp):
        for line in self.header:
            if line.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(line):
                fp.write(line)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                break
            if line.startswith('---'):
                fp.write(_('%d hunks, %d lines changed\n')
                         % (len(self.hunks),
                            sum(max(h.added, h.removed) for h in self.hunks)))
                break
            fp.write(line)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        # True if the whole file must be taken as a unit (binary/deleted)
        return any(self.allhunks_re.match(h) for h in self.header)

    def files(self):
        """Return [file] or [fromfile, tofile] named by the diff line."""
        m = self.diffgit_re.match(self.header[0])
        if not m:
            return self.diff_re.match(self.header[0]).groups()
        fromfile, tofile = m.groups()
        if fromfile == tofile:
            return [fromfile]
        return [fromfile, tofile]

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % ' '.join(map(repr, self.files()))

    def isnewfile(self):
        return any(self.newfile_re.match(h) for h in self.header)

    def special(self):
        # Special files are shown only at the header level and not at the hunk
        # level for example a file that has been deleted is a special file.
        # The user cannot change the content of the operation, in the case of
        # the deleted file he has to take the deletion or not take it, he
        # cannot take some of it.
        # Newly added files are special if they are empty, they are not special
        # if they have some content as we want to be able to change it
        nocontent = len(self.header) == 2
        emptynewfile = self.isnewfile() and nocontent
        return (emptynewfile
                or any(self.special_re.match(h) for h in self.header))
932 932
933 933 class recordhunk(object):
934 934 """patch hunk
935 935
936 936 XXX shouldn't we merge this with the other hunk class?
937 937 """
938 938
939 939 def __init__(self, header, fromline, toline, proc, before, hunk, after,
940 940 maxcontext=None):
941 941 def trimcontext(lines, reverse=False):
942 942 if maxcontext is not None:
943 943 delta = len(lines) - maxcontext
944 944 if delta > 0:
945 945 if reverse:
946 946 return delta, lines[delta:]
947 947 else:
948 948 return delta, lines[:maxcontext]
949 949 return 0, lines
950 950
951 951 self.header = header
952 952 trimedbefore, self.before = trimcontext(before, True)
953 953 self.fromline = fromline + trimedbefore
954 954 self.toline = toline + trimedbefore
955 955 _trimedafter, self.after = trimcontext(after, False)
956 956 self.proc = proc
957 957 self.hunk = hunk
958 958 self.added, self.removed = self.countchanges(self.hunk)
959 959
960 960 def __eq__(self, v):
961 961 if not isinstance(v, recordhunk):
962 962 return False
963 963
964 964 return ((v.hunk == self.hunk) and
965 965 (v.proc == self.proc) and
966 966 (self.fromline == v.fromline) and
967 967 (self.header.files() == v.header.files()))
968 968
969 969 def __hash__(self):
970 970 return hash((tuple(self.hunk),
971 971 tuple(self.header.files()),
972 972 self.fromline,
973 973 self.proc))
974 974
975 975 def countchanges(self, hunk):
976 976 """hunk -> (n+,n-)"""
977 977 add = len([h for h in hunk if h.startswith('+')])
978 978 rem = len([h for h in hunk if h.startswith('-')])
979 979 return add, rem
980 980
981 981 def reversehunk(self):
982 982 """return another recordhunk which is the reverse of the hunk
983 983
984 984 If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
985 985 that, swap fromline/toline and +/- signs while keep other things
986 986 unchanged.
987 987 """
988 988 m = {'+': '-', '-': '+', '\\': '\\'}
989 989 hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
990 990 return recordhunk(self.header, self.toline, self.fromline, self.proc,
991 991 self.before, hunk, self.after)
992 992
993 993 def write(self, fp):
994 994 delta = len(self.before) + len(self.after)
995 995 if self.after and self.after[-1] == '\\ No newline at end of file\n':
996 996 delta -= 1
997 997 fromlen = delta + self.removed
998 998 tolen = delta + self.added
999 999 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
1000 1000 (self.fromline, fromlen, self.toline, tolen,
1001 1001 self.proc and (' ' + self.proc)))
1002 1002 fp.write(''.join(self.before + self.hunk + self.after))
1003 1003
1004 1004 pretty = write
1005 1005
    def filename(self):
        """Name of the file this hunk applies to (delegates to the header)."""
        return self.header.filename()
1008 1008
    def __repr__(self):
        # %r quotes the filename so unusual names stay readable in debug
        # output.
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
1011 1011
def getmessages():
    """Return the prompt and help message catalog for interactive filtering.

    Keyed first by context ('multiple'/'single' for per-change prompts,
    'help' for the choice help text shown by promptchoice), then by the
    operation being performed ('apply', 'discard' or 'record').  The '$$'
    separators in the help entries delimit promptchoice alternatives.
    """
    return {
        'multiple': {
            'apply': _("apply change %d/%d to '%s'?"),
            'discard': _("discard change %d/%d to '%s'?"),
            'record': _("record change %d/%d to '%s'?"),
        },
        'single': {
            'apply': _("apply this change to '%s'?"),
            'discard': _("discard this change to '%s'?"),
            'record': _("record this change to '%s'?"),
        },
        'help': {
            'apply': _('[Ynesfdaq?]'
                       '$$ &Yes, apply this change'
                       '$$ &No, skip this change'
                       '$$ &Edit this change manually'
                       '$$ &Skip remaining changes to this file'
                       '$$ Apply remaining changes to this &file'
                       '$$ &Done, skip remaining changes and files'
                       '$$ Apply &all changes to all remaining files'
                       '$$ &Quit, applying no changes'
                       '$$ &? (display help)'),
            'discard': _('[Ynesfdaq?]'
                         '$$ &Yes, discard this change'
                         '$$ &No, skip this change'
                         '$$ &Edit this change manually'
                         '$$ &Skip remaining changes to this file'
                         '$$ Discard remaining changes to this &file'
                         '$$ &Done, skip remaining changes and files'
                         '$$ Discard &all changes to all remaining files'
                         '$$ &Quit, discarding no changes'
                         '$$ &? (display help)'),
            'record': _('[Ynesfdaq?]'
                        '$$ &Yes, record this change'
                        '$$ &No, skip this change'
                        '$$ &Edit this change manually'
                        '$$ &Skip remaining changes to this file'
                        '$$ Record remaining changes to this &file'
                        '$$ &Done, skip remaining changes and files'
                        '$$ Record &all changes to all remaining files'
                        '$$ &Quit, recording no changes'
                        '$$ &? (display help)'),
        }
    }
1057 1057
def filterpatch(ui, headers, operation=None):
    """Interactively filter patch chunks into applied-only chunks"""
    messages = getmessages()

    if operation is None:
        operation = 'record'

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # An earlier "rest of file"/"all"/"done" answer short-circuits any
        # further prompting for this file or for everything.
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = messages['help'][operation]
            # promptchoice returns the index of the chosen '$$' entry:
            # 0=yes 1=no 2=edit 3=skip-file 4=rest-of-file 5=done 6=all
            # 7=quit 8=help
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, encoding.lower(t)))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # https://mercurial-scm.org/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = pycompat.mkstemp(prefix="hg-editor-",
                                                      suffix=".diff")
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = util.nativeeolwriter(os.fdopen(patchfd, r'wb'))
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ret = ui.system("%s \"%s\"" % (editor, patchfn),
                                    environ={'HGUSER': ui.username()},
                                    blockedtag='filterpatch')
                    if ret != 0:
                        ui.warn(_("editor exited with exit code %d\n") % ret)
                        continue
                    # Remove comment lines
                    patchfp = open(patchfn, r'rb')
                    ncpatchfp = stringio()
                    for line in util.iterfile(patchfp):
                        line = util.fromnativeeol(line)
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise error.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}        # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        # fixoffset tracks the line drift created by rejected hunks so that
        # later accepted hunks in the same file still target the right line.
        fixoffset = 0
        hdr = ''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
        msg = (_('examine changes to %s?') %
               _(' and ').join("'%s'" % f for f in h.files()))
        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
        if not r:
            continue
        applied[h.filename()] = [h]
        if h.allhunks():
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = messages['single'][operation] % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = messages['multiple'][operation] % (idx, total,
                                                         chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                                                      skipall, msg, chunk)
            if r:
                if fixoffset:
                    # copy so the original hunk object is left untouched
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                # user edited the hunk: apply the replacement hunks instead
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    # Keep only files where something beyond a bare header was selected.
    # NOTE(review): the trailing empty dict appears to exist for interface
    # parity with other chunk selectors -- confirm with callers.
    return (sum([h for h in applied.itervalues()
                 if h[0].special() or len(h) > 1], []), {})
class hunk(object):
    """One hunk of a text patch, in unified or context format.

    Keeps three parallel views of the hunk: ``self.hunk`` (raw lines,
    starting with the descriptor line), ``self.a`` (old-side lines with
    their '-'/' ' prefix) and ``self.b`` (new-side lines, prefix
    stripped).
    """
    def __init__(self, desc, num, lr, context):
        # desc: the "@@ ..." or "*** ..." descriptor line; num: 1-based
        # hunk index for error messages; lr: linereader positioned after
        # desc (None builds an empty shell, see getnormalized); context:
        # True for context-diff format, falsy for unified.
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse a unified-format hunk body from lr.

        ``unidesc`` is a module-level regex matching the "@@ -a,n +b,m @@"
        descriptor; a missing length defaults to 1.
        """
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        try:
            diffhelper.addlines(lr, self.hunk, self.lena, self.lenb,
                                self.a, self.b)
        except error.ParseError as e:
            raise PatchError(_("bad hunk #%d: %s") % (self.number, e))
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Parse a context-format hunk (old half then new half) from lr,
        rebuilding a unified-style descriptor at the end."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # old half: '- ' removals, '! ' changes, '  ' context
        for x in pycompat.xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # "no newline at end of file" marker: strip the newline we
            # stored on the previous line
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # new half: '+ ' additions, '! ' changes, '  ' context; interleave
        # into self.hunk in unified order using the hunki cursor
        for x in pycompat.xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                # NOTE(review): message says "old text" although this is
                # the new half of the hunk
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        # Consume a trailing "\ No newline at end of file" marker if
        # present; otherwise push the line back.
        l = lr.readline()
        if l.startswith('\ '):
            diffhelper.fixnewline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        # True once both halves contain as many lines as the descriptor
        # promised.
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in pycompat.xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1].startswith(' '):
                    top += 1
                else:
                    break
            if not toponly:
                for x in pycompat.xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1].startswith(' '):
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) with up to ``fuzz``
        context lines trimmed and 0-based start offsets."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1428 1428
class binhunk(object):
    'A binary patch file.'
    def __init__(self, lr, fname):
        # text: decompressed payload once _read succeeds; delta: True when
        # the payload is a git binary delta against the current file.
        self.text = None
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self, lines):
        """Return the new file content as a single-element list; for a
        delta hunk, ``lines`` is the base content to apply it to."""
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        def getline(lr, hunk):
            # Read one raw line, remember it in hunk, return it stripped.
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        # Scan for the "literal <size>" or "delta <size>" header giving the
        # decompressed payload length.
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # git base85: the first character encodes the decoded byte
            # count of the line, A-Z for 1-26 and a-z for 27-52.
            l = line[0:1]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(util.b85decode(line[1:])[:l])
            except ValueError as e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, stringutil.forcebytestr(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1483 1483
def parsefilename(str):
    """Extract the file name from a '--- '/'+++ ' diff header line.

    The name runs from column 4 up to the first tab (or, failing that,
    the first space); trailing line endings are stripped first.
    """
    trimmed = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        cut = trimmed.find(sep)
        if cut >= 0:
            return trimmed[:cut]
    return trimmed
1493 1493
def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument

    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ...  c
    ...  1
    ...  2
    ... + 3
    ... -4
    ...  5
    ...  d
    ... +lastline"""
    >>> hunks = parsepatch([rawpatch])
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...     hunkscomingfromfilterpatch.extend(h.hunks)

    >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
    >>> from . import util
    >>> fp = util.stringio()
    >>> for c in reversedhunks:
    ...      c.write(fp)
    >>> fp.seek(0) or None
    >>> reversedpatch = fp.read()
    >>> print(pycompat.sysstr(reversedpatch))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -1,4 +1,3 @@
    -firstline
     c
     1
     2
    @@ -2,6 +1,6 @@
     c
     1
     2
    - 3
    +4
     5
     d
    @@ -6,3 +5,2 @@
     5
     d
    -lastline

    '''

    # Headers pass through untouched; anything that knows how to reverse
    # itself (recordhunk) is replaced by its reversal.
    return [c.reversehunk() if util.safehasattr(c, 'reversehunk') else c
            for c in hunks]
1556 1556
def parsepatch(originalchunks, maxcontext=None):
    """patch -> [] of headers -> [] of hunks

    If maxcontext is not None, trim context lines if necessary.

    >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,8 +1,10 @@
    ...  1
    ...  2
    ... -3
    ...  4
    ...  5
    ...  6
    ... +6.1
    ... +6.2
    ...  7
    ...  8
    ... +9'''
    >>> out = util.stringio()
    >>> headers = parsepatch([rawpatch], maxcontext=1)
    >>> for header in headers:
    ...     header.write(out)
    ...     for hunk in header.hunks:
    ...         hunk.write(out)
    >>> print(pycompat.sysstr(out.getvalue()))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -2,3 +2,2 @@
     2
    -3
     4
    @@ -6,2 +5,4 @@
     6
    +6.1
    +6.2
     7
    @@ -8,1 +9,2 @@
     8
    +9
    """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            self.fromline = 0
            self.toline = 0
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            # A new "@@" range: flush any pending hunk first.
            self.addcontext([])
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # Context after a hunk closes the hunk; build a recordhunk and
            # advance the line counters past it.
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                        self.proc, self.before, self.hunk, context, maxcontext)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
            self.context = context

        def addhunk(self, hunk):
            # Context seen before the change lines becomes the hunk's
            # leading context.
            if self.context:
                self.before = self.context
                self.context = []
            if self.hunk:
                self.addcontext([])
            self.hunk = hunk

        def newfile(self, hdr):
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        # state -> {event -> handler}; handlers are plain functions in the
        # class dict and are invoked below with the parser instance passed
        # explicitly.
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
        }

    p = parser()
    fp = stringio()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1683 1683
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform(b'a/b/c', 0, b'')
    ('', 'a/b/c')
    >>> pathtransform(b'   a/b/c   ', 0, b'')
    ('', '   a/b/c')
    >>> pathtransform(b'   a/b/c   ', 2, b'')
    ('a/b/', 'c')
    >>> pathtransform(b'a/b/c', 0, b'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform(b'   a//b/c   ', 2, b'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform(b'a/b/c', 3, b'')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    # Nothing to strip: just prepend the prefix.
    if strip == 0:
        return '', prefix + path.rstrip()
    pathlen = len(path)
    i = 0
    remaining = strip
    while remaining > 0:
        i = path.find('/', i)
        if i == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (remaining, strip, path))
        i += 1
        # consume '//' in the path
        while i < pathlen - 1 and path[i:i + 1] == '/':
            i += 1
        remaining -= 1
    return path[:i].lstrip(), prefix + path[i:].rstrip()
1721 1721
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    """Build a patchmeta for a plain (non-git) patch hunk.

    Decides which repository file the hunk targets, whether the patch
    creates or deletes it, and returns the resulting patchmeta.  The
    fallback order of the conditionals below is significant.
    """
    # /dev/null on either side marks creation/removal in traditional diffs
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    if not fname:
        # Neither side exists (or was chosen): fall back to whichever side
        # is not /dev/null, preferring the backup heuristic.
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1776 1776
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def takewhile(first, keep):
        """Collect lines from lr, starting with first, while keep() holds."""
        collected = [first]
        while True:
            nextline = lr.readline()
            if nextline == '':
                break
            if not keep(nextline):
                lr.push(nextline)
                break
            collected.append(nextline)
        return collected

    for line in iter(lr.readline, ''):
        if line.startswith(('diff --git a/', 'diff -r ')):
            def notheader(l):
                parts = l.split(None, 1)
                return not parts or parts[0] not in ('---', 'diff')
            headerlines = takewhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                headerlines += [fromfile, tofile]
            else:
                lr.push(fromfile)
            yield 'file', headerlines
        elif line.startswith(' '):
            yield 'context', takewhile(line,
                                       lambda l: l.startswith((' ', '\\')))
        elif line.startswith(('-', '+')):
            yield 'hunk', takewhile(line,
                                    lambda l: l.startswith(('-', '+', '\\')))
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1824 1824
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    start = 0
    try:
        start = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # Unseekable input: slurp everything into a rewindable buffer.
        fp = stringio(lr.fp.read())
    sublr = linereader(fp)
    sublr.push(firstline)
    patches = readgitpatch(sublr)
    # Rewind so the caller's reader resumes where it left off.
    fp.seek(start)
    return patches
1850 1850
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # context: None = format unknown yet, True = context diff,
    # False = unified diff
    context = None
    lr = linereader(fp)

    for x in iter(lr.readline, ''):
        if state == BFILE and (
            (not context and x.startswith('@'))
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # A hunk belonging to the currently selected file.
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                # first hunk for this file: announce the file itself
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # flush metadata-only entries (renames, mode changes...) that
            # precede the one matching this diff header
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # emit any remaining metadata-only git entries
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1945 1945
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c

    ``binchunk`` is the delta stream: two length headers (source and
    destination size, each encoded as a run of bytes with the 0x80
    continuation bit set on all but the last) followed by copy/insert
    opcodes. ``data`` is the source content the copy opcodes read from.
    Returns the reconstructed destination content.
    """
    def deltahead(binchunk):
        # Return the number of bytes taken by the length header at the
        # start of binchunk: scan until a byte without the 0x80
        # continuation bit is found.
        i = 0
        for c in pycompat.bytestr(binchunk):
            i += 1
            if not (ord(c) & 0x80):
                return i
        return i
    out = ""
    s = deltahead(binchunk)
    binchunk = binchunk[s:]  # skip the source-size header
    s = deltahead(binchunk)
    binchunk = binchunk[s:]  # skip the destination-size header
    i = 0
    while i < len(binchunk):
        cmd = ord(binchunk[i:i + 1])
        i += 1
        if (cmd & 0x80):
            # copy opcode: bits 0-3 select which little-endian offset
            # bytes follow, bits 4-6 select which size bytes follow
            offset = 0
            size = 0
            if (cmd & 0x01):
                offset = ord(binchunk[i:i + 1])
                i += 1
            if (cmd & 0x02):
                offset |= ord(binchunk[i:i + 1]) << 8
                i += 1
            if (cmd & 0x04):
                offset |= ord(binchunk[i:i + 1]) << 16
                i += 1
            if (cmd & 0x08):
                offset |= ord(binchunk[i:i + 1]) << 24
                i += 1
            if (cmd & 0x10):
                size = ord(binchunk[i:i + 1])
                i += 1
            if (cmd & 0x20):
                size |= ord(binchunk[i:i + 1]) << 8
                i += 1
            if (cmd & 0x40):
                size |= ord(binchunk[i:i + 1]) << 16
                i += 1
            if size == 0:
                # an encoded size of zero means the maximum, 0x10000 bytes
                size = 0x10000
            offset_end = offset + size
            out += data[offset:offset_end]
        elif cmd != 0:
            # insert opcode: the next ``cmd`` bytes are literal data
            offset_end = i + cmd
            out += binchunk[i:offset_end]
            i += cmd
        else:
            # opcode 0 is reserved and must not appear in a valid delta
            raise PatchError(_('unexpected delta opcode 0'))
    return out
2001 2001
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Read a patch from fp and attempt to apply it via ``patchfile``.

    Returns 0 when the patch applied cleanly, 1 when it applied with
    fuzz, and -1 when any hunks were rejected.

    With 'strict' eolmode the patch content and patched files are
    handled in binary mode; any other mode ignores line endings while
    matching and normalizes them to 'eolmode' afterwards.
    """
    ret = _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                     prefix=prefix, eolmode=eolmode)
    return ret
2014 2014
2015 2015 def _canonprefix(repo, prefix):
2016 2016 if prefix:
2017 2017 prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
2018 2018 if prefix != '':
2019 2019 prefix += '/'
2020 2020 return prefix
2021 2021
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    """Core patch application loop.

    Streams parser events from iterhunks(fp) and applies them through
    ``patcher`` (one instance per patched file) against ``backend``,
    using ``store`` to stage copy/rename sources.

    Returns 0 on a clean apply, 1 if any hunk applied with fuzz, and
    -1 if any hunk was rejected.
    """
    prefix = _canonprefix(backend.repo, prefix)
    def pstrip(p):
        # strip - 1 here: git metadata paths are handled relative to
        # their own a/ and b/ components — see pathtransform
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None  # the patchfile currently receiving hunks

    for state, values in iterhunks(fp):
        if state == 'hunk':
            # hunks for a file whose open failed are silently skipped;
            # the failure was already counted as a reject
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # finish the previous file before starting the new one
            if current_file:
                rejects += current_file.close()
                current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                # non-git patch: synthesize metadata from the file names
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only change (no content hunk)
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    if data is None:
                        # This means that the old path does not exist
                        raise PatchError(_("source file '%s' does not exist")
                                         % gp.oldpath)
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError as inst:
                # opening the target failed: warn, count a reject, and
                # let the following 'hunk' events be skipped
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # stage the content of all copy/rename sources up front, so
            # later hunks can read them even after the source is changed
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise error.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
2106 2106
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.

    ``files`` is updated in place with the set of touched file names.
    returns whether patch was applied with fuzz factor.
    """

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % procutil.shellquote(cwd))
    cmd = ('%s %s -p%d < %s'
           % (patcher, ' '.join(args), strip, procutil.shellquote(patchname)))
    ui.debug('Using external patch tool: %s\n' % cmd)
    fp = procutil.popen(cmd, 'rb')
    # Initialized before the loop: a 'with fuzz' or 'FAILED' line emitted
    # before any 'patching file' line would otherwise hit these names
    # unassigned and raise UnboundLocalError.
    pf = ''
    printed_file = False
    try:
        for line in util.iterfile(fp):
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        if files:
            scmutil.marktouched(repo, files, similarity)
        code = fp.close()
        if code:
            raise PatchError(_("patch command failed: %s") %
                             procutil.explainexit(code))
    return fuzz
2150 2150
def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    """Apply ``patchobj`` (a file name or a file-like object) through
    ``backend``.

    The set of touched files is accumulated into ``files``. Returns
    True when the patch applied with fuzz and False when it applied
    cleanly; raises PatchError when any hunk was rejected.
    """
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol')
    if eolmode.lower() not in eolmodes:
        raise error.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        # patchobj may be a path...
        fp = open(patchobj, 'rb')
        ownfp = True
    except TypeError:
        # ...or an already-open file object we must not close
        fp = patchobj
        ownfp = False
    try:
        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
                        eolmode=eolmode)
    finally:
        if ownfp:
            fp.close()
        files.update(backend.close())
        store.close()
    if ret < 0:
        raise PatchError(_('patch failed to apply'))
    return ret > 0
2177 2177
def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """Apply <patchobj> to the working directory using the builtin
    patch code. Returns whether the patch applied with fuzz."""
    backend = workingbackend(ui, repo, similarity)
    fuzzed = patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
    return fuzzed
2184 2184
def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    """Apply <patchobj> through a repobackend built from ``ctx`` and
    ``store``. Returns whether the patch applied with fuzz."""
    backend = repobackend(ui, repo, ctx, store)
    fuzzed = patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
    return fuzzed
2189 2189
def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    The tool configured as ui.patch is used when set; otherwise the
    builtin patch code is.

    'eolmode' controls end-of-line handling:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when an external patcher program is used.

    Returns whether patch was applied with fuzz factor.
    """
    if files is None:
        files = set()
    externaltool = ui.config('ui', 'patch')
    if not externaltool:
        return internalpatch(ui, repo, patchname, strip, prefix, files,
                             eolmode, similarity)
    return _externalpatch(ui, repo, externaltool, patchname, strip,
                          files, similarity)
2211 2211
def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
    """Return the set of file names touched by the patch at ``patchpath``.

    Rename sources are included alongside their destinations.
    """
    backend = fsbackend(ui, repo.root)
    prefix = _canonprefix(repo, prefix)
    changed = set()
    with open(patchpath, 'rb') as fp:
        for state, values in iterhunks(fp):
            if state in ('hunk', 'git'):
                # content events carry no new file names
                continue
            if state != 'file':
                raise error.Abort(_('unsupported parser state: %s') % state)
            afile, bfile, first_hunk, gp = values
            if not gp:
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            else:
                gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
                if gp.oldpath:
                    gp.oldpath = pathtransform(gp.oldpath, strip - 1,
                                               prefix)[1]
            changed.add(gp.path)
            if gp.op == 'RENAME':
                changed.add(gp.oldpath)
    return changed
2234 2234
class GitDiffRequired(Exception):
    """Raised to request regenerating a diff in git extended format.

    diffhunks() raises this from its losedata callback when plain patch
    output would lose data and the caller did not accept the loss.
    """
    pass
2237 2237
# Convenience aliases for the diff-options factories living in diffutil;
# note that ``diffopts`` is the same object as ``diffallopts``.
diffopts = diffutil.diffallopts
diffallopts = diffutil.diffallopts
difffeatureopts = diffutil.difffeatureopts
2241 2241
def diff(repo, node1=None, node2=None, match=None, changes=None,
         opts=None, losedatafn=None, prefix='', relroot='', copy=None,
         hunksfilterfn=None):
    '''Generate the diff between two nodes (or a node and the working
    directory) as a stream of text blocks.

    When node1 is None the first dirstate parent is used; when node2 is
    None the comparison is against the working directory.

    losedatafn(**kwarg) is invoked, with the name of the file being
    diffed as 'fn', whenever opts.upgrade=True and the current patch
    format cannot represent some change. Returning False upgrades the
    output to git format; returning True accepts the loss; raising
    aborts the diff. When losedatafn is None, output is always upgraded
    to git format as needed.

    prefix is prepended to every displayed file name (used for
    subrepos).

    relroot, if not empty, must be normalized with a trailing /; match
    patterns falling outside it are ignored.

    copy, if not empty, should map {dst@y: src@x} copy information.

    hunksfilterfn, if not None, is called with (filectx, hunks) and may
    yield filtered hunks.
    '''
    hunkstream = diffhunks(
        repo, node1=node1, node2=node2,
        match=match, changes=changes, opts=opts,
        losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
    )
    for fctx1, fctx2, hdr, hunks in hunkstream:
        if hunksfilterfn is not None:
            # If the file has been removed, fctx2 is None; but this should
            # not occur here since we catch removed files early in
            # logcmdutil.getlinerangerevs() for 'hg log -L'.
            assert fctx2 is not None, \
                'fctx2 unexpectly None in diff hunks filtering'
            hunks = hunksfilterfn(fctx2, hunks)
        pieces = []
        for hunkrange, hunklines in hunks:
            pieces.extend(hunklines)
        text = ''.join(pieces)
        if hdr and (text or len(hdr) > 1):
            yield '\n'.join(hdr) + '\n'
        if text:
            yield text
2288 2288
def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
              opts=None, losedatafn=None, prefix='', relroot='', copy=None):
    """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
    where `header` is a list of diff headers and `hunks` is an iterable of
    (`hunkrange`, `hunklines`) tuples.

    See diff() for the meaning of parameters.
    """

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        # small LRU (capped around 20 entries) of filelogs keyed by file
        # name, so repeated lookups of the same file reuse the filelog
        cache = {}
        order = collections.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if relroot:
        # restrict matches to the relative root
        relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path')
        match = matchmod.intersectmatchers(match, relrootmatch)

    if not changes:
        changes = ctx1.status(ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    if copy is None:
        copy = {}
        if opts.git or opts.upgrade:
            copy = copies.pathcopies(ctx1, ctx2, match=match)

    if relroot:
        # filter out copies where source side isn't inside the relative root
        # (copies.pathcopies() already filtered out the destination)
        copy = {dst: src for dst, src in copy.iteritems()
                if src.startswith(relroot)}

    modifiedset = set(modified)
    addedset = set(added)
    removedset = set(removed)
    for f in modified:
        if f not in ctx1:
            # Fix up added, since merged-in additions appear as
            # modifications during merges
            modifiedset.remove(f)
            addedset.add(f)
    for f in removed:
        if f not in ctx1:
            # Merged-in additions that are then removed are reported as removed.
            # They are not in ctx1, so we don't want to show them in the diff.
            removedset.remove(f)
    modified = sorted(modifiedset)
    added = sorted(addedset)
    removed = sorted(removedset)
    for dst, src in list(copy.items()):
        if src not in ctx1:
            # Files merged in during a merge and then copied/renamed are
            # reported as copies. We want to show them in the diff as additions.
            del copy[dst]

    # prefetch file revisions for both sides in one pass before diffing
    prefetchmatch = scmutil.matchfiles(
        repo, list(modifiedset | addedset | removedset))
    scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix, relroot)
    if opts.upgrade and not opts.git:
        try:
            def losedata(fn):
                # escalate to git format unless the caller accepts the loss
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
2392 2392
def diffsinglehunk(hunklines):
    """Emit (text, label) tokens for the '-'/'+' lines of one hunk."""
    for line in hunklines:
        if line.startswith('-'):
            label = 'diff.deleted'
        elif line.startswith('+'):
            label = 'diff.inserted'
        else:
            raise error.ProgrammingError('unexpected hunk line: %s' % line)
        # peel off the line ending, then any trailing whitespace
        chompline = line.rstrip('\r\n')
        stripline = chompline.rstrip()
        # tabs get their own label so they can be highlighted
        for segment in tabsplitter.findall(stripline):
            if segment.startswith('\t'):
                yield (segment, 'diff.tab')
            else:
                yield (segment, label)

        trailing = chompline[len(stripline):]
        if trailing:
            yield (trailing, 'diff.trailingwhitespace')
        eol = line[len(chompline):]
        if eol:
            yield (eol, '')
2416 2416
def diffsinglehunkinline(hunklines):
    """yield tokens for a list of lines in a single hunk, with inline colors

    Runs a word-level diff between the deleted and inserted sides so
    changed words get '.changed' labels and unchanged words '.unchanged'.
    """
    # prepare deleted, and inserted content
    a = ''
    b = ''
    for line in hunklines:
        if line[0:1] == '-':
            a += line[1:]
        elif line[0:1] == '+':
            b += line[1:]
        else:
            raise error.ProgrammingError('unexpected hunk line: %s' % line)
    # fast path: if either side is empty, use diffsinglehunk
    if not a or not b:
        for t in diffsinglehunk(hunklines):
            yield t
        return
    # re-split the content into words
    al = wordsplitter.findall(a)
    bl = wordsplitter.findall(b)
    # re-arrange the words to lines since the diff algorithm is line-based
    aln = [s if s == '\n' else s + '\n' for s in al]
    bln = [s if s == '\n' else s + '\n' for s in bl]
    an = ''.join(aln)
    bn = ''.join(bln)
    # run the diff algorithm, prepare atokens and btokens
    atokens = []
    btokens = []
    blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
    for (a1, a2, b1, b2), btype in blocks:
        # '!' marks a changed block; anything else is unchanged
        changed = btype == '!'
        for token in mdiff.splitnewlines(''.join(al[a1:a2])):
            atokens.append((changed, token))
        for token in mdiff.splitnewlines(''.join(bl[b1:b2])):
            btokens.append((changed, token))

    # yield deleted tokens, then inserted ones
    for prefix, label, tokens in [('-', 'diff.deleted', atokens),
                                  ('+', 'diff.inserted', btokens)]:
        nextisnewline = True
        for changed, token in tokens:
            if nextisnewline:
                # emit the '-'/'+' marker at the start of each output line
                yield (prefix, label)
                nextisnewline = False
            # special handling line end
            isendofline = token.endswith('\n')
            if isendofline:
                chomp = token[:-1] # chomp
                if chomp.endswith('\r'):
                    chomp = chomp[:-1]
                endofline = token[len(chomp):]
                token = chomp.rstrip() # detect spaces at the end
                endspaces = chomp[len(token):]
            # scan tabs
            for maybetab in tabsplitter.findall(token):
                if b'\t' == maybetab[0:1]:
                    currentlabel = 'diff.tab'
                else:
                    if changed:
                        currentlabel = label + '.changed'
                    else:
                        currentlabel = label + '.unchanged'
                yield (maybetab, currentlabel)
            if isendofline:
                if endspaces:
                    yield (endspaces, 'diff.trailingwhitespace')
                yield (endofline, '')
                nextisnewline = True
2485 2485
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()

    Diff output from func(*args, **kw) is split into lines; header lines
    get the diff.* header labels, hunk content lines are buffered and
    delegated to diffsinglehunk/diffsinglehunkinline for token-level
    labeling.
    '''
    if kw.get(r'opts') and kw[r'opts'].worddiff:
        dodiffhunk = diffsinglehunkinline
    else:
        dodiffhunk = diffsinglehunk
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('index', 'diff.extended'),
                    ('similarity', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    textprefixes = [('@', 'diff.hunk'),
                    # - and + are handled by diffsinglehunk
                   ]
    head = False

    # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
    hunkbuffer = []
    def consumehunkbuffer():
        # flush the buffered hunk lines through the hunk tokenizer
        if hunkbuffer:
            for token in dodiffhunk(hunkbuffer):
                yield token
            hunkbuffer[:] = []

    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        linecount = len(lines)
        for i, line in enumerate(lines):
            if head:
                # a '@@' line ends the header section
                if line.startswith('@'):
                    head = False
            else:
                if line and not line.startswith((' ', '+', '-', '@', '\\')):
                    head = True
            diffline = False
            if not head and line and line.startswith(('+', '-')):
                diffline = True

            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            if diffline:
                # buffered
                bufferedline = line
                if i + 1 < linecount:
                    bufferedline += "\n"
                hunkbuffer.append(bufferedline)
            else:
                # unbuffered
                for token in consumehunkbuffer():
                    yield token
                stripline = line.rstrip()
                for prefix, label in prefixes:
                    if stripline.startswith(prefix):
                        yield (stripline, label)
                        if line != stripline:
                            yield (line[len(stripline):],
                                   'diff.trailingwhitespace')
                        break
                else:
                    yield (line, '')
                if i + 1 < linecount:
                    yield ('\n', '')
    for token in consumehunkbuffer():
        yield token
2556 2556
def diffui(*args, **kw):
    """like diff(), but yields 2-tuples of (output, label) for ui.write()

    Simply wraps diff() with difflabel() so every output chunk carries
    its color/style label.
    """
    labelled = difflabel(diff, *args, **kw)
    return labelled
2560 2560
2561 2561 def _filepairs(modified, added, removed, copy, opts):
2562 2562 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2563 2563 before and f2 is the the name after. For added files, f1 will be None,
2564 2564 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2565 2565 or 'rename' (the latter two only if opts.git is set).'''
2566 2566 gone = set()
2567 2567
2568 2568 copyto = dict([(v, k) for k, v in copy.items()])
2569 2569
2570 2570 addedset, removedset = set(added), set(removed)
2571 2571
2572 2572 for f in sorted(modified + added + removed):
2573 2573 copyop = None
2574 2574 f1, f2 = f, f
2575 2575 if f in addedset:
2576 2576 f1 = None
2577 2577 if f in copy:
2578 2578 if opts.git:
2579 2579 f1 = copy[f]
2580 2580 if f1 in removedset and f1 not in gone:
2581 2581 copyop = 'rename'
2582 2582 gone.add(f1)
2583 2583 else:
2584 2584 copyop = 'copy'
2585 2585 elif f in removedset:
2586 2586 f2 = None
2587 2587 if opts.git:
2588 2588 # have we already reported a copy above?
2589 2589 if (f in copyto and copyto[f] in addedset
2590 2590 and copy[copyto[f]] == f):
2591 2591 continue
2592 2592 yield f1, f2, copyop
2593 2593
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix, relroot):
    '''given input data, generate a diff and yield it in blocks

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    relroot is removed and prefix is added to every path in the diff output.

    If relroot is not empty, this function expects every path in modified,
    added, removed and copy to start with it.'''

    def gitindex(text):
        # git-style blob id: sha1 over "blob <len>\0<content>"
        if not text:
            text = ""
        l = len(text)
        s = hashlib.sha1('blob %d\0' % l)
        s.update(text)
        return hex(s.digest())

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    def diffline(f, revs):
        # plain-style "diff -r <rev> ... <file>" header line
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    def isempty(fctx):
        return fctx is None or fctx.size() == 0

    date1 = dateutil.datestr(ctx1.date())
    date2 = dateutil.datestr(ctx2.date())

    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
                          or repo.ui.configbool('devel', 'check-relroot')):
        # devel sanity check: every incoming path must live under relroot
        for f in modified + added + removed + list(copy) + list(copy.values()):
            if f is not None and not f.startswith(relroot):
                raise AssertionError(
                    "file %s doesn't start with relroot %s" % (f, relroot))

    for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        fctx1 = None
        fctx2 = None
        flag1 = None
        flag2 = None
        if f1:
            fctx1 = getfilectx(f1, ctx1)
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            fctx2 = getfilectx(f2, ctx2)
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        # if binary is True, output "summary" or "base85", but not "text diff"
        if opts.text:
            binary = False
        else:
            binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)

        if losedatafn and not opts.git:
            # give the caller a chance to upgrade to git format before
            # information that plain patches cannot express is dropped
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and isempty(fctx2)) or
                # empty file deletion
                (isempty(fctx1) and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        path1 = f1 or f2
        path2 = f2 or f1
        # rewrite display paths: drop relroot, prepend prefix
        path1 = posixpath.join(prefix, path1[len(relroot):])
        path2 = posixpath.join(prefix, path2[len(relroot):])
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1:  # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2:  # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else:  # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    if opts.showsimilarity:
                        sim = similar.score(ctx1[path1], ctx2[path2]) * 100
                        header.append('similarity index %d%%' % sim)
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        #  fctx.is  | diffopts                | what to   | is fctx.data()
        #  binary() | text nobinary git index | output?   | outputted?
        # ------------------------------------|----------------------------
        #  yes      | no   no       no   *    | summary   | no
        #  yes      | no   no       yes  *    | base85    | yes
        #  yes      | no   yes      no   *    | summary   | no
        #  yes      | no   yes      yes  0    | summary   | no
        #  yes      | no   yes      yes  >0   | summary   | semi [1]
        #  yes      | yes  *        *    *    | text diff | yes
        #  no       | *    *        *    *    | text diff | yes
        # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
        if binary and (not opts.git or (opts.git and opts.nobinary and not
                                        opts.index)):
            # fast path: no binary content will be displayed, content1 and
            # content2 are only used for equivalent test. cmp() could have a
            # fast path.
            if fctx1 is not None:
                content1 = b'\0'
            if fctx2 is not None:
                if fctx1 is not None and not fctx1.cmp(fctx2):
                    content2 = b'\0'  # not different
                else:
                    content2 = b'\0\0'
        else:
            # normal path: load contents
            if fctx1 is not None:
                content1 = fctx1.data()
            if fctx2 is not None:
                content2 = fctx2.data()

        if binary and opts.git and not opts.nobinary:
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
            # single pseudo-hunk carrying the whole base85 payload
            hunks = (None, [text]),
        else:
            if opts.git and opts.index > 0:
                flag = flag1
                if flag is None:
                    flag = flag2
                header.append('index %s..%s %s' %
                              (gitindex(content1)[0:opts.index],
                               gitindex(content2)[0:opts.index],
                               gitmode[flag]))

            uheaders, hunks = mdiff.unidiff(content1, date1,
                                            content2, date2,
                                            path1, path2,
                                            binary=binary, opts=opts)
            header.extend(uheaders)
        yield fctx1, fctx2, header, hunks
2752 2752
def diffstatsum(stats):
    """Aggregate (filename, adds, removes, isbinary) diffstat tuples.

    Returns (maxnamewidth, maxtotal, addtotal, removetotal, anybinary).
    """
    maxfile = 0
    maxtotal = 0
    addtotal = 0
    removetotal = 0
    binary = False
    for filename, adds, removes, isbinary in stats:
        namewidth = encoding.colwidth(filename)
        if namewidth > maxfile:
            maxfile = namewidth
        total = adds + removes
        if total > maxtotal:
            maxtotal = total
        addtotal += adds
        removetotal += removes
        binary = binary or isbinary

    return maxfile, maxtotal, addtotal, removetotal, binary
2763 2763
def diffstatdata(lines):
    """Parse diff output lines into per-file statistics.

    Returns a list of (filename, adds, removes, isbinary) tuples, one
    per file section found in ``lines``.
    """
    # Raw byte string: \s would be an invalid escape in a plain literal.
    # (The duplicated pre-fix, non-raw assignment that appeared here has
    # been dropped; this single definition is the intended one.)
    diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        # flush the counters of the current file section, if any
        if filename:
            results.append((filename, adds, removes, isbinary))

    # inheader is used to track if a line is in the
    # header portion of the diff. This helps properly account
    # for lines that start with '--' or '++'
    inheader = False

    for line in lines:
        if line.startswith('diff'):
            addresult()
            # starting a new file diff
            # set numbers to 0 and reset inheader
            inheader = True
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('@@'):
            inheader = False
        elif line.startswith('+') and not inheader:
            adds += 1
        elif line.startswith('-') and not inheader:
            removes += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            isbinary = True
        elif line.startswith('rename from'):
            filename = line[12:]
        elif line.startswith('rename to'):
            filename += ' => %s' % line[10:]
    addresult()
    return results
2806 2806
def diffstat(lines, width=80):
    """Render a diffstat histogram for the given diff ``lines``.

    Each changed file gets one row showing its name, its change count
    (or 'Bin' for binary files) and a '+'/'-' bar scaled to fit within
    ``width`` columns, followed by a one-line summary. Returns the whole
    report as a single string (empty if the diff touches no files).
    """
    stats = diffstatdata(lines)
    widest, busiest, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    # width of the numeric column; 'Bin' needs at least three cells
    cwidth = len(str(busiest))
    if hasbinary and cwidth < 3:
        cwidth = 3
    # room left for the +/- histogram, but never less than 10 cells
    gwidth = max(width - cwidth - widest - 6, 10)

    def scale(count):
        if busiest <= gwidth:
            return count
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(count * gwidth // busiest, int(bool(count)))

    rows = []
    for fname, plus, minus, isbin in stats:
        count = 'Bin' if isbin else '%d' % (plus + minus)
        # pad by display columns so wide characters line up
        pad = ' ' * (widest - encoding.colwidth(fname))
        rows.append(' %s%s | %*s %s%s\n' %
                    (fname, pad, cwidth, count,
                     '+' * scale(plus), '-' * scale(minus)))

    if stats:
        rows.append(_(' %d files changed, %d insertions(+), '
                      '%d deletions(-)\n')
                    % (len(stats), totaladds, totalremoves))

    return ''.join(rows)
2844 2844
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''

    for line in diffstat(*args, **kw).splitlines():
        # histogram rows end in the +/- graph; anything else is plain
        if not line or line[-1] not in '+-':
            yield (line, '')
            continue
        name, graph = line.rsplit(' ', 1)
        yield (name + ' ', '')
        plusrun = re.search(br'\++', graph)
        if plusrun:
            yield (plusrun.group(0), 'diffstat.inserted')
        minusrun = re.search(br'-+', graph)
        if minusrun:
            yield (minusrun.group(0), 'diffstat.deleted')
    # trailing newline so the caller need not add one
    yield ('\n', '')
General Comments 0
You need to be logged in to leave comments. Login now