##// END OF EJS Templates
registrar: move cmdutil.command to registrar module (API)...
Yuya Nishihara -
r32337:46ba2cdd default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,59 +1,59 b''
1 # debugshell extension
1 # debugshell extension
2 """a python shell with repo, changelog & manifest objects"""
2 """a python shell with repo, changelog & manifest objects"""
3
3
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5 import code
5 import code
6 import mercurial
6 import mercurial
7 import sys
7 import sys
8 from mercurial import (
8 from mercurial import (
9 cmdutil,
10 demandimport,
9 demandimport,
10 registrar,
11 )
11 )
12
12
13 cmdtable = {}
13 cmdtable = {}
14 command = cmdutil.command(cmdtable)
14 command = registrar.command(cmdtable)
15
15
16 def pdb(ui, repo, msg, **opts):
16 def pdb(ui, repo, msg, **opts):
17 objects = {
17 objects = {
18 'mercurial': mercurial,
18 'mercurial': mercurial,
19 'repo': repo,
19 'repo': repo,
20 'cl': repo.changelog,
20 'cl': repo.changelog,
21 'mf': repo.manifestlog,
21 'mf': repo.manifestlog,
22 }
22 }
23
23
24 code.interact(msg, local=objects)
24 code.interact(msg, local=objects)
25
25
26 def ipdb(ui, repo, msg, **opts):
26 def ipdb(ui, repo, msg, **opts):
27 import IPython
27 import IPython
28
28
29 cl = repo.changelog
29 cl = repo.changelog
30 mf = repo.manifestlog
30 mf = repo.manifestlog
31 cl, mf # use variables to appease pyflakes
31 cl, mf # use variables to appease pyflakes
32
32
33 IPython.embed()
33 IPython.embed()
34
34
35 @command('debugshell|dbsh', [])
35 @command('debugshell|dbsh', [])
36 def debugshell(ui, repo, **opts):
36 def debugshell(ui, repo, **opts):
37 bannermsg = "loaded repo : %s\n" \
37 bannermsg = "loaded repo : %s\n" \
38 "using source: %s" % (repo.root,
38 "using source: %s" % (repo.root,
39 mercurial.__path__[0])
39 mercurial.__path__[0])
40
40
41 pdbmap = {
41 pdbmap = {
42 'pdb' : 'code',
42 'pdb' : 'code',
43 'ipdb' : 'IPython'
43 'ipdb' : 'IPython'
44 }
44 }
45
45
46 debugger = ui.config("ui", "debugger")
46 debugger = ui.config("ui", "debugger")
47 if not debugger:
47 if not debugger:
48 debugger = 'pdb'
48 debugger = 'pdb'
49
49
50 # if IPython doesn't exist, fallback to code.interact
50 # if IPython doesn't exist, fallback to code.interact
51 try:
51 try:
52 with demandimport.deactivated():
52 with demandimport.deactivated():
53 __import__(pdbmap[debugger])
53 __import__(pdbmap[debugger])
54 except ImportError:
54 except ImportError:
55 ui.warn(("%s debugger specified but %s module was not found\n")
55 ui.warn(("%s debugger specified but %s module was not found\n")
56 % (debugger, pdbmap[debugger]))
56 % (debugger, pdbmap[debugger]))
57 debugger = 'pdb'
57 debugger = 'pdb'
58
58
59 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
59 getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
@@ -1,1324 +1,1331 b''
1 # perf.py - performance test routines
1 # perf.py - performance test routines
2 '''helper extension to measure performance'''
2 '''helper extension to measure performance'''
3
3
4 # "historical portability" policy of perf.py:
4 # "historical portability" policy of perf.py:
5 #
5 #
6 # We have to do:
6 # We have to do:
7 # - make perf.py "loadable" with as wide Mercurial version as possible
7 # - make perf.py "loadable" with as wide Mercurial version as possible
8 # This doesn't mean that perf commands work correctly with that Mercurial.
8 # This doesn't mean that perf commands work correctly with that Mercurial.
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
10 # - make historical perf command work correctly with as wide Mercurial
10 # - make historical perf command work correctly with as wide Mercurial
11 # version as possible
11 # version as possible
12 #
12 #
13 # We have to do, if possible with reasonable cost:
13 # We have to do, if possible with reasonable cost:
14 # - make recent perf command for historical feature work correctly
14 # - make recent perf command for historical feature work correctly
15 # with early Mercurial
15 # with early Mercurial
16 #
16 #
17 # We don't have to do:
17 # We don't have to do:
18 # - make perf command for recent feature work correctly with early
18 # - make perf command for recent feature work correctly with early
19 # Mercurial
19 # Mercurial
20
20
21 from __future__ import absolute_import
21 from __future__ import absolute_import
22 import functools
22 import functools
23 import gc
23 import gc
24 import os
24 import os
25 import random
25 import random
26 import sys
26 import sys
27 import time
27 import time
28 from mercurial import (
28 from mercurial import (
29 changegroup,
29 changegroup,
30 cmdutil,
30 cmdutil,
31 commands,
31 commands,
32 copies,
32 copies,
33 error,
33 error,
34 extensions,
34 extensions,
35 mdiff,
35 mdiff,
36 merge,
36 merge,
37 util,
37 util,
38 )
38 )
39
39
40 # for "historical portability":
40 # for "historical portability":
41 # try to import modules separately (in dict order), and ignore
41 # try to import modules separately (in dict order), and ignore
42 # failure, because these aren't available with early Mercurial
42 # failure, because these aren't available with early Mercurial
43 try:
43 try:
44 from mercurial import branchmap # since 2.5 (or bcee63733aad)
44 from mercurial import branchmap # since 2.5 (or bcee63733aad)
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47 try:
47 try:
48 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
48 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
49 except ImportError:
49 except ImportError:
50 pass
50 pass
51 try:
51 try:
52 from mercurial import registrar # since 3.7 (or 37d50250b696)
53 dir(registrar) # forcibly load it
54 except ImportError:
55 registrar = None
56 try:
52 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
57 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
53 except ImportError:
58 except ImportError:
54 pass
59 pass
55 try:
60 try:
56 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
61 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
57 except ImportError:
62 except ImportError:
58 pass
63 pass
59
64
60 # for "historical portability":
65 # for "historical portability":
61 # define util.safehasattr forcibly, because util.safehasattr has been
66 # define util.safehasattr forcibly, because util.safehasattr has been
62 # available since 1.9.3 (or 94b200a11cf7)
67 # available since 1.9.3 (or 94b200a11cf7)
63 _undefined = object()
68 _undefined = object()
64 def safehasattr(thing, attr):
69 def safehasattr(thing, attr):
65 return getattr(thing, attr, _undefined) is not _undefined
70 return getattr(thing, attr, _undefined) is not _undefined
66 setattr(util, 'safehasattr', safehasattr)
71 setattr(util, 'safehasattr', safehasattr)
67
72
68 # for "historical portability":
73 # for "historical portability":
69 # define util.timer forcibly, because util.timer has been available
74 # define util.timer forcibly, because util.timer has been available
70 # since ae5d60bb70c9
75 # since ae5d60bb70c9
71 if safehasattr(time, 'perf_counter'):
76 if safehasattr(time, 'perf_counter'):
72 util.timer = time.perf_counter
77 util.timer = time.perf_counter
73 elif os.name == 'nt':
78 elif os.name == 'nt':
74 util.timer = time.clock
79 util.timer = time.clock
75 else:
80 else:
76 util.timer = time.time
81 util.timer = time.time
77
82
78 # for "historical portability":
83 # for "historical portability":
79 # use locally defined empty option list, if formatteropts isn't
84 # use locally defined empty option list, if formatteropts isn't
80 # available, because commands.formatteropts has been available since
85 # available, because commands.formatteropts has been available since
81 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
86 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
82 # available since 2.2 (or ae5f92e154d3)
87 # available since 2.2 (or ae5f92e154d3)
83 formatteropts = getattr(commands, "formatteropts", [])
88 formatteropts = getattr(commands, "formatteropts", [])
84
89
85 # for "historical portability":
90 # for "historical portability":
86 # use locally defined option list, if debugrevlogopts isn't available,
91 # use locally defined option list, if debugrevlogopts isn't available,
87 # because commands.debugrevlogopts has been available since 3.7 (or
92 # because commands.debugrevlogopts has been available since 3.7 (or
88 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
93 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
89 # since 1.9 (or a79fea6b3e77).
94 # since 1.9 (or a79fea6b3e77).
90 revlogopts = getattr(commands, "debugrevlogopts", [
95 revlogopts = getattr(commands, "debugrevlogopts", [
91 ('c', 'changelog', False, ('open changelog')),
96 ('c', 'changelog', False, ('open changelog')),
92 ('m', 'manifest', False, ('open manifest')),
97 ('m', 'manifest', False, ('open manifest')),
93 ('', 'dir', False, ('open directory manifest')),
98 ('', 'dir', False, ('open directory manifest')),
94 ])
99 ])
95
100
96 cmdtable = {}
101 cmdtable = {}
97
102
98 # for "historical portability":
103 # for "historical portability":
99 # define parsealiases locally, because cmdutil.parsealiases has been
104 # define parsealiases locally, because cmdutil.parsealiases has been
100 # available since 1.5 (or 6252852b4332)
105 # available since 1.5 (or 6252852b4332)
101 def parsealiases(cmd):
106 def parsealiases(cmd):
102 return cmd.lstrip("^").split("|")
107 return cmd.lstrip("^").split("|")
103
108
104 if safehasattr(cmdutil, 'command'):
109 if safehasattr(registrar, 'command'):
110 command = registrar.command(cmdtable)
111 elif safehasattr(cmdutil, 'command'):
105 import inspect
112 import inspect
106 command = cmdutil.command(cmdtable)
113 command = cmdutil.command(cmdtable)
107 if 'norepo' not in inspect.getargspec(command)[0]:
114 if 'norepo' not in inspect.getargspec(command)[0]:
108 # for "historical portability":
115 # for "historical portability":
109 # wrap original cmdutil.command, because "norepo" option has
116 # wrap original cmdutil.command, because "norepo" option has
110 # been available since 3.1 (or 75a96326cecb)
117 # been available since 3.1 (or 75a96326cecb)
111 _command = command
118 _command = command
112 def command(name, options=(), synopsis=None, norepo=False):
119 def command(name, options=(), synopsis=None, norepo=False):
113 if norepo:
120 if norepo:
114 commands.norepo += ' %s' % ' '.join(parsealiases(name))
121 commands.norepo += ' %s' % ' '.join(parsealiases(name))
115 return _command(name, list(options), synopsis)
122 return _command(name, list(options), synopsis)
116 else:
123 else:
117 # for "historical portability":
124 # for "historical portability":
118 # define "@command" annotation locally, because cmdutil.command
125 # define "@command" annotation locally, because cmdutil.command
119 # has been available since 1.9 (or 2daa5179e73f)
126 # has been available since 1.9 (or 2daa5179e73f)
120 def command(name, options=(), synopsis=None, norepo=False):
127 def command(name, options=(), synopsis=None, norepo=False):
121 def decorator(func):
128 def decorator(func):
122 if synopsis:
129 if synopsis:
123 cmdtable[name] = func, list(options), synopsis
130 cmdtable[name] = func, list(options), synopsis
124 else:
131 else:
125 cmdtable[name] = func, list(options)
132 cmdtable[name] = func, list(options)
126 if norepo:
133 if norepo:
127 commands.norepo += ' %s' % ' '.join(parsealiases(name))
134 commands.norepo += ' %s' % ' '.join(parsealiases(name))
128 return func
135 return func
129 return decorator
136 return decorator
130
137
131 def getlen(ui):
138 def getlen(ui):
132 if ui.configbool("perf", "stub"):
139 if ui.configbool("perf", "stub"):
133 return lambda x: 1
140 return lambda x: 1
134 return len
141 return len
135
142
136 def gettimer(ui, opts=None):
143 def gettimer(ui, opts=None):
137 """return a timer function and formatter: (timer, formatter)
144 """return a timer function and formatter: (timer, formatter)
138
145
139 This function exists to gather the creation of formatter in a single
146 This function exists to gather the creation of formatter in a single
140 place instead of duplicating it in all performance commands."""
147 place instead of duplicating it in all performance commands."""
141
148
142 # enforce an idle period before execution to counteract power management
149 # enforce an idle period before execution to counteract power management
143 # experimental config: perf.presleep
150 # experimental config: perf.presleep
144 time.sleep(getint(ui, "perf", "presleep", 1))
151 time.sleep(getint(ui, "perf", "presleep", 1))
145
152
146 if opts is None:
153 if opts is None:
147 opts = {}
154 opts = {}
148 # redirect all to stderr unless buffer api is in use
155 # redirect all to stderr unless buffer api is in use
149 if not ui._buffers:
156 if not ui._buffers:
150 ui = ui.copy()
157 ui = ui.copy()
151 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
158 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
152 if uifout:
159 if uifout:
153 # for "historical portability":
160 # for "historical portability":
154 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
161 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
155 uifout.set(ui.ferr)
162 uifout.set(ui.ferr)
156
163
157 # get a formatter
164 # get a formatter
158 uiformatter = getattr(ui, 'formatter', None)
165 uiformatter = getattr(ui, 'formatter', None)
159 if uiformatter:
166 if uiformatter:
160 fm = uiformatter('perf', opts)
167 fm = uiformatter('perf', opts)
161 else:
168 else:
162 # for "historical portability":
169 # for "historical portability":
163 # define formatter locally, because ui.formatter has been
170 # define formatter locally, because ui.formatter has been
164 # available since 2.2 (or ae5f92e154d3)
171 # available since 2.2 (or ae5f92e154d3)
165 from mercurial import node
172 from mercurial import node
166 class defaultformatter(object):
173 class defaultformatter(object):
167 """Minimized composition of baseformatter and plainformatter
174 """Minimized composition of baseformatter and plainformatter
168 """
175 """
169 def __init__(self, ui, topic, opts):
176 def __init__(self, ui, topic, opts):
170 self._ui = ui
177 self._ui = ui
171 if ui.debugflag:
178 if ui.debugflag:
172 self.hexfunc = node.hex
179 self.hexfunc = node.hex
173 else:
180 else:
174 self.hexfunc = node.short
181 self.hexfunc = node.short
175 def __nonzero__(self):
182 def __nonzero__(self):
176 return False
183 return False
177 __bool__ = __nonzero__
184 __bool__ = __nonzero__
178 def startitem(self):
185 def startitem(self):
179 pass
186 pass
180 def data(self, **data):
187 def data(self, **data):
181 pass
188 pass
182 def write(self, fields, deftext, *fielddata, **opts):
189 def write(self, fields, deftext, *fielddata, **opts):
183 self._ui.write(deftext % fielddata, **opts)
190 self._ui.write(deftext % fielddata, **opts)
184 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
191 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
185 if cond:
192 if cond:
186 self._ui.write(deftext % fielddata, **opts)
193 self._ui.write(deftext % fielddata, **opts)
187 def plain(self, text, **opts):
194 def plain(self, text, **opts):
188 self._ui.write(text, **opts)
195 self._ui.write(text, **opts)
189 def end(self):
196 def end(self):
190 pass
197 pass
191 fm = defaultformatter(ui, 'perf', opts)
198 fm = defaultformatter(ui, 'perf', opts)
192
199
193 # stub function, runs code only once instead of in a loop
200 # stub function, runs code only once instead of in a loop
194 # experimental config: perf.stub
201 # experimental config: perf.stub
195 if ui.configbool("perf", "stub"):
202 if ui.configbool("perf", "stub"):
196 return functools.partial(stub_timer, fm), fm
203 return functools.partial(stub_timer, fm), fm
197 return functools.partial(_timer, fm), fm
204 return functools.partial(_timer, fm), fm
198
205
199 def stub_timer(fm, func, title=None):
206 def stub_timer(fm, func, title=None):
200 func()
207 func()
201
208
202 def _timer(fm, func, title=None):
209 def _timer(fm, func, title=None):
203 gc.collect()
210 gc.collect()
204 results = []
211 results = []
205 begin = util.timer()
212 begin = util.timer()
206 count = 0
213 count = 0
207 while True:
214 while True:
208 ostart = os.times()
215 ostart = os.times()
209 cstart = util.timer()
216 cstart = util.timer()
210 r = func()
217 r = func()
211 cstop = util.timer()
218 cstop = util.timer()
212 ostop = os.times()
219 ostop = os.times()
213 count += 1
220 count += 1
214 a, b = ostart, ostop
221 a, b = ostart, ostop
215 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
222 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
216 if cstop - begin > 3 and count >= 100:
223 if cstop - begin > 3 and count >= 100:
217 break
224 break
218 if cstop - begin > 10 and count >= 3:
225 if cstop - begin > 10 and count >= 3:
219 break
226 break
220
227
221 fm.startitem()
228 fm.startitem()
222
229
223 if title:
230 if title:
224 fm.write('title', '! %s\n', title)
231 fm.write('title', '! %s\n', title)
225 if r:
232 if r:
226 fm.write('result', '! result: %s\n', r)
233 fm.write('result', '! result: %s\n', r)
227 m = min(results)
234 m = min(results)
228 fm.plain('!')
235 fm.plain('!')
229 fm.write('wall', ' wall %f', m[0])
236 fm.write('wall', ' wall %f', m[0])
230 fm.write('comb', ' comb %f', m[1] + m[2])
237 fm.write('comb', ' comb %f', m[1] + m[2])
231 fm.write('user', ' user %f', m[1])
238 fm.write('user', ' user %f', m[1])
232 fm.write('sys', ' sys %f', m[2])
239 fm.write('sys', ' sys %f', m[2])
233 fm.write('count', ' (best of %d)', count)
240 fm.write('count', ' (best of %d)', count)
234 fm.plain('\n')
241 fm.plain('\n')
235
242
236 # utilities for historical portability
243 # utilities for historical portability
237
244
238 def getint(ui, section, name, default):
245 def getint(ui, section, name, default):
239 # for "historical portability":
246 # for "historical portability":
240 # ui.configint has been available since 1.9 (or fa2b596db182)
247 # ui.configint has been available since 1.9 (or fa2b596db182)
241 v = ui.config(section, name, None)
248 v = ui.config(section, name, None)
242 if v is None:
249 if v is None:
243 return default
250 return default
244 try:
251 try:
245 return int(v)
252 return int(v)
246 except ValueError:
253 except ValueError:
247 raise error.ConfigError(("%s.%s is not an integer ('%s')")
254 raise error.ConfigError(("%s.%s is not an integer ('%s')")
248 % (section, name, v))
255 % (section, name, v))
249
256
250 def safeattrsetter(obj, name, ignoremissing=False):
257 def safeattrsetter(obj, name, ignoremissing=False):
251 """Ensure that 'obj' has 'name' attribute before subsequent setattr
258 """Ensure that 'obj' has 'name' attribute before subsequent setattr
252
259
253 This function is aborted, if 'obj' doesn't have 'name' attribute
260 This function is aborted, if 'obj' doesn't have 'name' attribute
254 at runtime. This avoids overlooking removal of an attribute, which
261 at runtime. This avoids overlooking removal of an attribute, which
255 breaks assumption of performance measurement, in the future.
262 breaks assumption of performance measurement, in the future.
256
263
257 This function returns the object to (1) assign a new value, and
264 This function returns the object to (1) assign a new value, and
258 (2) restore an original value to the attribute.
265 (2) restore an original value to the attribute.
259
266
260 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
267 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
261 abortion, and this function returns None. This is useful to
268 abortion, and this function returns None. This is useful to
262 examine an attribute, which isn't ensured in all Mercurial
269 examine an attribute, which isn't ensured in all Mercurial
263 versions.
270 versions.
264 """
271 """
265 if not util.safehasattr(obj, name):
272 if not util.safehasattr(obj, name):
266 if ignoremissing:
273 if ignoremissing:
267 return None
274 return None
268 raise error.Abort(("missing attribute %s of %s might break assumption"
275 raise error.Abort(("missing attribute %s of %s might break assumption"
269 " of performance measurement") % (name, obj))
276 " of performance measurement") % (name, obj))
270
277
271 origvalue = getattr(obj, name)
278 origvalue = getattr(obj, name)
272 class attrutil(object):
279 class attrutil(object):
273 def set(self, newvalue):
280 def set(self, newvalue):
274 setattr(obj, name, newvalue)
281 setattr(obj, name, newvalue)
275 def restore(self):
282 def restore(self):
276 setattr(obj, name, origvalue)
283 setattr(obj, name, origvalue)
277
284
278 return attrutil()
285 return attrutil()
279
286
280 # utilities to examine each internal API changes
287 # utilities to examine each internal API changes
281
288
282 def getbranchmapsubsettable():
289 def getbranchmapsubsettable():
283 # for "historical portability":
290 # for "historical portability":
284 # subsettable is defined in:
291 # subsettable is defined in:
285 # - branchmap since 2.9 (or 175c6fd8cacc)
292 # - branchmap since 2.9 (or 175c6fd8cacc)
286 # - repoview since 2.5 (or 59a9f18d4587)
293 # - repoview since 2.5 (or 59a9f18d4587)
287 for mod in (branchmap, repoview):
294 for mod in (branchmap, repoview):
288 subsettable = getattr(mod, 'subsettable', None)
295 subsettable = getattr(mod, 'subsettable', None)
289 if subsettable:
296 if subsettable:
290 return subsettable
297 return subsettable
291
298
292 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
299 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
293 # branchmap and repoview modules exist, but subsettable attribute
300 # branchmap and repoview modules exist, but subsettable attribute
294 # doesn't)
301 # doesn't)
295 raise error.Abort(("perfbranchmap not available with this Mercurial"),
302 raise error.Abort(("perfbranchmap not available with this Mercurial"),
296 hint="use 2.5 or later")
303 hint="use 2.5 or later")
297
304
298 def getsvfs(repo):
305 def getsvfs(repo):
299 """Return appropriate object to access files under .hg/store
306 """Return appropriate object to access files under .hg/store
300 """
307 """
301 # for "historical portability":
308 # for "historical portability":
302 # repo.svfs has been available since 2.3 (or 7034365089bf)
309 # repo.svfs has been available since 2.3 (or 7034365089bf)
303 svfs = getattr(repo, 'svfs', None)
310 svfs = getattr(repo, 'svfs', None)
304 if svfs:
311 if svfs:
305 return svfs
312 return svfs
306 else:
313 else:
307 return getattr(repo, 'sopener')
314 return getattr(repo, 'sopener')
308
315
309 def getvfs(repo):
316 def getvfs(repo):
310 """Return appropriate object to access files under .hg
317 """Return appropriate object to access files under .hg
311 """
318 """
312 # for "historical portability":
319 # for "historical portability":
313 # repo.vfs has been available since 2.3 (or 7034365089bf)
320 # repo.vfs has been available since 2.3 (or 7034365089bf)
314 vfs = getattr(repo, 'vfs', None)
321 vfs = getattr(repo, 'vfs', None)
315 if vfs:
322 if vfs:
316 return vfs
323 return vfs
317 else:
324 else:
318 return getattr(repo, 'opener')
325 return getattr(repo, 'opener')
319
326
320 def repocleartagscachefunc(repo):
327 def repocleartagscachefunc(repo):
321 """Return the function to clear tags cache according to repo internal API
328 """Return the function to clear tags cache according to repo internal API
322 """
329 """
323 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
330 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
324 # in this case, setattr(repo, '_tagscache', None) or so isn't
331 # in this case, setattr(repo, '_tagscache', None) or so isn't
325 # correct way to clear tags cache, because existing code paths
332 # correct way to clear tags cache, because existing code paths
326 # expect _tagscache to be a structured object.
333 # expect _tagscache to be a structured object.
327 def clearcache():
334 def clearcache():
328 # _tagscache has been filteredpropertycache since 2.5 (or
335 # _tagscache has been filteredpropertycache since 2.5 (or
329 # 98c867ac1330), and delattr() can't work in such case
336 # 98c867ac1330), and delattr() can't work in such case
330 if '_tagscache' in vars(repo):
337 if '_tagscache' in vars(repo):
331 del repo.__dict__['_tagscache']
338 del repo.__dict__['_tagscache']
332 return clearcache
339 return clearcache
333
340
334 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
341 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
335 if repotags: # since 1.4 (or 5614a628d173)
342 if repotags: # since 1.4 (or 5614a628d173)
336 return lambda : repotags.set(None)
343 return lambda : repotags.set(None)
337
344
338 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
345 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
339 if repotagscache: # since 0.6 (or d7df759d0e97)
346 if repotagscache: # since 0.6 (or d7df759d0e97)
340 return lambda : repotagscache.set(None)
347 return lambda : repotagscache.set(None)
341
348
342 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
349 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
343 # this point, but it isn't so problematic, because:
350 # this point, but it isn't so problematic, because:
344 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
351 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
345 # in perftags() causes failure soon
352 # in perftags() causes failure soon
346 # - perf.py itself has been available since 1.1 (or eb240755386d)
353 # - perf.py itself has been available since 1.1 (or eb240755386d)
347 raise error.Abort(("tags API of this hg command is unknown"))
354 raise error.Abort(("tags API of this hg command is unknown"))
348
355
349 # perf commands
356 # perf commands
350
357
351 @command('perfwalk', formatteropts)
358 @command('perfwalk', formatteropts)
352 def perfwalk(ui, repo, *pats, **opts):
359 def perfwalk(ui, repo, *pats, **opts):
353 timer, fm = gettimer(ui, opts)
360 timer, fm = gettimer(ui, opts)
354 try:
361 try:
355 m = scmutil.match(repo[None], pats, {})
362 m = scmutil.match(repo[None], pats, {})
356 timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
363 timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
357 except Exception:
364 except Exception:
358 try:
365 try:
359 m = scmutil.match(repo[None], pats, {})
366 m = scmutil.match(repo[None], pats, {})
360 timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
367 timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
361 except Exception:
368 except Exception:
362 timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
369 timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
363 fm.end()
370 fm.end()
364
371
365 @command('perfannotate', formatteropts)
372 @command('perfannotate', formatteropts)
366 def perfannotate(ui, repo, f, **opts):
373 def perfannotate(ui, repo, f, **opts):
367 timer, fm = gettimer(ui, opts)
374 timer, fm = gettimer(ui, opts)
368 fc = repo['.'][f]
375 fc = repo['.'][f]
369 timer(lambda: len(fc.annotate(True)))
376 timer(lambda: len(fc.annotate(True)))
370 fm.end()
377 fm.end()
371
378
372 @command('perfstatus',
379 @command('perfstatus',
373 [('u', 'unknown', False,
380 [('u', 'unknown', False,
374 'ask status to look for unknown files')] + formatteropts)
381 'ask status to look for unknown files')] + formatteropts)
375 def perfstatus(ui, repo, **opts):
382 def perfstatus(ui, repo, **opts):
376 #m = match.always(repo.root, repo.getcwd())
383 #m = match.always(repo.root, repo.getcwd())
377 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
384 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
378 # False))))
385 # False))))
379 timer, fm = gettimer(ui, opts)
386 timer, fm = gettimer(ui, opts)
380 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
387 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
381 fm.end()
388 fm.end()
382
389
383 @command('perfaddremove', formatteropts)
390 @command('perfaddremove', formatteropts)
384 def perfaddremove(ui, repo, **opts):
391 def perfaddremove(ui, repo, **opts):
385 timer, fm = gettimer(ui, opts)
392 timer, fm = gettimer(ui, opts)
386 try:
393 try:
387 oldquiet = repo.ui.quiet
394 oldquiet = repo.ui.quiet
388 repo.ui.quiet = True
395 repo.ui.quiet = True
389 matcher = scmutil.match(repo[None])
396 matcher = scmutil.match(repo[None])
390 timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
397 timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
391 finally:
398 finally:
392 repo.ui.quiet = oldquiet
399 repo.ui.quiet = oldquiet
393 fm.end()
400 fm.end()
394
401
395 def clearcaches(cl):
402 def clearcaches(cl):
396 # behave somewhat consistently across internal API changes
403 # behave somewhat consistently across internal API changes
397 if util.safehasattr(cl, 'clearcaches'):
404 if util.safehasattr(cl, 'clearcaches'):
398 cl.clearcaches()
405 cl.clearcaches()
399 elif util.safehasattr(cl, '_nodecache'):
406 elif util.safehasattr(cl, '_nodecache'):
400 from mercurial.node import nullid, nullrev
407 from mercurial.node import nullid, nullrev
401 cl._nodecache = {nullid: nullrev}
408 cl._nodecache = {nullid: nullrev}
402 cl._nodepos = None
409 cl._nodepos = None
403
410
404 @command('perfheads', formatteropts)
411 @command('perfheads', formatteropts)
405 def perfheads(ui, repo, **opts):
412 def perfheads(ui, repo, **opts):
406 timer, fm = gettimer(ui, opts)
413 timer, fm = gettimer(ui, opts)
407 cl = repo.changelog
414 cl = repo.changelog
408 def d():
415 def d():
409 len(cl.headrevs())
416 len(cl.headrevs())
410 clearcaches(cl)
417 clearcaches(cl)
411 timer(d)
418 timer(d)
412 fm.end()
419 fm.end()
413
420
414 @command('perftags', formatteropts)
421 @command('perftags', formatteropts)
415 def perftags(ui, repo, **opts):
422 def perftags(ui, repo, **opts):
416 import mercurial.changelog
423 import mercurial.changelog
417 import mercurial.manifest
424 import mercurial.manifest
418 timer, fm = gettimer(ui, opts)
425 timer, fm = gettimer(ui, opts)
419 svfs = getsvfs(repo)
426 svfs = getsvfs(repo)
420 repocleartagscache = repocleartagscachefunc(repo)
427 repocleartagscache = repocleartagscachefunc(repo)
421 def t():
428 def t():
422 repo.changelog = mercurial.changelog.changelog(svfs)
429 repo.changelog = mercurial.changelog.changelog(svfs)
423 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
430 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
424 repocleartagscache()
431 repocleartagscache()
425 return len(repo.tags())
432 return len(repo.tags())
426 timer(t)
433 timer(t)
427 fm.end()
434 fm.end()
428
435
429 @command('perfancestors', formatteropts)
436 @command('perfancestors', formatteropts)
430 def perfancestors(ui, repo, **opts):
437 def perfancestors(ui, repo, **opts):
431 timer, fm = gettimer(ui, opts)
438 timer, fm = gettimer(ui, opts)
432 heads = repo.changelog.headrevs()
439 heads = repo.changelog.headrevs()
433 def d():
440 def d():
434 for a in repo.changelog.ancestors(heads):
441 for a in repo.changelog.ancestors(heads):
435 pass
442 pass
436 timer(d)
443 timer(d)
437 fm.end()
444 fm.end()
438
445
@command('perfancestorset', formatteropts)
def perfancestorset(ui, repo, revset, **opts):
    """Benchmark membership tests against a lazy ancestor set."""
    timer, fm = gettimer(ui, opts)
    revs = repo.revs(revset)
    headrevs = repo.changelog.headrevs()
    def probe():
        # build a fresh ancestor set, then test each revision against it
        ancestorset = repo.changelog.ancestors(headrevs)
        for rev in revs:
            rev in ancestorset
    timer(probe)
    fm.end()
450
457
@command('perfchangegroupchangelog', formatteropts +
         [('', 'version', '02', 'changegroup version'),
          ('r', 'rev', '', 'revisions to add to changegroup')])
def perfchangegroupchangelog(ui, repo, version='02', rev=None, **opts):
    """Benchmark producing a changelog group for a changegroup.

    This measures the time spent processing the changelog during a
    bundle operation. This occurs during `hg bundle` and on a server
    processing a `getbundle` wire protocol request (handles clones
    and pull requests).

    By default, all revisions are added to the changegroup.
    """
    cl = repo.changelog
    # resolve the revset (default: everything) to changelog nodes up front
    revs = [cl.lookup(r) for r in repo.revs(rev or 'all()')]
    bundler = changegroup.getbundler(version, repo)

    def lookup(node):
        # The real bundler reads the revision in order to access the
        # manifest node and files list. Do that here.
        cl.read(node)
        return node

    def d():
        # drain the generator; chunk production is what we measure
        for chunk in bundler.group(revs, cl, lookup):
            pass

    timer, fm = gettimer(ui, opts)
    timer(d)
    fm.end()
481
488
@command('perfdirs', formatteropts)
def perfdirs(ui, repo, **opts):
    # Benchmark computing the dirstate's directory map.
    timer, fm = gettimer(ui, opts)
    dirstate = repo.dirstate
    'a' in dirstate  # force the dirstate to be parsed before timing
    def d():
        dirstate.dirs()
        del dirstate._dirs  # drop the cached map so each run recomputes it
    timer(d)
    fm.end()
492
499
@command('perfdirstate', formatteropts)
def perfdirstate(ui, repo, **opts):
    """Benchmark loading the dirstate from disk."""
    timer, fm = gettimer(ui, opts)
    "a" in repo.dirstate  # warm-up: parse the dirstate once up front
    def reload():
        repo.dirstate.invalidate()
        "a" in repo.dirstate
    timer(reload)
    fm.end()
502
509
@command('perfdirstatedirs', formatteropts)
def perfdirstatedirs(ui, repo, **opts):
    # Benchmark computing the dirstate's ``_dirs`` directory map.
    timer, fm = gettimer(ui, opts)
    "a" in repo.dirstate  # force the dirstate to be parsed before timing
    def d():
        "a" in repo.dirstate._dirs
        del repo.dirstate._dirs  # drop the cache so the next run recomputes
    timer(d)
    fm.end()
512
519
@command('perfdirstatefoldmap', formatteropts)
def perfdirstatefoldmap(ui, repo, **opts):
    # Benchmark building the dirstate's case-folding map for files.
    timer, fm = gettimer(ui, opts)
    dirstate = repo.dirstate
    'a' in dirstate  # force the dirstate to be parsed before timing
    def d():
        dirstate._filefoldmap.get('a')
        del dirstate._filefoldmap  # drop the cache so each run rebuilds it
    timer(d)
    fm.end()
523
530
@command('perfdirfoldmap', formatteropts)
def perfdirfoldmap(ui, repo, **opts):
    # Benchmark building the dirstate's case-folding map for directories.
    timer, fm = gettimer(ui, opts)
    dirstate = repo.dirstate
    'a' in dirstate  # force the dirstate to be parsed before timing
    def d():
        dirstate._dirfoldmap.get('a')
        # drop both caches: _dirfoldmap is derived from _dirs
        del dirstate._dirfoldmap
        del dirstate._dirs
    timer(d)
    fm.end()
535
542
@command('perfdirstatewrite', formatteropts)
def perfdirstatewrite(ui, repo, **opts):
    # Benchmark writing the dirstate out to disk.
    timer, fm = gettimer(ui, opts)
    ds = repo.dirstate
    "a" in ds  # make sure the dirstate is loaded before timing
    def d():
        ds._dirty = True  # force a real write on every run
        ds.write(repo.currenttransaction())
    timer(d)
    fm.end()
546
553
@command('perfmergecalculate',
         [('r', 'rev', '.', 'rev to merge against')] + formatteropts)
def perfmergecalculate(ui, repo, rev, **opts):
    # Benchmark the merge/update action calculation against the given rev.
    timer, fm = gettimer(ui, opts)
    wctx = repo[None]
    rctx = scmutil.revsingle(repo, rev, rev)
    ancestor = wctx.ancestor(rctx)
    # we don't want working dir files to be stat'd in the benchmark, so prime
    # that cache
    wctx.dirty()
    def d():
        # acceptremote is True because we don't want prompts in the middle of
        # our benchmark
        merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
                               acceptremote=True, followcopies=True)
    timer(d)
    fm.end()
564
571
@command('perfpathcopies', [], "REV REV")
def perfpathcopies(ui, repo, rev1, rev2, **opts):
    """Benchmark computing copy information between two revisions."""
    timer, fm = gettimer(ui, opts)
    srcctx = scmutil.revsingle(repo, rev1, rev1)
    dstctx = scmutil.revsingle(repo, rev2, rev2)
    timer(lambda: copies.pathcopies(srcctx, dstctx))
    fm.end()
574
581
@command('perfmanifest', [], 'REV')
def perfmanifest(ui, repo, rev, **opts):
    """Benchmark reading a manifest with cold caches."""
    timer, fm = gettimer(ui, opts)
    mnode = scmutil.revsingle(repo, rev, rev).manifestnode()
    def readmanifest():
        # clear caches first so every run pays the full read cost
        repo.manifestlog.clearcaches()
        repo.manifestlog[mnode].read()
    timer(readmanifest)
    fm.end()
585
592
@command('perfchangeset', formatteropts)
def perfchangeset(ui, repo, rev, **opts):
    """Benchmark reading a single changelog entry."""
    timer, fm = gettimer(ui, opts)
    node = repo[rev].node()
    def readentry():
        repo.changelog.read(node)
        #repo.changelog._cache = None
    timer(readentry)
    fm.end()
595
602
@command('perfindex', formatteropts)
def perfindex(ui, repo, **opts):
    # Benchmark opening the changelog index and resolving tip's node.
    import mercurial.revlog
    timer, fm = gettimer(ui, opts)
    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
    n = repo["tip"].node()
    svfs = getsvfs(repo)
    def d():
        # open a fresh revlog each run so index parsing is part of the cost
        cl = mercurial.revlog.revlog(svfs, "00changelog.i")
        cl.rev(n)
    timer(d)
    fm.end()
608
615
@command('perfstartup', formatteropts)
def perfstartup(ui, repo, **opts):
    # Benchmark hg startup time by re-running this executable as
    # `hg version -q` with an empty HGRCPATH (no user config).
    timer, fm = gettimer(ui, opts)
    cmd = sys.argv[0]
    def d():
        if os.name != 'nt':
            os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
        else:
            # Windows: no inline env assignment in os.system command lines
            os.environ['HGRCPATH'] = ''
            os.system("%s version -q > NUL" % cmd)
    timer(d)
    fm.end()
621
628
@command('perfparents', formatteropts)
def perfparents(ui, repo, **opts):
    """Benchmark looking up parents for the first N changelog nodes."""
    timer, fm = gettimer(ui, opts)
    # control the number of commits perfparents iterates over
    # experimental config: perf.parentscount
    count = getint(ui, "perf", "parentscount", 1000)
    if len(repo.changelog) < count:
        raise error.Abort("repo needs %d commits for this test" % count)
    repo = repo.unfiltered()
    nodes = [repo.changelog.node(i) for i in xrange(count)]
    def lookupparents():
        for node in nodes:
            repo.changelog.parents(node)
    timer(lookupparents)
    fm.end()
637
644
@command('perfctxfiles', formatteropts)
def perfctxfiles(ui, repo, x, **opts):
    """Benchmark computing the file list of a changeset."""
    rev = int(x)
    timer, fm = gettimer(ui, opts)
    timer(lambda: len(repo[rev].files()))
    fm.end()
646
653
@command('perfrawfiles', formatteropts)
def perfrawfiles(ui, repo, x, **opts):
    """Benchmark reading the raw file list from the changelog."""
    rev = int(x)
    timer, fm = gettimer(ui, opts)
    cl = repo.changelog
    # index 3 of the parsed changelog entry is the files list
    timer(lambda: len(cl.read(rev)[3]))
    fm.end()
656
663
@command('perflookup', formatteropts)
def perflookup(ui, repo, rev, **opts):
    """Benchmark resolving a revision identifier to a node."""
    timer, fm = gettimer(ui, opts)
    def lookup():
        return len(repo.lookup(rev))
    timer(lookup)
    fm.end()
662
669
@command('perfrevrange', formatteropts)
def perfrevrange(ui, repo, *specs, **opts):
    """Benchmark resolving one or more revset specifications."""
    timer, fm = gettimer(ui, opts)
    resolve = scmutil.revrange
    timer(lambda: len(resolve(repo, specs)))
    fm.end()
669
676
@command('perfnodelookup', formatteropts)
def perfnodelookup(ui, repo, rev, **opts):
    # Benchmark mapping a node to its revision number in the changelog.
    timer, fm = gettimer(ui, opts)
    import mercurial.revlog
    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
    n = repo[rev].node()
    cl = mercurial.revlog.revlog(getsvfs(repo), "00changelog.i")
    def d():
        cl.rev(n)
        clearcaches(cl)  # keep every run cold
    timer(d)
    fm.end()
682
689
@command('perflog',
         [('', 'rename', False, 'ask log to follow renames')] + formatteropts)
def perflog(ui, repo, rev=None, **opts):
    """Benchmark a plain log invocation (optionally following renames)."""
    if rev is None:
        rev = []
    timer, fm = gettimer(ui, opts)
    ui.pushbuffer()  # swallow the command's output while timing
    timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
                               copies=opts.get('rename')))
    ui.popbuffer()
    fm.end()
694
701
@command('perfmoonwalk', formatteropts)
def perfmoonwalk(ui, repo, **opts):
    """benchmark walking the changelog backwards

    This also loads the changelog data for each revision in the changelog.
    """
    timer, fm = gettimer(ui, opts)
    def moonwalk():
        for rev in xrange(len(repo), -1, -1):
            # read changelog data (in addition to the index)
            repo[rev].branch()
    timer(moonwalk)
    fm.end()
708
715
@command('perftemplating', formatteropts)
def perftemplating(ui, repo, rev=None, **opts):
    """Benchmark log output rendered through the templater."""
    if rev is None:
        rev = []
    timer, fm = gettimer(ui, opts)
    ui.pushbuffer()  # swallow the rendered output while timing
    timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
                               template='{date|shortdate} [{rev}:{node|short}]'
                               ' {author|person}: {desc|firstline}\n'))
    ui.popbuffer()
    fm.end()
720
727
@command('perfcca', formatteropts)
def perfcca(ui, repo, **opts):
    """Benchmark constructing a case-collision auditor."""
    timer, fm = gettimer(ui, opts)
    def makeauditor():
        scmutil.casecollisionauditor(ui, False, repo.dirstate)
    timer(makeauditor)
    fm.end()
726
733
@command('perffncacheload', formatteropts)
def perffncacheload(ui, repo, **opts):
    """Benchmark loading the fncache from disk."""
    timer, fm = gettimer(ui, opts)
    store = repo.store
    timer(lambda: store.fncache._load())
    fm.end()
735
742
@command('perffncachewrite', formatteropts)
def perffncachewrite(ui, repo, **opts):
    """Benchmark writing the fncache to disk.

    The fncache is marked dirty before each run so a real write happens
    every time.
    """
    timer, fm = gettimer(ui, opts)
    s = repo.store
    s.fncache._load()
    lock = repo.lock()
    tr = repo.transaction('perffncachewrite')
    try:
        def d():
            s.fncache._dirty = True  # force an actual write each run
            s.fncache.write(tr)
        timer(d)
        tr.close()
    finally:
        # previously the lock leaked if timing or the final write raised;
        # always release it so the repo is not left locked
        lock.release()
    fm.end()
750
757
@command('perffncacheencode', formatteropts)
def perffncacheencode(ui, repo, **opts):
    """Benchmark encoding every path stored in the fncache."""
    timer, fm = gettimer(ui, opts)
    store = repo.store
    store.fncache._load()
    def encodeall():
        for path in store.fncache.entries:
            store.encode(path)
    timer(encodeall)
    fm.end()
761
768
@command('perfbdiff', revlogopts + formatteropts + [
    ('', 'count', 1, 'number of revisions to test (when using --startrev)'),
    ('', 'alldata', False, 'test bdiffs for all associated revisions')],
    '-c|-m|FILE REV')
def perfbdiff(ui, repo, file_, rev=None, count=None, **opts):
    """benchmark a bdiff between revisions

    By default, benchmark a bdiff between its delta parent and itself.

    With ``--count``, benchmark bdiffs between delta parents and self for N
    revisions starting at the specified revision.

    With ``--alldata``, assume the requested revision is a changeset and
    measure bdiffs for all changes related to that changeset (manifest
    and filelogs).
    """
    if opts['alldata']:
        # --alldata implies operating on the changelog
        opts['changelog'] = True

    if opts.get('changelog') or opts.get('manifest'):
        # with -c/-m the positional FILE argument is actually the revision
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('perfbdiff', 'invalid arguments')

    # list of (old text, new text) pairs to diff inside the timed loop
    textpairs = []

    r = cmdutil.openrevlog(repo, 'perfbdiff', file_, opts)

    startrev = r.rev(r.lookup(rev))
    for rev in range(startrev, min(startrev + count, len(r) - 1)):
        if opts['alldata']:
            # Load revisions associated with changeset.
            ctx = repo[rev]
            mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
            for pctx in ctx.parents():
                pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
                textpairs.append((pman, mtext))

            # Load filelog revisions by iterating manifest delta.
            man = ctx.manifest()
            pman = ctx.p1().manifest()
            for filename, change in pman.diff(man).items():
                fctx = repo.file(filename)
                f1 = fctx.revision(change[0][0] or -1)
                f2 = fctx.revision(change[1][0] or -1)
                textpairs.append((f1, f2))
        else:
            # default: diff each revision against its delta parent
            dp = r.deltaparent(rev)
            textpairs.append((r.revision(dp), r.revision(rev)))

    def d():
        for pair in textpairs:
            mdiff.textdiff(*pair)

    timer, fm = gettimer(ui, opts)
    timer(d)
    fm.end()
819
826
@command('perfdiffwd', formatteropts)
def perfdiffwd(ui, repo, **opts):
    """Profile diff of working directory changes"""
    timer, fm = gettimer(ui, opts)
    # map of single-letter flags to the diff option they enable
    options = {
        'w': 'ignore_all_space',
        'b': 'ignore_space_change',
        'B': 'ignore_blank_lines',
    }

    # time the diff under each combination of whitespace options
    for diffopt in ('', 'w', 'b', 'B', 'wB'):
        diffargs = dict((options[c], '1') for c in diffopt)
        def rundiff():
            ui.pushbuffer()
            commands.diff(ui, repo, **diffargs)
            ui.popbuffer()
        title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
        timer(rundiff, title)
    fm.end()
839
846
@command('perfrevlog', revlogopts + formatteropts +
         [('d', 'dist', 100, 'distance between the revisions'),
          ('s', 'startrev', 0, 'revision to start reading at'),
          ('', 'reverse', False, 'read in reverse')],
         '-c|-m|FILE')
def perfrevlog(ui, repo, file_=None, startrev=0, reverse=False, **opts):
    """Benchmark reading a series of revisions from a revlog.

    By default, we read every ``-d/--dist`` revision from 0 to tip of
    the specified revlog.

    The start revision can be defined via ``-s/--startrev``.
    """
    rl = cmdutil.openrevlog(repo, 'perfrevlog', file_, opts)
    rllen = getlen(ui)(rl)

    def d():
        rl.clearcaches()  # each run starts cold

        beginrev = startrev
        endrev = rllen
        dist = opts['dist']

        if reverse:
            # walk from tip towards startrev with a negative stride
            beginrev, endrev = endrev, beginrev
            dist = -1 * dist

        for x in xrange(beginrev, endrev, dist):
            # Old revisions don't support passing int.
            n = rl.node(x)
            rl.revision(n)

    timer, fm = gettimer(ui, opts)
    timer(d)
    fm.end()
875
882
@command('perfrevlogchunks', revlogopts + formatteropts +
         [('e', 'engines', '', 'compression engines to use'),
          ('s', 'startrev', 0, 'revision to start at')],
         '-c|-m|FILE')
def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
    """Benchmark operations on revlog chunks.

    Logically, each revlog is a collection of fulltext revisions. However,
    stored within each revlog are "chunks" of possibly compressed data. This
    data needs to be read and decompressed or compressed and written.

    This command measures the time it takes to read+decompress and recompress
    chunks in a revlog. It effectively isolates I/O and compression performance.
    For measurements of higher-level operations like resolving revisions,
    see ``perfrevlog`` and ``perfrevlogrevision``.
    """
    rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)

    # _chunkraw was renamed to _getsegmentforrevs.
    try:
        segmentforrevs = rl._getsegmentforrevs
    except AttributeError:
        segmentforrevs = rl._chunkraw

    # Verify engines argument.
    if engines:
        engines = set(e.strip() for e in engines.split(','))
        for engine in engines:
            try:
                util.compressionengines[engine]
            except KeyError:
                raise error.Abort('unknown compression engine: %s' % engine)
    else:
        # default: every available engine that supports revlog compression
        engines = []
        for e in util.compengines:
            engine = util.compengines[e]
            try:
                if engine.available():
                    engine.revlogcompressor().compress('dummy')
                    engines.append(e)
            except NotImplementedError:
                pass

    revs = list(rl.revs(startrev, len(rl) - 1))

    def rlfh(rl):
        # return a file handle for the revlog's data (inline or separate file)
        if rl._inline:
            return getsvfs(repo)(rl.indexfile)
        else:
            return getsvfs(repo)(rl.datafile)

    def doread():
        rl.clearcaches()
        for rev in revs:
            segmentforrevs(rev, rev)

    def doreadcachedfh():
        rl.clearcaches()
        fh = rlfh(rl)
        for rev in revs:
            segmentforrevs(rev, rev, df=fh)

    def doreadbatch():
        rl.clearcaches()
        segmentforrevs(revs[0], revs[-1])

    def doreadbatchcachedfh():
        rl.clearcaches()
        fh = rlfh(rl)
        segmentforrevs(revs[0], revs[-1], df=fh)

    def dochunk():
        rl.clearcaches()
        fh = rlfh(rl)
        for rev in revs:
            rl._chunk(rev, df=fh)

    # one-element list so dochunkbatch can stash its result for docompress
    chunks = [None]

    def dochunkbatch():
        rl.clearcaches()
        fh = rlfh(rl)
        # Save chunks as a side-effect.
        chunks[0] = rl._chunks(revs, df=fh)

    def docompress(compressor):
        rl.clearcaches()

        try:
            # Swap in the requested compression engine.
            oldcompressor = rl._compressor
            rl._compressor = compressor
            for chunk in chunks[0]:
                rl.compress(chunk)
        finally:
            rl._compressor = oldcompressor

    benches = [
        (lambda: doread(), 'read'),
        (lambda: doreadcachedfh(), 'read w/ reused fd'),
        (lambda: doreadbatch(), 'read batch'),
        (lambda: doreadbatchcachedfh(), 'read batch w/ reused fd'),
        (lambda: dochunk(), 'chunk'),
        (lambda: dochunkbatch(), 'chunk batch'),
    ]

    for engine in sorted(engines):
        compressor = util.compengines[engine].revlogcompressor()
        benches.append((functools.partial(docompress, compressor),
                        'compress w/ %s' % engine))

    for fn, title in benches:
        timer, fm = gettimer(ui, opts)
        timer(fn, title=title)
        fm.end()
991
998
992 @command('perfrevlogrevision', revlogopts + formatteropts +
999 @command('perfrevlogrevision', revlogopts + formatteropts +
993 [('', 'cache', False, 'use caches instead of clearing')],
1000 [('', 'cache', False, 'use caches instead of clearing')],
994 '-c|-m|FILE REV')
1001 '-c|-m|FILE REV')
995 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1002 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
996 """Benchmark obtaining a revlog revision.
1003 """Benchmark obtaining a revlog revision.
997
1004
998 Obtaining a revlog revision consists of roughly the following steps:
1005 Obtaining a revlog revision consists of roughly the following steps:
999
1006
1000 1. Compute the delta chain
1007 1. Compute the delta chain
1001 2. Obtain the raw chunks for that delta chain
1008 2. Obtain the raw chunks for that delta chain
1002 3. Decompress each raw chunk
1009 3. Decompress each raw chunk
1003 4. Apply binary patches to obtain fulltext
1010 4. Apply binary patches to obtain fulltext
1004 5. Verify hash of fulltext
1011 5. Verify hash of fulltext
1005
1012
1006 This command measures the time spent in each of these phases.
1013 This command measures the time spent in each of these phases.
1007 """
1014 """
1008 if opts.get('changelog') or opts.get('manifest'):
1015 if opts.get('changelog') or opts.get('manifest'):
1009 file_, rev = None, file_
1016 file_, rev = None, file_
1010 elif rev is None:
1017 elif rev is None:
1011 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1018 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1012
1019
1013 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1020 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1014
1021
1015 # _chunkraw was renamed to _getsegmentforrevs.
1022 # _chunkraw was renamed to _getsegmentforrevs.
1016 try:
1023 try:
1017 segmentforrevs = r._getsegmentforrevs
1024 segmentforrevs = r._getsegmentforrevs
1018 except AttributeError:
1025 except AttributeError:
1019 segmentforrevs = r._chunkraw
1026 segmentforrevs = r._chunkraw
1020
1027
1021 node = r.lookup(rev)
1028 node = r.lookup(rev)
1022 rev = r.rev(node)
1029 rev = r.rev(node)
1023
1030
1024 def getrawchunks(data, chain):
1031 def getrawchunks(data, chain):
1025 start = r.start
1032 start = r.start
1026 length = r.length
1033 length = r.length
1027 inline = r._inline
1034 inline = r._inline
1028 iosize = r._io.size
1035 iosize = r._io.size
1029 buffer = util.buffer
1036 buffer = util.buffer
1030 offset = start(chain[0])
1037 offset = start(chain[0])
1031
1038
1032 chunks = []
1039 chunks = []
1033 ladd = chunks.append
1040 ladd = chunks.append
1034
1041
1035 for rev in chain:
1042 for rev in chain:
1036 chunkstart = start(rev)
1043 chunkstart = start(rev)
1037 if inline:
1044 if inline:
1038 chunkstart += (rev + 1) * iosize
1045 chunkstart += (rev + 1) * iosize
1039 chunklength = length(rev)
1046 chunklength = length(rev)
1040 ladd(buffer(data, chunkstart - offset, chunklength))
1047 ladd(buffer(data, chunkstart - offset, chunklength))
1041
1048
1042 return chunks
1049 return chunks
1043
1050
1044 def dodeltachain(rev):
1051 def dodeltachain(rev):
1045 if not cache:
1052 if not cache:
1046 r.clearcaches()
1053 r.clearcaches()
1047 r._deltachain(rev)
1054 r._deltachain(rev)
1048
1055
1049 def doread(chain):
1056 def doread(chain):
1050 if not cache:
1057 if not cache:
1051 r.clearcaches()
1058 r.clearcaches()
1052 segmentforrevs(chain[0], chain[-1])
1059 segmentforrevs(chain[0], chain[-1])
1053
1060
1054 def dorawchunks(data, chain):
1061 def dorawchunks(data, chain):
1055 if not cache:
1062 if not cache:
1056 r.clearcaches()
1063 r.clearcaches()
1057 getrawchunks(data, chain)
1064 getrawchunks(data, chain)
1058
1065
1059 def dodecompress(chunks):
1066 def dodecompress(chunks):
1060 decomp = r.decompress
1067 decomp = r.decompress
1061 for chunk in chunks:
1068 for chunk in chunks:
1062 decomp(chunk)
1069 decomp(chunk)
1063
1070
1064 def dopatch(text, bins):
1071 def dopatch(text, bins):
1065 if not cache:
1072 if not cache:
1066 r.clearcaches()
1073 r.clearcaches()
1067 mdiff.patches(text, bins)
1074 mdiff.patches(text, bins)
1068
1075
1069 def dohash(text):
1076 def dohash(text):
1070 if not cache:
1077 if not cache:
1071 r.clearcaches()
1078 r.clearcaches()
1072 r.checkhash(text, node, rev=rev)
1079 r.checkhash(text, node, rev=rev)
1073
1080
1074 def dorevision():
1081 def dorevision():
1075 if not cache:
1082 if not cache:
1076 r.clearcaches()
1083 r.clearcaches()
1077 r.revision(node)
1084 r.revision(node)
1078
1085
1079 chain = r._deltachain(rev)[0]
1086 chain = r._deltachain(rev)[0]
1080 data = segmentforrevs(chain[0], chain[-1])[1]
1087 data = segmentforrevs(chain[0], chain[-1])[1]
1081 rawchunks = getrawchunks(data, chain)
1088 rawchunks = getrawchunks(data, chain)
1082 bins = r._chunks(chain)
1089 bins = r._chunks(chain)
1083 text = str(bins[0])
1090 text = str(bins[0])
1084 bins = bins[1:]
1091 bins = bins[1:]
1085 text = mdiff.patches(text, bins)
1092 text = mdiff.patches(text, bins)
1086
1093
1087 benches = [
1094 benches = [
1088 (lambda: dorevision(), 'full'),
1095 (lambda: dorevision(), 'full'),
1089 (lambda: dodeltachain(rev), 'deltachain'),
1096 (lambda: dodeltachain(rev), 'deltachain'),
1090 (lambda: doread(chain), 'read'),
1097 (lambda: doread(chain), 'read'),
1091 (lambda: dorawchunks(data, chain), 'rawchunks'),
1098 (lambda: dorawchunks(data, chain), 'rawchunks'),
1092 (lambda: dodecompress(rawchunks), 'decompress'),
1099 (lambda: dodecompress(rawchunks), 'decompress'),
1093 (lambda: dopatch(text, bins), 'patch'),
1100 (lambda: dopatch(text, bins), 'patch'),
1094 (lambda: dohash(text), 'hash'),
1101 (lambda: dohash(text), 'hash'),
1095 ]
1102 ]
1096
1103
1097 for fn, title in benches:
1104 for fn, title in benches:
1098 timer, fm = gettimer(ui, opts)
1105 timer, fm = gettimer(ui, opts)
1099 timer(fn, title=title)
1106 timer(fn, title=title)
1100 fm.end()
1107 fm.end()
1101
1108
1102 @command('perfrevset',
1109 @command('perfrevset',
1103 [('C', 'clear', False, 'clear volatile cache between each call.'),
1110 [('C', 'clear', False, 'clear volatile cache between each call.'),
1104 ('', 'contexts', False, 'obtain changectx for each revision')]
1111 ('', 'contexts', False, 'obtain changectx for each revision')]
1105 + formatteropts, "REVSET")
1112 + formatteropts, "REVSET")
1106 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1113 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1107 """benchmark the execution time of a revset
1114 """benchmark the execution time of a revset
1108
1115
1109 Use the --clean option if need to evaluate the impact of build volatile
1116 Use the --clean option if need to evaluate the impact of build volatile
1110 revisions set cache on the revset execution. Volatile cache hold filtered
1117 revisions set cache on the revset execution. Volatile cache hold filtered
1111 and obsolete related cache."""
1118 and obsolete related cache."""
1112 timer, fm = gettimer(ui, opts)
1119 timer, fm = gettimer(ui, opts)
1113 def d():
1120 def d():
1114 if clear:
1121 if clear:
1115 repo.invalidatevolatilesets()
1122 repo.invalidatevolatilesets()
1116 if contexts:
1123 if contexts:
1117 for ctx in repo.set(expr): pass
1124 for ctx in repo.set(expr): pass
1118 else:
1125 else:
1119 for r in repo.revs(expr): pass
1126 for r in repo.revs(expr): pass
1120 timer(d)
1127 timer(d)
1121 fm.end()
1128 fm.end()
1122
1129
1123 @command('perfvolatilesets', formatteropts)
1130 @command('perfvolatilesets', formatteropts)
1124 def perfvolatilesets(ui, repo, *names, **opts):
1131 def perfvolatilesets(ui, repo, *names, **opts):
1125 """benchmark the computation of various volatile set
1132 """benchmark the computation of various volatile set
1126
1133
1127 Volatile set computes element related to filtering and obsolescence."""
1134 Volatile set computes element related to filtering and obsolescence."""
1128 timer, fm = gettimer(ui, opts)
1135 timer, fm = gettimer(ui, opts)
1129 repo = repo.unfiltered()
1136 repo = repo.unfiltered()
1130
1137
1131 def getobs(name):
1138 def getobs(name):
1132 def d():
1139 def d():
1133 repo.invalidatevolatilesets()
1140 repo.invalidatevolatilesets()
1134 obsolete.getrevs(repo, name)
1141 obsolete.getrevs(repo, name)
1135 return d
1142 return d
1136
1143
1137 allobs = sorted(obsolete.cachefuncs)
1144 allobs = sorted(obsolete.cachefuncs)
1138 if names:
1145 if names:
1139 allobs = [n for n in allobs if n in names]
1146 allobs = [n for n in allobs if n in names]
1140
1147
1141 for name in allobs:
1148 for name in allobs:
1142 timer(getobs(name), title=name)
1149 timer(getobs(name), title=name)
1143
1150
1144 def getfiltered(name):
1151 def getfiltered(name):
1145 def d():
1152 def d():
1146 repo.invalidatevolatilesets()
1153 repo.invalidatevolatilesets()
1147 repoview.filterrevs(repo, name)
1154 repoview.filterrevs(repo, name)
1148 return d
1155 return d
1149
1156
1150 allfilter = sorted(repoview.filtertable)
1157 allfilter = sorted(repoview.filtertable)
1151 if names:
1158 if names:
1152 allfilter = [n for n in allfilter if n in names]
1159 allfilter = [n for n in allfilter if n in names]
1153
1160
1154 for name in allfilter:
1161 for name in allfilter:
1155 timer(getfiltered(name), title=name)
1162 timer(getfiltered(name), title=name)
1156 fm.end()
1163 fm.end()
1157
1164
1158 @command('perfbranchmap',
1165 @command('perfbranchmap',
1159 [('f', 'full', False,
1166 [('f', 'full', False,
1160 'Includes build time of subset'),
1167 'Includes build time of subset'),
1161 ] + formatteropts)
1168 ] + formatteropts)
1162 def perfbranchmap(ui, repo, full=False, **opts):
1169 def perfbranchmap(ui, repo, full=False, **opts):
1163 """benchmark the update of a branchmap
1170 """benchmark the update of a branchmap
1164
1171
1165 This benchmarks the full repo.branchmap() call with read and write disabled
1172 This benchmarks the full repo.branchmap() call with read and write disabled
1166 """
1173 """
1167 timer, fm = gettimer(ui, opts)
1174 timer, fm = gettimer(ui, opts)
1168 def getbranchmap(filtername):
1175 def getbranchmap(filtername):
1169 """generate a benchmark function for the filtername"""
1176 """generate a benchmark function for the filtername"""
1170 if filtername is None:
1177 if filtername is None:
1171 view = repo
1178 view = repo
1172 else:
1179 else:
1173 view = repo.filtered(filtername)
1180 view = repo.filtered(filtername)
1174 def d():
1181 def d():
1175 if full:
1182 if full:
1176 view._branchcaches.clear()
1183 view._branchcaches.clear()
1177 else:
1184 else:
1178 view._branchcaches.pop(filtername, None)
1185 view._branchcaches.pop(filtername, None)
1179 view.branchmap()
1186 view.branchmap()
1180 return d
1187 return d
1181 # add filter in smaller subset to bigger subset
1188 # add filter in smaller subset to bigger subset
1182 possiblefilters = set(repoview.filtertable)
1189 possiblefilters = set(repoview.filtertable)
1183 subsettable = getbranchmapsubsettable()
1190 subsettable = getbranchmapsubsettable()
1184 allfilters = []
1191 allfilters = []
1185 while possiblefilters:
1192 while possiblefilters:
1186 for name in possiblefilters:
1193 for name in possiblefilters:
1187 subset = subsettable.get(name)
1194 subset = subsettable.get(name)
1188 if subset not in possiblefilters:
1195 if subset not in possiblefilters:
1189 break
1196 break
1190 else:
1197 else:
1191 assert False, 'subset cycle %s!' % possiblefilters
1198 assert False, 'subset cycle %s!' % possiblefilters
1192 allfilters.append(name)
1199 allfilters.append(name)
1193 possiblefilters.remove(name)
1200 possiblefilters.remove(name)
1194
1201
1195 # warm the cache
1202 # warm the cache
1196 if not full:
1203 if not full:
1197 for name in allfilters:
1204 for name in allfilters:
1198 repo.filtered(name).branchmap()
1205 repo.filtered(name).branchmap()
1199 # add unfiltered
1206 # add unfiltered
1200 allfilters.append(None)
1207 allfilters.append(None)
1201
1208
1202 branchcacheread = safeattrsetter(branchmap, 'read')
1209 branchcacheread = safeattrsetter(branchmap, 'read')
1203 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1210 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1204 branchcacheread.set(lambda repo: None)
1211 branchcacheread.set(lambda repo: None)
1205 branchcachewrite.set(lambda bc, repo: None)
1212 branchcachewrite.set(lambda bc, repo: None)
1206 try:
1213 try:
1207 for name in allfilters:
1214 for name in allfilters:
1208 timer(getbranchmap(name), title=str(name))
1215 timer(getbranchmap(name), title=str(name))
1209 finally:
1216 finally:
1210 branchcacheread.restore()
1217 branchcacheread.restore()
1211 branchcachewrite.restore()
1218 branchcachewrite.restore()
1212 fm.end()
1219 fm.end()
1213
1220
1214 @command('perfloadmarkers')
1221 @command('perfloadmarkers')
1215 def perfloadmarkers(ui, repo):
1222 def perfloadmarkers(ui, repo):
1216 """benchmark the time to parse the on-disk markers for a repo
1223 """benchmark the time to parse the on-disk markers for a repo
1217
1224
1218 Result is the number of markers in the repo."""
1225 Result is the number of markers in the repo."""
1219 timer, fm = gettimer(ui)
1226 timer, fm = gettimer(ui)
1220 svfs = getsvfs(repo)
1227 svfs = getsvfs(repo)
1221 timer(lambda: len(obsolete.obsstore(svfs)))
1228 timer(lambda: len(obsolete.obsstore(svfs)))
1222 fm.end()
1229 fm.end()
1223
1230
1224 @command('perflrucachedict', formatteropts +
1231 @command('perflrucachedict', formatteropts +
1225 [('', 'size', 4, 'size of cache'),
1232 [('', 'size', 4, 'size of cache'),
1226 ('', 'gets', 10000, 'number of key lookups'),
1233 ('', 'gets', 10000, 'number of key lookups'),
1227 ('', 'sets', 10000, 'number of key sets'),
1234 ('', 'sets', 10000, 'number of key sets'),
1228 ('', 'mixed', 10000, 'number of mixed mode operations'),
1235 ('', 'mixed', 10000, 'number of mixed mode operations'),
1229 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1236 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1230 norepo=True)
1237 norepo=True)
1231 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1238 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1232 mixedgetfreq=50, **opts):
1239 mixedgetfreq=50, **opts):
1233 def doinit():
1240 def doinit():
1234 for i in xrange(10000):
1241 for i in xrange(10000):
1235 util.lrucachedict(size)
1242 util.lrucachedict(size)
1236
1243
1237 values = []
1244 values = []
1238 for i in xrange(size):
1245 for i in xrange(size):
1239 values.append(random.randint(0, sys.maxint))
1246 values.append(random.randint(0, sys.maxint))
1240
1247
1241 # Get mode fills the cache and tests raw lookup performance with no
1248 # Get mode fills the cache and tests raw lookup performance with no
1242 # eviction.
1249 # eviction.
1243 getseq = []
1250 getseq = []
1244 for i in xrange(gets):
1251 for i in xrange(gets):
1245 getseq.append(random.choice(values))
1252 getseq.append(random.choice(values))
1246
1253
1247 def dogets():
1254 def dogets():
1248 d = util.lrucachedict(size)
1255 d = util.lrucachedict(size)
1249 for v in values:
1256 for v in values:
1250 d[v] = v
1257 d[v] = v
1251 for key in getseq:
1258 for key in getseq:
1252 value = d[key]
1259 value = d[key]
1253 value # silence pyflakes warning
1260 value # silence pyflakes warning
1254
1261
1255 # Set mode tests insertion speed with cache eviction.
1262 # Set mode tests insertion speed with cache eviction.
1256 setseq = []
1263 setseq = []
1257 for i in xrange(sets):
1264 for i in xrange(sets):
1258 setseq.append(random.randint(0, sys.maxint))
1265 setseq.append(random.randint(0, sys.maxint))
1259
1266
1260 def dosets():
1267 def dosets():
1261 d = util.lrucachedict(size)
1268 d = util.lrucachedict(size)
1262 for v in setseq:
1269 for v in setseq:
1263 d[v] = v
1270 d[v] = v
1264
1271
1265 # Mixed mode randomly performs gets and sets with eviction.
1272 # Mixed mode randomly performs gets and sets with eviction.
1266 mixedops = []
1273 mixedops = []
1267 for i in xrange(mixed):
1274 for i in xrange(mixed):
1268 r = random.randint(0, 100)
1275 r = random.randint(0, 100)
1269 if r < mixedgetfreq:
1276 if r < mixedgetfreq:
1270 op = 0
1277 op = 0
1271 else:
1278 else:
1272 op = 1
1279 op = 1
1273
1280
1274 mixedops.append((op, random.randint(0, size * 2)))
1281 mixedops.append((op, random.randint(0, size * 2)))
1275
1282
1276 def domixed():
1283 def domixed():
1277 d = util.lrucachedict(size)
1284 d = util.lrucachedict(size)
1278
1285
1279 for op, v in mixedops:
1286 for op, v in mixedops:
1280 if op == 0:
1287 if op == 0:
1281 try:
1288 try:
1282 d[v]
1289 d[v]
1283 except KeyError:
1290 except KeyError:
1284 pass
1291 pass
1285 else:
1292 else:
1286 d[v] = v
1293 d[v] = v
1287
1294
1288 benches = [
1295 benches = [
1289 (doinit, 'init'),
1296 (doinit, 'init'),
1290 (dogets, 'gets'),
1297 (dogets, 'gets'),
1291 (dosets, 'sets'),
1298 (dosets, 'sets'),
1292 (domixed, 'mixed')
1299 (domixed, 'mixed')
1293 ]
1300 ]
1294
1301
1295 for fn, title in benches:
1302 for fn, title in benches:
1296 timer, fm = gettimer(ui, opts)
1303 timer, fm = gettimer(ui, opts)
1297 timer(fn, title=title)
1304 timer(fn, title=title)
1298 fm.end()
1305 fm.end()
1299
1306
1300 @command('perfwrite', formatteropts)
1307 @command('perfwrite', formatteropts)
1301 def perfwrite(ui, repo, **opts):
1308 def perfwrite(ui, repo, **opts):
1302 """microbenchmark ui.write
1309 """microbenchmark ui.write
1303 """
1310 """
1304 timer, fm = gettimer(ui, opts)
1311 timer, fm = gettimer(ui, opts)
1305 def write():
1312 def write():
1306 for i in range(100000):
1313 for i in range(100000):
1307 ui.write(('Testing write performance\n'))
1314 ui.write(('Testing write performance\n'))
1308 timer(write)
1315 timer(write)
1309 fm.end()
1316 fm.end()
1310
1317
1311 def uisetup(ui):
1318 def uisetup(ui):
1312 if (util.safehasattr(cmdutil, 'openrevlog') and
1319 if (util.safehasattr(cmdutil, 'openrevlog') and
1313 not util.safehasattr(commands, 'debugrevlogopts')):
1320 not util.safehasattr(commands, 'debugrevlogopts')):
1314 # for "historical portability":
1321 # for "historical portability":
1315 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1322 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1316 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
1323 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
1317 # openrevlog() should cause failure, because it has been
1324 # openrevlog() should cause failure, because it has been
1318 # available since 3.5 (or 49c583ca48c4).
1325 # available since 3.5 (or 49c583ca48c4).
1319 def openrevlog(orig, repo, cmd, file_, opts):
1326 def openrevlog(orig, repo, cmd, file_, opts):
1320 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1327 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1321 raise error.Abort("This version doesn't support --dir option",
1328 raise error.Abort("This version doesn't support --dir option",
1322 hint="use 3.5 or later")
1329 hint="use 3.5 or later")
1323 return orig(repo, cmd, file_, opts)
1330 return orig(repo, cmd, file_, opts)
1324 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
1331 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
@@ -1,516 +1,516 b''
1 # synthrepo.py - repo synthesis
1 # synthrepo.py - repo synthesis
2 #
2 #
3 # Copyright 2012 Facebook
3 # Copyright 2012 Facebook
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''synthesize structurally interesting change history
8 '''synthesize structurally interesting change history
9
9
10 This extension is useful for creating a repository with properties
10 This extension is useful for creating a repository with properties
11 that are statistically similar to an existing repository. During
11 that are statistically similar to an existing repository. During
12 analysis, a simple probability table is constructed from the history
12 analysis, a simple probability table is constructed from the history
13 of an existing repository. During synthesis, these properties are
13 of an existing repository. During synthesis, these properties are
14 reconstructed.
14 reconstructed.
15
15
16 Properties that are analyzed and synthesized include the following:
16 Properties that are analyzed and synthesized include the following:
17
17
18 - Lines added or removed when an existing file is modified
18 - Lines added or removed when an existing file is modified
19 - Number and sizes of files added
19 - Number and sizes of files added
20 - Number of files removed
20 - Number of files removed
21 - Line lengths
21 - Line lengths
22 - Topological distance to parent changeset(s)
22 - Topological distance to parent changeset(s)
23 - Probability of a commit being a merge
23 - Probability of a commit being a merge
24 - Probability of a newly added file being added to a new directory
24 - Probability of a newly added file being added to a new directory
25 - Interarrival time, and time zone, of commits
25 - Interarrival time, and time zone, of commits
26 - Number of files in each directory
26 - Number of files in each directory
27
27
28 A few obvious properties that are not currently handled realistically:
28 A few obvious properties that are not currently handled realistically:
29
29
30 - Merges are treated as regular commits with two parents, which is not
30 - Merges are treated as regular commits with two parents, which is not
31 realistic
31 realistic
32 - Modifications are not treated as operations on hunks of lines, but
32 - Modifications are not treated as operations on hunks of lines, but
33 as insertions and deletions of randomly chosen single lines
33 as insertions and deletions of randomly chosen single lines
34 - Committer ID (always random)
34 - Committer ID (always random)
35 - Executability of files
35 - Executability of files
36 - Symlinks and binary files are ignored
36 - Symlinks and binary files are ignored
37 '''
37 '''
38
38
39 from __future__ import absolute_import
39 from __future__ import absolute_import
40 import bisect
40 import bisect
41 import collections
41 import collections
42 import itertools
42 import itertools
43 import json
43 import json
44 import os
44 import os
45 import random
45 import random
46 import sys
46 import sys
47 import time
47 import time
48
48
49 from mercurial.i18n import _
49 from mercurial.i18n import _
50 from mercurial.node import (
50 from mercurial.node import (
51 nullid,
51 nullid,
52 nullrev,
52 nullrev,
53 short,
53 short,
54 )
54 )
55 from mercurial import (
55 from mercurial import (
56 cmdutil,
57 context,
56 context,
58 error,
57 error,
59 hg,
58 hg,
60 patch,
59 patch,
60 registrar,
61 scmutil,
61 scmutil,
62 util,
62 util,
63 )
63 )
64
64
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
67 # be specifying the version(s) of Mercurial they are tested with, or
67 # be specifying the version(s) of Mercurial they are tested with, or
68 # leave the attribute unspecified.
68 # leave the attribute unspecified.
69 testedwith = 'ships-with-hg-core'
69 testedwith = 'ships-with-hg-core'
70
70
71 cmdtable = {}
71 cmdtable = {}
72 command = cmdutil.command(cmdtable)
72 command = registrar.command(cmdtable)
73
73
74 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
74 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
75
75
76 def zerodict():
76 def zerodict():
77 return collections.defaultdict(lambda: 0)
77 return collections.defaultdict(lambda: 0)
78
78
79 def roundto(x, k):
79 def roundto(x, k):
80 if x > k * 2:
80 if x > k * 2:
81 return int(round(x / float(k)) * k)
81 return int(round(x / float(k)) * k)
82 return int(round(x))
82 return int(round(x))
83
83
84 def parsegitdiff(lines):
84 def parsegitdiff(lines):
85 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
85 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
86 binary = False
86 binary = False
87 for line in lines:
87 for line in lines:
88 start = line[:6]
88 start = line[:6]
89 if start == 'diff -':
89 if start == 'diff -':
90 if filename:
90 if filename:
91 yield filename, mar, lineadd, lineremove, binary
91 yield filename, mar, lineadd, lineremove, binary
92 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
92 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
93 filename = patch.gitre.match(line).group(1)
93 filename = patch.gitre.match(line).group(1)
94 elif start in newfile:
94 elif start in newfile:
95 mar = 'a'
95 mar = 'a'
96 elif start == 'GIT bi':
96 elif start == 'GIT bi':
97 binary = True
97 binary = True
98 elif start == 'delete':
98 elif start == 'delete':
99 mar = 'r'
99 mar = 'r'
100 elif start:
100 elif start:
101 s = start[0]
101 s = start[0]
102 if s == '-' and not line.startswith('--- '):
102 if s == '-' and not line.startswith('--- '):
103 lineremove += 1
103 lineremove += 1
104 elif s == '+' and not line.startswith('+++ '):
104 elif s == '+' and not line.startswith('+++ '):
105 lineadd[roundto(len(line) - 1, 5)] += 1
105 lineadd[roundto(len(line) - 1, 5)] += 1
106 if filename:
106 if filename:
107 yield filename, mar, lineadd, lineremove, binary
107 yield filename, mar, lineadd, lineremove, binary
108
108
109 @command('analyze',
109 @command('analyze',
110 [('o', 'output', '', _('write output to given file'), _('FILE')),
110 [('o', 'output', '', _('write output to given file'), _('FILE')),
111 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
111 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
112 _('hg analyze'), optionalrepo=True)
112 _('hg analyze'), optionalrepo=True)
113 def analyze(ui, repo, *revs, **opts):
113 def analyze(ui, repo, *revs, **opts):
114 '''create a simple model of a repository to use for later synthesis
114 '''create a simple model of a repository to use for later synthesis
115
115
116 This command examines every changeset in the given range (or all
116 This command examines every changeset in the given range (or all
117 of history if none are specified) and creates a simple statistical
117 of history if none are specified) and creates a simple statistical
118 model of the history of the repository. It also measures the directory
118 model of the history of the repository. It also measures the directory
119 structure of the repository as checked out.
119 structure of the repository as checked out.
120
120
121 The model is written out to a JSON file, and can be used by
121 The model is written out to a JSON file, and can be used by
122 :hg:`synthesize` to create or augment a repository with synthetic
122 :hg:`synthesize` to create or augment a repository with synthetic
123 commits that have a structure that is statistically similar to the
123 commits that have a structure that is statistically similar to the
124 analyzed repository.
124 analyzed repository.
125 '''
125 '''
126 root = repo.root
126 root = repo.root
127 if not root.endswith(os.path.sep):
127 if not root.endswith(os.path.sep):
128 root += os.path.sep
128 root += os.path.sep
129
129
130 revs = list(revs)
130 revs = list(revs)
131 revs.extend(opts['rev'])
131 revs.extend(opts['rev'])
132 if not revs:
132 if not revs:
133 revs = [':']
133 revs = [':']
134
134
135 output = opts['output']
135 output = opts['output']
136 if not output:
136 if not output:
137 output = os.path.basename(root) + '.json'
137 output = os.path.basename(root) + '.json'
138
138
139 if output == '-':
139 if output == '-':
140 fp = sys.stdout
140 fp = sys.stdout
141 else:
141 else:
142 fp = open(output, 'w')
142 fp = open(output, 'w')
143
143
144 # Always obtain file counts of each directory in the given root directory.
144 # Always obtain file counts of each directory in the given root directory.
145 def onerror(e):
145 def onerror(e):
146 ui.warn(_('error walking directory structure: %s\n') % e)
146 ui.warn(_('error walking directory structure: %s\n') % e)
147
147
148 dirs = {}
148 dirs = {}
149 rootprefixlen = len(root)
149 rootprefixlen = len(root)
150 for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
150 for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
151 dirpathfromroot = dirpath[rootprefixlen:]
151 dirpathfromroot = dirpath[rootprefixlen:]
152 dirs[dirpathfromroot] = len(filenames)
152 dirs[dirpathfromroot] = len(filenames)
153 if '.hg' in dirnames:
153 if '.hg' in dirnames:
154 dirnames.remove('.hg')
154 dirnames.remove('.hg')
155
155
156 lineschanged = zerodict()
156 lineschanged = zerodict()
157 children = zerodict()
157 children = zerodict()
158 p1distance = zerodict()
158 p1distance = zerodict()
159 p2distance = zerodict()
159 p2distance = zerodict()
160 linesinfilesadded = zerodict()
160 linesinfilesadded = zerodict()
161 fileschanged = zerodict()
161 fileschanged = zerodict()
162 filesadded = zerodict()
162 filesadded = zerodict()
163 filesremoved = zerodict()
163 filesremoved = zerodict()
164 linelengths = zerodict()
164 linelengths = zerodict()
165 interarrival = zerodict()
165 interarrival = zerodict()
166 parents = zerodict()
166 parents = zerodict()
167 dirsadded = zerodict()
167 dirsadded = zerodict()
168 tzoffset = zerodict()
168 tzoffset = zerodict()
169
169
170 # If a mercurial repo is available, also model the commit history.
170 # If a mercurial repo is available, also model the commit history.
171 if repo:
171 if repo:
172 revs = scmutil.revrange(repo, revs)
172 revs = scmutil.revrange(repo, revs)
173 revs.sort()
173 revs.sort()
174
174
175 progress = ui.progress
175 progress = ui.progress
176 _analyzing = _('analyzing')
176 _analyzing = _('analyzing')
177 _changesets = _('changesets')
177 _changesets = _('changesets')
178 _total = len(revs)
178 _total = len(revs)
179
179
180 for i, rev in enumerate(revs):
180 for i, rev in enumerate(revs):
181 progress(_analyzing, i, unit=_changesets, total=_total)
181 progress(_analyzing, i, unit=_changesets, total=_total)
182 ctx = repo[rev]
182 ctx = repo[rev]
183 pl = ctx.parents()
183 pl = ctx.parents()
184 pctx = pl[0]
184 pctx = pl[0]
185 prev = pctx.rev()
185 prev = pctx.rev()
186 children[prev] += 1
186 children[prev] += 1
187 p1distance[rev - prev] += 1
187 p1distance[rev - prev] += 1
188 parents[len(pl)] += 1
188 parents[len(pl)] += 1
189 tzoffset[ctx.date()[1]] += 1
189 tzoffset[ctx.date()[1]] += 1
190 if len(pl) > 1:
190 if len(pl) > 1:
191 p2distance[rev - pl[1].rev()] += 1
191 p2distance[rev - pl[1].rev()] += 1
192 if prev == rev - 1:
192 if prev == rev - 1:
193 lastctx = pctx
193 lastctx = pctx
194 else:
194 else:
195 lastctx = repo[rev - 1]
195 lastctx = repo[rev - 1]
196 if lastctx.rev() != nullrev:
196 if lastctx.rev() != nullrev:
197 timedelta = ctx.date()[0] - lastctx.date()[0]
197 timedelta = ctx.date()[0] - lastctx.date()[0]
198 interarrival[roundto(timedelta, 300)] += 1
198 interarrival[roundto(timedelta, 300)] += 1
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
202 if isbin:
202 if isbin:
203 continue
203 continue
204 added = sum(lineadd.itervalues(), 0)
204 added = sum(lineadd.itervalues(), 0)
205 if mar == 'm':
205 if mar == 'm':
206 if added and lineremove:
206 if added and lineremove:
207 lineschanged[roundto(added, 5),
207 lineschanged[roundto(added, 5),
208 roundto(lineremove, 5)] += 1
208 roundto(lineremove, 5)] += 1
209 filechanges += 1
209 filechanges += 1
210 elif mar == 'a':
210 elif mar == 'a':
211 fileadds += 1
211 fileadds += 1
212 if '/' in filename:
212 if '/' in filename:
213 filedir = filename.rsplit('/', 1)[0]
213 filedir = filename.rsplit('/', 1)[0]
214 if filedir not in pctx.dirs():
214 if filedir not in pctx.dirs():
215 diradds += 1
215 diradds += 1
216 linesinfilesadded[roundto(added, 5)] += 1
216 linesinfilesadded[roundto(added, 5)] += 1
217 elif mar == 'r':
217 elif mar == 'r':
218 fileremoves += 1
218 fileremoves += 1
219 for length, count in lineadd.iteritems():
219 for length, count in lineadd.iteritems():
220 linelengths[length] += count
220 linelengths[length] += count
221 fileschanged[filechanges] += 1
221 fileschanged[filechanges] += 1
222 filesadded[fileadds] += 1
222 filesadded[fileadds] += 1
223 dirsadded[diradds] += 1
223 dirsadded[diradds] += 1
224 filesremoved[fileremoves] += 1
224 filesremoved[fileremoves] += 1
225
225
226 invchildren = zerodict()
226 invchildren = zerodict()
227
227
228 for rev, count in children.iteritems():
228 for rev, count in children.iteritems():
229 invchildren[count] += 1
229 invchildren[count] += 1
230
230
231 if output != '-':
231 if output != '-':
232 ui.status(_('writing output to %s\n') % output)
232 ui.status(_('writing output to %s\n') % output)
233
233
234 def pronk(d):
234 def pronk(d):
235 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
235 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
236
236
237 json.dump({'revs': len(revs),
237 json.dump({'revs': len(revs),
238 'initdirs': pronk(dirs),
238 'initdirs': pronk(dirs),
239 'lineschanged': pronk(lineschanged),
239 'lineschanged': pronk(lineschanged),
240 'children': pronk(invchildren),
240 'children': pronk(invchildren),
241 'fileschanged': pronk(fileschanged),
241 'fileschanged': pronk(fileschanged),
242 'filesadded': pronk(filesadded),
242 'filesadded': pronk(filesadded),
243 'linesinfilesadded': pronk(linesinfilesadded),
243 'linesinfilesadded': pronk(linesinfilesadded),
244 'dirsadded': pronk(dirsadded),
244 'dirsadded': pronk(dirsadded),
245 'filesremoved': pronk(filesremoved),
245 'filesremoved': pronk(filesremoved),
246 'linelengths': pronk(linelengths),
246 'linelengths': pronk(linelengths),
247 'parents': pronk(parents),
247 'parents': pronk(parents),
248 'p1distance': pronk(p1distance),
248 'p1distance': pronk(p1distance),
249 'p2distance': pronk(p2distance),
249 'p2distance': pronk(p2distance),
250 'interarrival': pronk(interarrival),
250 'interarrival': pronk(interarrival),
251 'tzoffset': pronk(tzoffset),
251 'tzoffset': pronk(tzoffset),
252 },
252 },
253 fp)
253 fp)
254 fp.close()
254 fp.close()
255
255
256 @command('synthesize',
256 @command('synthesize',
257 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
257 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
258 ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
258 ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
259 ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
259 ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
260 _('hg synthesize [OPTION].. DESCFILE'))
260 _('hg synthesize [OPTION].. DESCFILE'))
261 def synthesize(ui, repo, descpath, **opts):
261 def synthesize(ui, repo, descpath, **opts):
262 '''synthesize commits based on a model of an existing repository
262 '''synthesize commits based on a model of an existing repository
263
263
264 The model must have been generated by :hg:`analyze`. Commits will
264 The model must have been generated by :hg:`analyze`. Commits will
265 be generated randomly according to the probabilities described in
265 be generated randomly according to the probabilities described in
266 the model. If --initfiles is set, the repository will be seeded with
266 the model. If --initfiles is set, the repository will be seeded with
267 the given number files following the modeled repository's directory
267 the given number files following the modeled repository's directory
268 structure.
268 structure.
269
269
270 When synthesizing new content, commit descriptions, and user
270 When synthesizing new content, commit descriptions, and user
271 names, words will be chosen randomly from a dictionary that is
271 names, words will be chosen randomly from a dictionary that is
272 presumed to contain one word per line. Use --dict to specify the
272 presumed to contain one word per line. Use --dict to specify the
273 path to an alternate dictionary to use.
273 path to an alternate dictionary to use.
274 '''
274 '''
275 try:
275 try:
276 fp = hg.openpath(ui, descpath)
276 fp = hg.openpath(ui, descpath)
277 except Exception as err:
277 except Exception as err:
278 raise error.Abort('%s: %s' % (descpath, err[0].strerror))
278 raise error.Abort('%s: %s' % (descpath, err[0].strerror))
279 desc = json.load(fp)
279 desc = json.load(fp)
280 fp.close()
280 fp.close()
281
281
282 def cdf(l):
282 def cdf(l):
283 if not l:
283 if not l:
284 return [], []
284 return [], []
285 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
285 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
286 t = float(sum(probs, 0))
286 t = float(sum(probs, 0))
287 s, cdfs = 0, []
287 s, cdfs = 0, []
288 for v in probs:
288 for v in probs:
289 s += v
289 s += v
290 cdfs.append(s / t)
290 cdfs.append(s / t)
291 return vals, cdfs
291 return vals, cdfs
292
292
293 lineschanged = cdf(desc['lineschanged'])
293 lineschanged = cdf(desc['lineschanged'])
294 fileschanged = cdf(desc['fileschanged'])
294 fileschanged = cdf(desc['fileschanged'])
295 filesadded = cdf(desc['filesadded'])
295 filesadded = cdf(desc['filesadded'])
296 dirsadded = cdf(desc['dirsadded'])
296 dirsadded = cdf(desc['dirsadded'])
297 filesremoved = cdf(desc['filesremoved'])
297 filesremoved = cdf(desc['filesremoved'])
298 linelengths = cdf(desc['linelengths'])
298 linelengths = cdf(desc['linelengths'])
299 parents = cdf(desc['parents'])
299 parents = cdf(desc['parents'])
300 p1distance = cdf(desc['p1distance'])
300 p1distance = cdf(desc['p1distance'])
301 p2distance = cdf(desc['p2distance'])
301 p2distance = cdf(desc['p2distance'])
302 interarrival = cdf(desc['interarrival'])
302 interarrival = cdf(desc['interarrival'])
303 linesinfilesadded = cdf(desc['linesinfilesadded'])
303 linesinfilesadded = cdf(desc['linesinfilesadded'])
304 tzoffset = cdf(desc['tzoffset'])
304 tzoffset = cdf(desc['tzoffset'])
305
305
306 dictfile = opts.get('dict') or '/usr/share/dict/words'
306 dictfile = opts.get('dict') or '/usr/share/dict/words'
307 try:
307 try:
308 fp = open(dictfile, 'rU')
308 fp = open(dictfile, 'rU')
309 except IOError as err:
309 except IOError as err:
310 raise error.Abort('%s: %s' % (dictfile, err.strerror))
310 raise error.Abort('%s: %s' % (dictfile, err.strerror))
311 words = fp.read().splitlines()
311 words = fp.read().splitlines()
312 fp.close()
312 fp.close()
313
313
314 initdirs = {}
314 initdirs = {}
315 if desc['initdirs']:
315 if desc['initdirs']:
316 for k, v in desc['initdirs']:
316 for k, v in desc['initdirs']:
317 initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
317 initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
318 initdirs = renamedirs(initdirs, words)
318 initdirs = renamedirs(initdirs, words)
319 initdirscdf = cdf(initdirs)
319 initdirscdf = cdf(initdirs)
320
320
321 def pick(cdf):
321 def pick(cdf):
322 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
322 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
323
323
324 def pickpath():
324 def pickpath():
325 return os.path.join(pick(initdirscdf), random.choice(words))
325 return os.path.join(pick(initdirscdf), random.choice(words))
326
326
327 def makeline(minimum=0):
327 def makeline(minimum=0):
328 total = max(minimum, pick(linelengths))
328 total = max(minimum, pick(linelengths))
329 c, l = 0, []
329 c, l = 0, []
330 while c < total:
330 while c < total:
331 w = random.choice(words)
331 w = random.choice(words)
332 c += len(w) + 1
332 c += len(w) + 1
333 l.append(w)
333 l.append(w)
334 return ' '.join(l)
334 return ' '.join(l)
335
335
336 wlock = repo.wlock()
336 wlock = repo.wlock()
337 lock = repo.lock()
337 lock = repo.lock()
338
338
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
340
340
341 progress = ui.progress
341 progress = ui.progress
342 _synthesizing = _('synthesizing')
342 _synthesizing = _('synthesizing')
343 _files = _('initial files')
343 _files = _('initial files')
344 _changesets = _('changesets')
344 _changesets = _('changesets')
345
345
346 # Synthesize a single initial revision adding files to the repo according
346 # Synthesize a single initial revision adding files to the repo according
347 # to the modeled directory structure.
347 # to the modeled directory structure.
348 initcount = int(opts['initfiles'])
348 initcount = int(opts['initfiles'])
349 if initcount and initdirs:
349 if initcount and initdirs:
350 pctx = repo[None].parents()[0]
350 pctx = repo[None].parents()[0]
351 dirs = set(pctx.dirs())
351 dirs = set(pctx.dirs())
352 files = {}
352 files = {}
353
353
354 def validpath(path):
354 def validpath(path):
355 # Don't pick filenames which are already directory names.
355 # Don't pick filenames which are already directory names.
356 if path in dirs:
356 if path in dirs:
357 return False
357 return False
358 # Don't pick directories which were used as file names.
358 # Don't pick directories which were used as file names.
359 while path:
359 while path:
360 if path in files:
360 if path in files:
361 return False
361 return False
362 path = os.path.dirname(path)
362 path = os.path.dirname(path)
363 return True
363 return True
364
364
365 for i in xrange(0, initcount):
365 for i in xrange(0, initcount):
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
367
367
368 path = pickpath()
368 path = pickpath()
369 while not validpath(path):
369 while not validpath(path):
370 path = pickpath()
370 path = pickpath()
371 data = '%s contents\n' % path
371 data = '%s contents\n' % path
372 files[path] = context.memfilectx(repo, path, data)
372 files[path] = context.memfilectx(repo, path, data)
373 dir = os.path.dirname(path)
373 dir = os.path.dirname(path)
374 while dir and dir not in dirs:
374 while dir and dir not in dirs:
375 dirs.add(dir)
375 dirs.add(dir)
376 dir = os.path.dirname(dir)
376 dir = os.path.dirname(dir)
377
377
378 def filectxfn(repo, memctx, path):
378 def filectxfn(repo, memctx, path):
379 return files[path]
379 return files[path]
380
380
381 ui.progress(_synthesizing, None)
381 ui.progress(_synthesizing, None)
382 message = 'synthesized wide repo with %d files' % (len(files),)
382 message = 'synthesized wide repo with %d files' % (len(files),)
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
384 files.iterkeys(), filectxfn, ui.username(),
384 files.iterkeys(), filectxfn, ui.username(),
385 '%d %d' % util.makedate())
385 '%d %d' % util.makedate())
386 initnode = mc.commit()
386 initnode = mc.commit()
387 if ui.debugflag:
387 if ui.debugflag:
388 hexfn = hex
388 hexfn = hex
389 else:
389 else:
390 hexfn = short
390 hexfn = short
391 ui.status(_('added commit %s with %d files\n')
391 ui.status(_('added commit %s with %d files\n')
392 % (hexfn(initnode), len(files)))
392 % (hexfn(initnode), len(files)))
393
393
394 # Synthesize incremental revisions to the repository, adding repo depth.
394 # Synthesize incremental revisions to the repository, adding repo depth.
395 count = int(opts['count'])
395 count = int(opts['count'])
396 heads = set(map(repo.changelog.rev, repo.heads()))
396 heads = set(map(repo.changelog.rev, repo.heads()))
397 for i in xrange(count):
397 for i in xrange(count):
398 progress(_synthesizing, i, unit=_changesets, total=count)
398 progress(_synthesizing, i, unit=_changesets, total=count)
399
399
400 node = repo.changelog.node
400 node = repo.changelog.node
401 revs = len(repo)
401 revs = len(repo)
402
402
403 def pickhead(heads, distance):
403 def pickhead(heads, distance):
404 if heads:
404 if heads:
405 lheads = sorted(heads)
405 lheads = sorted(heads)
406 rev = revs - min(pick(distance), revs)
406 rev = revs - min(pick(distance), revs)
407 if rev < lheads[-1]:
407 if rev < lheads[-1]:
408 rev = lheads[bisect.bisect_left(lheads, rev)]
408 rev = lheads[bisect.bisect_left(lheads, rev)]
409 else:
409 else:
410 rev = lheads[-1]
410 rev = lheads[-1]
411 return rev, node(rev)
411 return rev, node(rev)
412 return nullrev, nullid
412 return nullrev, nullid
413
413
414 r1 = revs - min(pick(p1distance), revs)
414 r1 = revs - min(pick(p1distance), revs)
415 p1 = node(r1)
415 p1 = node(r1)
416
416
417 # the number of heads will grow without bound if we use a pure
417 # the number of heads will grow without bound if we use a pure
418 # model, so artificially constrain their proliferation
418 # model, so artificially constrain their proliferation
419 toomanyheads = len(heads) > random.randint(1, 20)
419 toomanyheads = len(heads) > random.randint(1, 20)
420 if p2distance[0] and (pick(parents) == 2 or toomanyheads):
420 if p2distance[0] and (pick(parents) == 2 or toomanyheads):
421 r2, p2 = pickhead(heads.difference([r1]), p2distance)
421 r2, p2 = pickhead(heads.difference([r1]), p2distance)
422 else:
422 else:
423 r2, p2 = nullrev, nullid
423 r2, p2 = nullrev, nullid
424
424
425 pl = [p1, p2]
425 pl = [p1, p2]
426 pctx = repo[r1]
426 pctx = repo[r1]
427 mf = pctx.manifest()
427 mf = pctx.manifest()
428 mfk = mf.keys()
428 mfk = mf.keys()
429 changes = {}
429 changes = {}
430 if mfk:
430 if mfk:
431 for __ in xrange(pick(fileschanged)):
431 for __ in xrange(pick(fileschanged)):
432 for __ in xrange(10):
432 for __ in xrange(10):
433 fctx = pctx.filectx(random.choice(mfk))
433 fctx = pctx.filectx(random.choice(mfk))
434 path = fctx.path()
434 path = fctx.path()
435 if not (path in nevertouch or fctx.isbinary() or
435 if not (path in nevertouch or fctx.isbinary() or
436 'l' in fctx.flags()):
436 'l' in fctx.flags()):
437 break
437 break
438 lines = fctx.data().splitlines()
438 lines = fctx.data().splitlines()
439 add, remove = pick(lineschanged)
439 add, remove = pick(lineschanged)
440 for __ in xrange(remove):
440 for __ in xrange(remove):
441 if not lines:
441 if not lines:
442 break
442 break
443 del lines[random.randrange(0, len(lines))]
443 del lines[random.randrange(0, len(lines))]
444 for __ in xrange(add):
444 for __ in xrange(add):
445 lines.insert(random.randint(0, len(lines)), makeline())
445 lines.insert(random.randint(0, len(lines)), makeline())
446 path = fctx.path()
446 path = fctx.path()
447 changes[path] = context.memfilectx(repo, path,
447 changes[path] = context.memfilectx(repo, path,
448 '\n'.join(lines) + '\n')
448 '\n'.join(lines) + '\n')
449 for __ in xrange(pick(filesremoved)):
449 for __ in xrange(pick(filesremoved)):
450 path = random.choice(mfk)
450 path = random.choice(mfk)
451 for __ in xrange(10):
451 for __ in xrange(10):
452 path = random.choice(mfk)
452 path = random.choice(mfk)
453 if path not in changes:
453 if path not in changes:
454 changes[path] = None
454 changes[path] = None
455 break
455 break
456 if filesadded:
456 if filesadded:
457 dirs = list(pctx.dirs())
457 dirs = list(pctx.dirs())
458 dirs.insert(0, '')
458 dirs.insert(0, '')
459 for __ in xrange(pick(filesadded)):
459 for __ in xrange(pick(filesadded)):
460 pathstr = ''
460 pathstr = ''
461 while pathstr in dirs:
461 while pathstr in dirs:
462 path = [random.choice(dirs)]
462 path = [random.choice(dirs)]
463 if pick(dirsadded):
463 if pick(dirsadded):
464 path.append(random.choice(words))
464 path.append(random.choice(words))
465 path.append(random.choice(words))
465 path.append(random.choice(words))
466 pathstr = '/'.join(filter(None, path))
466 pathstr = '/'.join(filter(None, path))
467 data = '\n'.join(makeline()
467 data = '\n'.join(makeline()
468 for __ in xrange(pick(linesinfilesadded))) + '\n'
468 for __ in xrange(pick(linesinfilesadded))) + '\n'
469 changes[pathstr] = context.memfilectx(repo, pathstr, data)
469 changes[pathstr] = context.memfilectx(repo, pathstr, data)
470 def filectxfn(repo, memctx, path):
470 def filectxfn(repo, memctx, path):
471 return changes[path]
471 return changes[path]
472 if not changes:
472 if not changes:
473 continue
473 continue
474 if revs:
474 if revs:
475 date = repo['tip'].date()[0] + pick(interarrival)
475 date = repo['tip'].date()[0] + pick(interarrival)
476 else:
476 else:
477 date = time.time() - (86400 * count)
477 date = time.time() - (86400 * count)
478 # dates in mercurial must be positive, fit in 32-bit signed integers.
478 # dates in mercurial must be positive, fit in 32-bit signed integers.
479 date = min(0x7fffffff, max(0, date))
479 date = min(0x7fffffff, max(0, date))
480 user = random.choice(words) + '@' + random.choice(words)
480 user = random.choice(words) + '@' + random.choice(words)
481 mc = context.memctx(repo, pl, makeline(minimum=2),
481 mc = context.memctx(repo, pl, makeline(minimum=2),
482 sorted(changes.iterkeys()),
482 sorted(changes.iterkeys()),
483 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
483 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
484 newnode = mc.commit()
484 newnode = mc.commit()
485 heads.add(repo.changelog.rev(newnode))
485 heads.add(repo.changelog.rev(newnode))
486 heads.discard(r1)
486 heads.discard(r1)
487 heads.discard(r2)
487 heads.discard(r2)
488
488
489 lock.release()
489 lock.release()
490 wlock.release()
490 wlock.release()
491
491
492 def renamedirs(dirs, words):
492 def renamedirs(dirs, words):
493 '''Randomly rename the directory names in the per-dir file count dict.'''
493 '''Randomly rename the directory names in the per-dir file count dict.'''
494 wordgen = itertools.cycle(words)
494 wordgen = itertools.cycle(words)
495 replacements = {'': ''}
495 replacements = {'': ''}
496 def rename(dirpath):
496 def rename(dirpath):
497 '''Recursively rename the directory and all path prefixes.
497 '''Recursively rename the directory and all path prefixes.
498
498
499 The mapping from path to renamed path is stored for all path prefixes
499 The mapping from path to renamed path is stored for all path prefixes
500 as in dynamic programming, ensuring linear runtime and consistent
500 as in dynamic programming, ensuring linear runtime and consistent
501 renaming regardless of iteration order through the model.
501 renaming regardless of iteration order through the model.
502 '''
502 '''
503 if dirpath in replacements:
503 if dirpath in replacements:
504 return replacements[dirpath]
504 return replacements[dirpath]
505 head, _ = os.path.split(dirpath)
505 head, _ = os.path.split(dirpath)
506 if head:
506 if head:
507 head = rename(head)
507 head = rename(head)
508 else:
508 else:
509 head = ''
509 head = ''
510 renamed = os.path.join(head, next(wordgen))
510 renamed = os.path.join(head, next(wordgen))
511 replacements[dirpath] = renamed
511 replacements[dirpath] = renamed
512 return renamed
512 return renamed
513 result = []
513 result = []
514 for dirpath, count in dirs.iteritems():
514 for dirpath, count in dirs.iteritems():
515 result.append([rename(dirpath.lstrip(os.sep)), count])
515 result.append([rename(dirpath.lstrip(os.sep)), count])
516 return result
516 return result
@@ -1,250 +1,250 b''
1 # blackbox.py - log repository events to a file for post-mortem debugging
1 # blackbox.py - log repository events to a file for post-mortem debugging
2 #
2 #
3 # Copyright 2010 Nicolas Dumazet
3 # Copyright 2010 Nicolas Dumazet
4 # Copyright 2013 Facebook, Inc.
4 # Copyright 2013 Facebook, Inc.
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """log repository events to a blackbox for debugging
9 """log repository events to a blackbox for debugging
10
10
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 The events that get logged can be configured via the blackbox.track config key.
12 The events that get logged can be configured via the blackbox.track config key.
13
13
14 Examples::
14 Examples::
15
15
16 [blackbox]
16 [blackbox]
17 track = *
17 track = *
18 # dirty is *EXPENSIVE* (slow);
18 # dirty is *EXPENSIVE* (slow);
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
20 dirty = True
20 dirty = True
21 # record the source of log messages
21 # record the source of log messages
22 logsource = True
22 logsource = True
23
23
24 [blackbox]
24 [blackbox]
25 track = command, commandfinish, commandexception, exthook, pythonhook
25 track = command, commandfinish, commandexception, exthook, pythonhook
26
26
27 [blackbox]
27 [blackbox]
28 track = incoming
28 track = incoming
29
29
30 [blackbox]
30 [blackbox]
31 # limit the size of a log file
31 # limit the size of a log file
32 maxsize = 1.5 MB
32 maxsize = 1.5 MB
33 # rotate up to N log files when the current one gets too big
33 # rotate up to N log files when the current one gets too big
34 maxfiles = 3
34 maxfiles = 3
35
35
36 """
36 """
37
37
38 from __future__ import absolute_import
38 from __future__ import absolute_import
39
39
40 import errno
40 import errno
41 import re
41 import re
42
42
43 from mercurial.i18n import _
43 from mercurial.i18n import _
44 from mercurial.node import hex
44 from mercurial.node import hex
45
45
46 from mercurial import (
46 from mercurial import (
47 cmdutil,
47 registrar,
48 ui as uimod,
48 ui as uimod,
49 util,
49 util,
50 )
50 )
51
51
52 cmdtable = {}
52 cmdtable = {}
53 command = cmdutil.command(cmdtable)
53 command = registrar.command(cmdtable)
54 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
54 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
55 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
55 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
56 # be specifying the version(s) of Mercurial they are tested with, or
56 # be specifying the version(s) of Mercurial they are tested with, or
57 # leave the attribute unspecified.
57 # leave the attribute unspecified.
58 testedwith = 'ships-with-hg-core'
58 testedwith = 'ships-with-hg-core'
59 lastui = None
59 lastui = None
60
60
61 filehandles = {}
61 filehandles = {}
62
62
63 def _openlog(vfs):
63 def _openlog(vfs):
64 path = vfs.join('blackbox.log')
64 path = vfs.join('blackbox.log')
65 if path in filehandles:
65 if path in filehandles:
66 return filehandles[path]
66 return filehandles[path]
67 filehandles[path] = fp = vfs('blackbox.log', 'a')
67 filehandles[path] = fp = vfs('blackbox.log', 'a')
68 return fp
68 return fp
69
69
70 def _closelog(vfs):
70 def _closelog(vfs):
71 path = vfs.join('blackbox.log')
71 path = vfs.join('blackbox.log')
72 fp = filehandles[path]
72 fp = filehandles[path]
73 del filehandles[path]
73 del filehandles[path]
74 fp.close()
74 fp.close()
75
75
76 def wrapui(ui):
76 def wrapui(ui):
77 class blackboxui(ui.__class__):
77 class blackboxui(ui.__class__):
78 def __init__(self, src=None):
78 def __init__(self, src=None):
79 super(blackboxui, self).__init__(src)
79 super(blackboxui, self).__init__(src)
80 if src is None:
80 if src is None:
81 self._partialinit()
81 self._partialinit()
82 else:
82 else:
83 self._bbfp = getattr(src, '_bbfp', None)
83 self._bbfp = getattr(src, '_bbfp', None)
84 self._bbinlog = False
84 self._bbinlog = False
85 self._bbrepo = getattr(src, '_bbrepo', None)
85 self._bbrepo = getattr(src, '_bbrepo', None)
86 self._bbvfs = getattr(src, '_bbvfs', None)
86 self._bbvfs = getattr(src, '_bbvfs', None)
87
87
88 def _partialinit(self):
88 def _partialinit(self):
89 if util.safehasattr(self, '_bbvfs'):
89 if util.safehasattr(self, '_bbvfs'):
90 return
90 return
91 self._bbfp = None
91 self._bbfp = None
92 self._bbinlog = False
92 self._bbinlog = False
93 self._bbrepo = None
93 self._bbrepo = None
94 self._bbvfs = None
94 self._bbvfs = None
95
95
96 def copy(self):
96 def copy(self):
97 self._partialinit()
97 self._partialinit()
98 return self.__class__(self)
98 return self.__class__(self)
99
99
100 @util.propertycache
100 @util.propertycache
101 def track(self):
101 def track(self):
102 return self.configlist('blackbox', 'track', ['*'])
102 return self.configlist('blackbox', 'track', ['*'])
103
103
104 def _openlogfile(self):
104 def _openlogfile(self):
105 def rotate(oldpath, newpath):
105 def rotate(oldpath, newpath):
106 try:
106 try:
107 self._bbvfs.unlink(newpath)
107 self._bbvfs.unlink(newpath)
108 except OSError as err:
108 except OSError as err:
109 if err.errno != errno.ENOENT:
109 if err.errno != errno.ENOENT:
110 self.debug("warning: cannot remove '%s': %s\n" %
110 self.debug("warning: cannot remove '%s': %s\n" %
111 (newpath, err.strerror))
111 (newpath, err.strerror))
112 try:
112 try:
113 if newpath:
113 if newpath:
114 self._bbvfs.rename(oldpath, newpath)
114 self._bbvfs.rename(oldpath, newpath)
115 except OSError as err:
115 except OSError as err:
116 if err.errno != errno.ENOENT:
116 if err.errno != errno.ENOENT:
117 self.debug("warning: cannot rename '%s' to '%s': %s\n" %
117 self.debug("warning: cannot rename '%s' to '%s': %s\n" %
118 (newpath, oldpath, err.strerror))
118 (newpath, oldpath, err.strerror))
119
119
120 fp = _openlog(self._bbvfs)
120 fp = _openlog(self._bbvfs)
121 maxsize = self.configbytes('blackbox', 'maxsize', 1048576)
121 maxsize = self.configbytes('blackbox', 'maxsize', 1048576)
122 if maxsize > 0:
122 if maxsize > 0:
123 st = self._bbvfs.fstat(fp)
123 st = self._bbvfs.fstat(fp)
124 if st.st_size >= maxsize:
124 if st.st_size >= maxsize:
125 path = fp.name
125 path = fp.name
126 _closelog(self._bbvfs)
126 _closelog(self._bbvfs)
127 maxfiles = self.configint('blackbox', 'maxfiles', 7)
127 maxfiles = self.configint('blackbox', 'maxfiles', 7)
128 for i in xrange(maxfiles - 1, 1, -1):
128 for i in xrange(maxfiles - 1, 1, -1):
129 rotate(oldpath='%s.%d' % (path, i - 1),
129 rotate(oldpath='%s.%d' % (path, i - 1),
130 newpath='%s.%d' % (path, i))
130 newpath='%s.%d' % (path, i))
131 rotate(oldpath=path,
131 rotate(oldpath=path,
132 newpath=maxfiles > 0 and path + '.1')
132 newpath=maxfiles > 0 and path + '.1')
133 fp = _openlog(self._bbvfs)
133 fp = _openlog(self._bbvfs)
134 return fp
134 return fp
135
135
136 def _bbwrite(self, fmt, *args):
136 def _bbwrite(self, fmt, *args):
137 self._bbfp.write(fmt % args)
137 self._bbfp.write(fmt % args)
138 self._bbfp.flush()
138 self._bbfp.flush()
139
139
140 def log(self, event, *msg, **opts):
140 def log(self, event, *msg, **opts):
141 global lastui
141 global lastui
142 super(blackboxui, self).log(event, *msg, **opts)
142 super(blackboxui, self).log(event, *msg, **opts)
143 self._partialinit()
143 self._partialinit()
144
144
145 if not '*' in self.track and not event in self.track:
145 if not '*' in self.track and not event in self.track:
146 return
146 return
147
147
148 if self._bbfp:
148 if self._bbfp:
149 ui = self
149 ui = self
150 elif self._bbvfs:
150 elif self._bbvfs:
151 try:
151 try:
152 self._bbfp = self._openlogfile()
152 self._bbfp = self._openlogfile()
153 except (IOError, OSError) as err:
153 except (IOError, OSError) as err:
154 self.debug('warning: cannot write to blackbox.log: %s\n' %
154 self.debug('warning: cannot write to blackbox.log: %s\n' %
155 err.strerror)
155 err.strerror)
156 del self._bbvfs
156 del self._bbvfs
157 self._bbfp = None
157 self._bbfp = None
158 ui = self
158 ui = self
159 else:
159 else:
160 # certain ui instances exist outside the context of
160 # certain ui instances exist outside the context of
161 # a repo, so just default to the last blackbox that
161 # a repo, so just default to the last blackbox that
162 # was seen.
162 # was seen.
163 ui = lastui
163 ui = lastui
164
164
165 if not ui or not ui._bbfp:
165 if not ui or not ui._bbfp:
166 return
166 return
167 if not lastui or ui._bbrepo:
167 if not lastui or ui._bbrepo:
168 lastui = ui
168 lastui = ui
169 if ui._bbinlog:
169 if ui._bbinlog:
170 # recursion guard
170 # recursion guard
171 return
171 return
172 try:
172 try:
173 ui._bbinlog = True
173 ui._bbinlog = True
174 date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
174 date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
175 user = util.getuser()
175 user = util.getuser()
176 pid = '%d' % util.getpid()
176 pid = '%d' % util.getpid()
177 formattedmsg = msg[0] % msg[1:]
177 formattedmsg = msg[0] % msg[1:]
178 rev = '(unknown)'
178 rev = '(unknown)'
179 changed = ''
179 changed = ''
180 if ui._bbrepo:
180 if ui._bbrepo:
181 ctx = ui._bbrepo[None]
181 ctx = ui._bbrepo[None]
182 parents = ctx.parents()
182 parents = ctx.parents()
183 rev = ('+'.join([hex(p.node()) for p in parents]))
183 rev = ('+'.join([hex(p.node()) for p in parents]))
184 if (ui.configbool('blackbox', 'dirty', False) and (
184 if (ui.configbool('blackbox', 'dirty', False) and (
185 any(ui._bbrepo.status()) or
185 any(ui._bbrepo.status()) or
186 any(ctx.sub(s).dirty() for s in ctx.substate)
186 any(ctx.sub(s).dirty() for s in ctx.substate)
187 )):
187 )):
188 changed = '+'
188 changed = '+'
189 if ui.configbool('blackbox', 'logsource', False):
189 if ui.configbool('blackbox', 'logsource', False):
190 src = ' [%s]' % event
190 src = ' [%s]' % event
191 else:
191 else:
192 src = ''
192 src = ''
193 try:
193 try:
194 ui._bbwrite('%s %s @%s%s (%s)%s> %s',
194 ui._bbwrite('%s %s @%s%s (%s)%s> %s',
195 date, user, rev, changed, pid, src, formattedmsg)
195 date, user, rev, changed, pid, src, formattedmsg)
196 except IOError as err:
196 except IOError as err:
197 self.debug('warning: cannot write to blackbox.log: %s\n' %
197 self.debug('warning: cannot write to blackbox.log: %s\n' %
198 err.strerror)
198 err.strerror)
199 finally:
199 finally:
200 ui._bbinlog = False
200 ui._bbinlog = False
201
201
202 def setrepo(self, repo):
202 def setrepo(self, repo):
203 self._bbfp = None
203 self._bbfp = None
204 self._bbinlog = False
204 self._bbinlog = False
205 self._bbrepo = repo
205 self._bbrepo = repo
206 self._bbvfs = repo.vfs
206 self._bbvfs = repo.vfs
207
207
208 ui.__class__ = blackboxui
208 ui.__class__ = blackboxui
209 uimod.ui = blackboxui
209 uimod.ui = blackboxui
210
210
211 def uisetup(ui):
211 def uisetup(ui):
212 wrapui(ui)
212 wrapui(ui)
213
213
214 def reposetup(ui, repo):
214 def reposetup(ui, repo):
215 # During 'hg pull' a httppeer repo is created to represent the remote repo.
215 # During 'hg pull' a httppeer repo is created to represent the remote repo.
216 # It doesn't have a .hg directory to put a blackbox in, so we don't do
216 # It doesn't have a .hg directory to put a blackbox in, so we don't do
217 # the blackbox setup for it.
217 # the blackbox setup for it.
218 if not repo.local():
218 if not repo.local():
219 return
219 return
220
220
221 if util.safehasattr(ui, 'setrepo'):
221 if util.safehasattr(ui, 'setrepo'):
222 ui.setrepo(repo)
222 ui.setrepo(repo)
223
223
224 @command('^blackbox',
224 @command('^blackbox',
225 [('l', 'limit', 10, _('the number of events to show')),
225 [('l', 'limit', 10, _('the number of events to show')),
226 ],
226 ],
227 _('hg blackbox [OPTION]...'))
227 _('hg blackbox [OPTION]...'))
228 def blackbox(ui, repo, *revs, **opts):
228 def blackbox(ui, repo, *revs, **opts):
229 '''view the recent repository events
229 '''view the recent repository events
230 '''
230 '''
231
231
232 if not repo.vfs.exists('blackbox.log'):
232 if not repo.vfs.exists('blackbox.log'):
233 return
233 return
234
234
235 limit = opts.get('limit')
235 limit = opts.get('limit')
236 fp = repo.vfs('blackbox.log', 'r')
236 fp = repo.vfs('blackbox.log', 'r')
237 lines = fp.read().split('\n')
237 lines = fp.read().split('\n')
238
238
239 count = 0
239 count = 0
240 output = []
240 output = []
241 for line in reversed(lines):
241 for line in reversed(lines):
242 if count >= limit:
242 if count >= limit:
243 break
243 break
244
244
245 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
245 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
246 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
246 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
247 count += 1
247 count += 1
248 output.append(line)
248 output.append(line)
249
249
250 ui.status('\n'.join(reversed(output)))
250 ui.status('\n'.join(reversed(output)))
@@ -1,190 +1,190 b''
1 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
1 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
2 #
2 #
3 # This extension enables removal of file content at a given revision,
3 # This extension enables removal of file content at a given revision,
4 # rewriting the data/metadata of successive revisions to preserve revision log
4 # rewriting the data/metadata of successive revisions to preserve revision log
5 # integrity.
5 # integrity.
6
6
7 """erase file content at a given revision
7 """erase file content at a given revision
8
8
9 The censor command instructs Mercurial to erase all content of a file at a given
9 The censor command instructs Mercurial to erase all content of a file at a given
10 revision *without updating the changeset hash.* This allows existing history to
10 revision *without updating the changeset hash.* This allows existing history to
11 remain valid while preventing future clones/pulls from receiving the erased
11 remain valid while preventing future clones/pulls from receiving the erased
12 data.
12 data.
13
13
14 Typical uses for censor are due to security or legal requirements, including::
14 Typical uses for censor are due to security or legal requirements, including::
15
15
16 * Passwords, private keys, cryptographic material
16 * Passwords, private keys, cryptographic material
17 * Licensed data/code/libraries for which the license has expired
17 * Licensed data/code/libraries for which the license has expired
18 * Personally Identifiable Information or other private data
18 * Personally Identifiable Information or other private data
19
19
20 Censored nodes can interrupt mercurial's typical operation whenever the excised
20 Censored nodes can interrupt mercurial's typical operation whenever the excised
21 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
21 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
22 simply fail when asked to produce censored data. Others, like ``hg verify`` and
22 simply fail when asked to produce censored data. Others, like ``hg verify`` and
23 ``hg update``, must be capable of tolerating censored data to continue to
23 ``hg update``, must be capable of tolerating censored data to continue to
24 function in a meaningful way. Such commands only tolerate censored file
24 function in a meaningful way. Such commands only tolerate censored file
25 revisions if they are allowed by the "censor.policy=ignore" config option.
25 revisions if they are allowed by the "censor.policy=ignore" config option.
26 """
26 """
27
27
28 from __future__ import absolute_import
28 from __future__ import absolute_import
29
29
30 from mercurial.i18n import _
30 from mercurial.i18n import _
31 from mercurial.node import short
31 from mercurial.node import short
32
32
33 from mercurial import (
33 from mercurial import (
34 cmdutil,
35 error,
34 error,
36 filelog,
35 filelog,
37 lock as lockmod,
36 lock as lockmod,
37 registrar,
38 revlog,
38 revlog,
39 scmutil,
39 scmutil,
40 util,
40 util,
41 )
41 )
42
42
43 cmdtable = {}
43 cmdtable = {}
44 command = cmdutil.command(cmdtable)
44 command = registrar.command(cmdtable)
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
47 # be specifying the version(s) of Mercurial they are tested with, or
47 # be specifying the version(s) of Mercurial they are tested with, or
48 # leave the attribute unspecified.
48 # leave the attribute unspecified.
49 testedwith = 'ships-with-hg-core'
49 testedwith = 'ships-with-hg-core'
50
50
51 @command('censor',
51 @command('censor',
52 [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
52 [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
53 ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
53 ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
54 _('-r REV [-t TEXT] [FILE]'))
54 _('-r REV [-t TEXT] [FILE]'))
55 def censor(ui, repo, path, rev='', tombstone='', **opts):
55 def censor(ui, repo, path, rev='', tombstone='', **opts):
56 wlock = lock = None
56 wlock = lock = None
57 try:
57 try:
58 wlock = repo.wlock()
58 wlock = repo.wlock()
59 lock = repo.lock()
59 lock = repo.lock()
60 return _docensor(ui, repo, path, rev, tombstone, **opts)
60 return _docensor(ui, repo, path, rev, tombstone, **opts)
61 finally:
61 finally:
62 lockmod.release(lock, wlock)
62 lockmod.release(lock, wlock)
63
63
64 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
64 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
65 if not path:
65 if not path:
66 raise error.Abort(_('must specify file path to censor'))
66 raise error.Abort(_('must specify file path to censor'))
67 if not rev:
67 if not rev:
68 raise error.Abort(_('must specify revision to censor'))
68 raise error.Abort(_('must specify revision to censor'))
69
69
70 wctx = repo[None]
70 wctx = repo[None]
71
71
72 m = scmutil.match(wctx, (path,))
72 m = scmutil.match(wctx, (path,))
73 if m.anypats() or len(m.files()) != 1:
73 if m.anypats() or len(m.files()) != 1:
74 raise error.Abort(_('can only specify an explicit filename'))
74 raise error.Abort(_('can only specify an explicit filename'))
75 path = m.files()[0]
75 path = m.files()[0]
76 flog = repo.file(path)
76 flog = repo.file(path)
77 if not len(flog):
77 if not len(flog):
78 raise error.Abort(_('cannot censor file with no history'))
78 raise error.Abort(_('cannot censor file with no history'))
79
79
80 rev = scmutil.revsingle(repo, rev, rev).rev()
80 rev = scmutil.revsingle(repo, rev, rev).rev()
81 try:
81 try:
82 ctx = repo[rev]
82 ctx = repo[rev]
83 except KeyError:
83 except KeyError:
84 raise error.Abort(_('invalid revision identifier %s') % rev)
84 raise error.Abort(_('invalid revision identifier %s') % rev)
85
85
86 try:
86 try:
87 fctx = ctx.filectx(path)
87 fctx = ctx.filectx(path)
88 except error.LookupError:
88 except error.LookupError:
89 raise error.Abort(_('file does not exist at revision %s') % rev)
89 raise error.Abort(_('file does not exist at revision %s') % rev)
90
90
91 fnode = fctx.filenode()
91 fnode = fctx.filenode()
92 headctxs = [repo[c] for c in repo.heads()]
92 headctxs = [repo[c] for c in repo.heads()]
93 heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
93 heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
94 if heads:
94 if heads:
95 headlist = ', '.join([short(c.node()) for c in heads])
95 headlist = ', '.join([short(c.node()) for c in heads])
96 raise error.Abort(_('cannot censor file in heads (%s)') % headlist,
96 raise error.Abort(_('cannot censor file in heads (%s)') % headlist,
97 hint=_('clean/delete and commit first'))
97 hint=_('clean/delete and commit first'))
98
98
99 wp = wctx.parents()
99 wp = wctx.parents()
100 if ctx.node() in [p.node() for p in wp]:
100 if ctx.node() in [p.node() for p in wp]:
101 raise error.Abort(_('cannot censor working directory'),
101 raise error.Abort(_('cannot censor working directory'),
102 hint=_('clean/delete/update first'))
102 hint=_('clean/delete/update first'))
103
103
104 flogv = flog.version & 0xFFFF
104 flogv = flog.version & 0xFFFF
105 if flogv != revlog.REVLOGV1:
105 if flogv != revlog.REVLOGV1:
106 raise error.Abort(
106 raise error.Abort(
107 _('censor does not support revlog version %d') % (flogv,))
107 _('censor does not support revlog version %d') % (flogv,))
108
108
109 tombstone = filelog.packmeta({"censored": tombstone}, "")
109 tombstone = filelog.packmeta({"censored": tombstone}, "")
110
110
111 crev = fctx.filerev()
111 crev = fctx.filerev()
112
112
113 if len(tombstone) > flog.rawsize(crev):
113 if len(tombstone) > flog.rawsize(crev):
114 raise error.Abort(_(
114 raise error.Abort(_(
115 'censor tombstone must be no longer than censored data'))
115 'censor tombstone must be no longer than censored data'))
116
116
117 # Using two files instead of one makes it easy to rewrite entry-by-entry
117 # Using two files instead of one makes it easy to rewrite entry-by-entry
118 idxread = repo.svfs(flog.indexfile, 'r')
118 idxread = repo.svfs(flog.indexfile, 'r')
119 idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
119 idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
120 if flog.version & revlog.FLAG_INLINE_DATA:
120 if flog.version & revlog.FLAG_INLINE_DATA:
121 dataread, datawrite = idxread, idxwrite
121 dataread, datawrite = idxread, idxwrite
122 else:
122 else:
123 dataread = repo.svfs(flog.datafile, 'r')
123 dataread = repo.svfs(flog.datafile, 'r')
124 datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)
124 datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)
125
125
126 # Copy all revlog data up to the entry to be censored.
126 # Copy all revlog data up to the entry to be censored.
127 rio = revlog.revlogio()
127 rio = revlog.revlogio()
128 offset = flog.start(crev)
128 offset = flog.start(crev)
129
129
130 for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
130 for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
131 idxwrite.write(chunk)
131 idxwrite.write(chunk)
132 for chunk in util.filechunkiter(dataread, limit=offset):
132 for chunk in util.filechunkiter(dataread, limit=offset):
133 datawrite.write(chunk)
133 datawrite.write(chunk)
134
134
135 def rewriteindex(r, newoffs, newdata=None):
135 def rewriteindex(r, newoffs, newdata=None):
136 """Rewrite the index entry with a new data offset and optional new data.
136 """Rewrite the index entry with a new data offset and optional new data.
137
137
138 The newdata argument, if given, is a tuple of three positive integers:
138 The newdata argument, if given, is a tuple of three positive integers:
139 (new compressed, new uncompressed, added flag bits).
139 (new compressed, new uncompressed, added flag bits).
140 """
140 """
141 offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
141 offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
142 flags = revlog.gettype(offlags)
142 flags = revlog.gettype(offlags)
143 if newdata:
143 if newdata:
144 comp, uncomp, nflags = newdata
144 comp, uncomp, nflags = newdata
145 flags |= nflags
145 flags |= nflags
146 offlags = revlog.offset_type(newoffs, flags)
146 offlags = revlog.offset_type(newoffs, flags)
147 e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
147 e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
148 idxwrite.write(rio.packentry(e, None, flog.version, r))
148 idxwrite.write(rio.packentry(e, None, flog.version, r))
149 idxread.seek(rio.size, 1)
149 idxread.seek(rio.size, 1)
150
150
151 def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
151 def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
152 """Write the given full text to the filelog with the given data offset.
152 """Write the given full text to the filelog with the given data offset.
153
153
154 Returns:
154 Returns:
155 The integer number of data bytes written, for tracking data offsets.
155 The integer number of data bytes written, for tracking data offsets.
156 """
156 """
157 flag, compdata = flog.compress(data)
157 flag, compdata = flog.compress(data)
158 newcomp = len(flag) + len(compdata)
158 newcomp = len(flag) + len(compdata)
159 rewriteindex(r, offs, (newcomp, len(data), nflags))
159 rewriteindex(r, offs, (newcomp, len(data), nflags))
160 datawrite.write(flag)
160 datawrite.write(flag)
161 datawrite.write(compdata)
161 datawrite.write(compdata)
162 dataread.seek(flog.length(r), 1)
162 dataread.seek(flog.length(r), 1)
163 return newcomp
163 return newcomp
164
164
165 # Rewrite censored revlog entry with (padded) tombstone data.
165 # Rewrite censored revlog entry with (padded) tombstone data.
166 pad = ' ' * (flog.rawsize(crev) - len(tombstone))
166 pad = ' ' * (flog.rawsize(crev) - len(tombstone))
167 offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)
167 offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)
168
168
169 # Rewrite all following filelog revisions fixing up offsets and deltas.
169 # Rewrite all following filelog revisions fixing up offsets and deltas.
170 for srev in xrange(crev + 1, len(flog)):
170 for srev in xrange(crev + 1, len(flog)):
171 if crev in flog.parentrevs(srev):
171 if crev in flog.parentrevs(srev):
172 # Immediate children of censored node must be re-added as fulltext.
172 # Immediate children of censored node must be re-added as fulltext.
173 try:
173 try:
174 revdata = flog.revision(srev)
174 revdata = flog.revision(srev)
175 except error.CensoredNodeError as e:
175 except error.CensoredNodeError as e:
176 revdata = e.tombstone
176 revdata = e.tombstone
177 dlen = rewrite(srev, offset, revdata)
177 dlen = rewrite(srev, offset, revdata)
178 else:
178 else:
179 # Copy any other revision data verbatim after fixing up the offset.
179 # Copy any other revision data verbatim after fixing up the offset.
180 rewriteindex(srev, offset)
180 rewriteindex(srev, offset)
181 dlen = flog.length(srev)
181 dlen = flog.length(srev)
182 for chunk in util.filechunkiter(dataread, limit=dlen):
182 for chunk in util.filechunkiter(dataread, limit=dlen):
183 datawrite.write(chunk)
183 datawrite.write(chunk)
184 offset += dlen
184 offset += dlen
185
185
186 idxread.close()
186 idxread.close()
187 idxwrite.close()
187 idxwrite.close()
188 if dataread is not idxread:
188 if dataread is not idxread:
189 dataread.close()
189 dataread.close()
190 datawrite.close()
190 datawrite.close()
@@ -1,69 +1,70 b''
1 # Mercurial extension to provide the 'hg children' command
1 # Mercurial extension to provide the 'hg children' command
2 #
2 #
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 #
4 #
5 # Author(s):
5 # Author(s):
6 # Thomas Arendsen Hein <thomas@intevation.de>
6 # Thomas Arendsen Hein <thomas@intevation.de>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2 or any later version.
9 # GNU General Public License version 2 or any later version.
10
10
11 '''command to display child changesets (DEPRECATED)
11 '''command to display child changesets (DEPRECATED)
12
12
13 This extension is deprecated. You should use :hg:`log -r
13 This extension is deprecated. You should use :hg:`log -r
14 "children(REV)"` instead.
14 "children(REV)"` instead.
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 from mercurial.i18n import _
19 from mercurial.i18n import _
20 from mercurial import (
20 from mercurial import (
21 cmdutil,
21 cmdutil,
22 commands,
22 commands,
23 registrar,
23 )
24 )
24
25
25 templateopts = commands.templateopts
26 templateopts = commands.templateopts
26
27
27 cmdtable = {}
28 cmdtable = {}
28 command = cmdutil.command(cmdtable)
29 command = registrar.command(cmdtable)
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # be specifying the version(s) of Mercurial they are tested with, or
32 # be specifying the version(s) of Mercurial they are tested with, or
32 # leave the attribute unspecified.
33 # leave the attribute unspecified.
33 testedwith = 'ships-with-hg-core'
34 testedwith = 'ships-with-hg-core'
34
35
35 @command('children',
36 @command('children',
36 [('r', 'rev', '',
37 [('r', 'rev', '',
37 _('show children of the specified revision'), _('REV')),
38 _('show children of the specified revision'), _('REV')),
38 ] + templateopts,
39 ] + templateopts,
39 _('hg children [-r REV] [FILE]'),
40 _('hg children [-r REV] [FILE]'),
40 inferrepo=True)
41 inferrepo=True)
41 def children(ui, repo, file_=None, **opts):
42 def children(ui, repo, file_=None, **opts):
42 """show the children of the given or working directory revision
43 """show the children of the given or working directory revision
43
44
44 Print the children of the working directory's revisions. If a
45 Print the children of the working directory's revisions. If a
45 revision is given via -r/--rev, the children of that revision will
46 revision is given via -r/--rev, the children of that revision will
46 be printed. If a file argument is given, revision in which the
47 be printed. If a file argument is given, revision in which the
47 file was last changed (after the working directory revision or the
48 file was last changed (after the working directory revision or the
48 argument to --rev if given) is printed.
49 argument to --rev if given) is printed.
49
50
50 Please use :hg:`log` instead::
51 Please use :hg:`log` instead::
51
52
52 hg children => hg log -r "children()"
53 hg children => hg log -r "children()"
53 hg children -r REV => hg log -r "children(REV)"
54 hg children -r REV => hg log -r "children(REV)"
54
55
55 See :hg:`help log` and :hg:`help revsets.children`.
56 See :hg:`help log` and :hg:`help revsets.children`.
56
57
57 """
58 """
58 rev = opts.get('rev')
59 rev = opts.get('rev')
59 if file_:
60 if file_:
60 fctx = repo.filectx(file_, changeid=rev)
61 fctx = repo.filectx(file_, changeid=rev)
61 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
62 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
62 else:
63 else:
63 ctx = repo[rev]
64 ctx = repo[rev]
64 childctxs = ctx.children()
65 childctxs = ctx.children()
65
66
66 displayer = cmdutil.show_changeset(ui, repo, opts)
67 displayer = cmdutil.show_changeset(ui, repo, opts)
67 for cctx in childctxs:
68 for cctx in childctxs:
68 displayer.show(cctx)
69 displayer.show(cctx)
69 displayer.close()
70 displayer.close()
@@ -1,211 +1,212 b''
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 import datetime
13 import datetime
14 import os
14 import os
15 import time
15 import time
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 cmdutil,
19 cmdutil,
20 commands,
20 commands,
21 encoding,
21 encoding,
22 patch,
22 patch,
23 registrar,
23 scmutil,
24 scmutil,
24 util,
25 util,
25 )
26 )
26
27
27 cmdtable = {}
28 cmdtable = {}
28 command = cmdutil.command(cmdtable)
29 command = registrar.command(cmdtable)
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # be specifying the version(s) of Mercurial they are tested with, or
32 # be specifying the version(s) of Mercurial they are tested with, or
32 # leave the attribute unspecified.
33 # leave the attribute unspecified.
33 testedwith = 'ships-with-hg-core'
34 testedwith = 'ships-with-hg-core'
34
35
35 def maketemplater(ui, repo, tmpl):
36 def maketemplater(ui, repo, tmpl):
36 return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
37 return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
37
38
38 def changedlines(ui, repo, ctx1, ctx2, fns):
39 def changedlines(ui, repo, ctx1, ctx2, fns):
39 added, removed = 0, 0
40 added, removed = 0, 0
40 fmatch = scmutil.matchfiles(repo, fns)
41 fmatch = scmutil.matchfiles(repo, fns)
41 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
42 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
42 for l in diff.split('\n'):
43 for l in diff.split('\n'):
43 if l.startswith("+") and not l.startswith("+++ "):
44 if l.startswith("+") and not l.startswith("+++ "):
44 added += 1
45 added += 1
45 elif l.startswith("-") and not l.startswith("--- "):
46 elif l.startswith("-") and not l.startswith("--- "):
46 removed += 1
47 removed += 1
47 return (added, removed)
48 return (added, removed)
48
49
49 def countrate(ui, repo, amap, *pats, **opts):
50 def countrate(ui, repo, amap, *pats, **opts):
50 """Calculate stats"""
51 """Calculate stats"""
51 if opts.get('dateformat'):
52 if opts.get('dateformat'):
52 def getkey(ctx):
53 def getkey(ctx):
53 t, tz = ctx.date()
54 t, tz = ctx.date()
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
55 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
55 return date.strftime(opts['dateformat'])
56 return date.strftime(opts['dateformat'])
56 else:
57 else:
57 tmpl = opts.get('oldtemplate') or opts.get('template')
58 tmpl = opts.get('oldtemplate') or opts.get('template')
58 tmpl = maketemplater(ui, repo, tmpl)
59 tmpl = maketemplater(ui, repo, tmpl)
59 def getkey(ctx):
60 def getkey(ctx):
60 ui.pushbuffer()
61 ui.pushbuffer()
61 tmpl.show(ctx)
62 tmpl.show(ctx)
62 return ui.popbuffer()
63 return ui.popbuffer()
63
64
64 state = {'count': 0}
65 state = {'count': 0}
65 rate = {}
66 rate = {}
66 df = False
67 df = False
67 if opts.get('date'):
68 if opts.get('date'):
68 df = util.matchdate(opts['date'])
69 df = util.matchdate(opts['date'])
69
70
70 m = scmutil.match(repo[None], pats, opts)
71 m = scmutil.match(repo[None], pats, opts)
71 def prep(ctx, fns):
72 def prep(ctx, fns):
72 rev = ctx.rev()
73 rev = ctx.rev()
73 if df and not df(ctx.date()[0]): # doesn't match date format
74 if df and not df(ctx.date()[0]): # doesn't match date format
74 return
75 return
75
76
76 key = getkey(ctx).strip()
77 key = getkey(ctx).strip()
77 key = amap.get(key, key) # alias remap
78 key = amap.get(key, key) # alias remap
78 if opts.get('changesets'):
79 if opts.get('changesets'):
79 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
80 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
80 else:
81 else:
81 parents = ctx.parents()
82 parents = ctx.parents()
82 if len(parents) > 1:
83 if len(parents) > 1:
83 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
84 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
84 return
85 return
85
86
86 ctx1 = parents[0]
87 ctx1 = parents[0]
87 lines = changedlines(ui, repo, ctx1, ctx, fns)
88 lines = changedlines(ui, repo, ctx1, ctx, fns)
88 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
89 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
89
90
90 state['count'] += 1
91 state['count'] += 1
91 ui.progress(_('analyzing'), state['count'], total=len(repo),
92 ui.progress(_('analyzing'), state['count'], total=len(repo),
92 unit=_('revisions'))
93 unit=_('revisions'))
93
94
94 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
95 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
95 continue
96 continue
96
97
97 ui.progress(_('analyzing'), None)
98 ui.progress(_('analyzing'), None)
98
99
99 return rate
100 return rate
100
101
101
102
102 @command('churn',
103 @command('churn',
103 [('r', 'rev', [],
104 [('r', 'rev', [],
104 _('count rate for the specified revision or revset'), _('REV')),
105 _('count rate for the specified revision or revset'), _('REV')),
105 ('d', 'date', '',
106 ('d', 'date', '',
106 _('count rate for revisions matching date spec'), _('DATE')),
107 _('count rate for revisions matching date spec'), _('DATE')),
107 ('t', 'oldtemplate', '',
108 ('t', 'oldtemplate', '',
108 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
109 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
109 ('T', 'template', '{author|email}',
110 ('T', 'template', '{author|email}',
110 _('template to group changesets'), _('TEMPLATE')),
111 _('template to group changesets'), _('TEMPLATE')),
111 ('f', 'dateformat', '',
112 ('f', 'dateformat', '',
112 _('strftime-compatible format for grouping by date'), _('FORMAT')),
113 _('strftime-compatible format for grouping by date'), _('FORMAT')),
113 ('c', 'changesets', False, _('count rate by number of changesets')),
114 ('c', 'changesets', False, _('count rate by number of changesets')),
114 ('s', 'sort', False, _('sort by key (default: sort by count)')),
115 ('s', 'sort', False, _('sort by key (default: sort by count)')),
115 ('', 'diffstat', False, _('display added/removed lines separately')),
116 ('', 'diffstat', False, _('display added/removed lines separately')),
116 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
117 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
117 ] + commands.walkopts,
118 ] + commands.walkopts,
118 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
119 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
119 inferrepo=True)
120 inferrepo=True)
120 def churn(ui, repo, *pats, **opts):
121 def churn(ui, repo, *pats, **opts):
121 '''histogram of changes to the repository
122 '''histogram of changes to the repository
122
123
123 This command will display a histogram representing the number
124 This command will display a histogram representing the number
124 of changed lines or revisions, grouped according to the given
125 of changed lines or revisions, grouped according to the given
125 template. The default template will group changes by author.
126 template. The default template will group changes by author.
126 The --dateformat option may be used to group the results by
127 The --dateformat option may be used to group the results by
127 date instead.
128 date instead.
128
129
129 Statistics are based on the number of changed lines, or
130 Statistics are based on the number of changed lines, or
130 alternatively the number of matching revisions if the
131 alternatively the number of matching revisions if the
131 --changesets option is specified.
132 --changesets option is specified.
132
133
133 Examples::
134 Examples::
134
135
135 # display count of changed lines for every committer
136 # display count of changed lines for every committer
136 hg churn -T "{author|email}"
137 hg churn -T "{author|email}"
137
138
138 # display daily activity graph
139 # display daily activity graph
139 hg churn -f "%H" -s -c
140 hg churn -f "%H" -s -c
140
141
141 # display activity of developers by month
142 # display activity of developers by month
142 hg churn -f "%Y-%m" -s -c
143 hg churn -f "%Y-%m" -s -c
143
144
144 # display count of lines changed in every year
145 # display count of lines changed in every year
145 hg churn -f "%Y" -s
146 hg churn -f "%Y" -s
146
147
147 It is possible to map alternate email addresses to a main address
148 It is possible to map alternate email addresses to a main address
148 by providing a file using the following format::
149 by providing a file using the following format::
149
150
150 <alias email> = <actual email>
151 <alias email> = <actual email>
151
152
152 Such a file may be specified with the --aliases option, otherwise
153 Such a file may be specified with the --aliases option, otherwise
153 a .hgchurn file will be looked for in the working directory root.
154 a .hgchurn file will be looked for in the working directory root.
154 Aliases will be split from the rightmost "=".
155 Aliases will be split from the rightmost "=".
155 '''
156 '''
156 def pad(s, l):
157 def pad(s, l):
157 return s + " " * (l - encoding.colwidth(s))
158 return s + " " * (l - encoding.colwidth(s))
158
159
159 amap = {}
160 amap = {}
160 aliases = opts.get('aliases')
161 aliases = opts.get('aliases')
161 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
162 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
162 aliases = repo.wjoin('.hgchurn')
163 aliases = repo.wjoin('.hgchurn')
163 if aliases:
164 if aliases:
164 for l in open(aliases, "r"):
165 for l in open(aliases, "r"):
165 try:
166 try:
166 alias, actual = l.rsplit('=' in l and '=' or None, 1)
167 alias, actual = l.rsplit('=' in l and '=' or None, 1)
167 amap[alias.strip()] = actual.strip()
168 amap[alias.strip()] = actual.strip()
168 except ValueError:
169 except ValueError:
169 l = l.strip()
170 l = l.strip()
170 if l:
171 if l:
171 ui.warn(_("skipping malformed alias: %s\n") % l)
172 ui.warn(_("skipping malformed alias: %s\n") % l)
172 continue
173 continue
173
174
174 rate = countrate(ui, repo, amap, *pats, **opts).items()
175 rate = countrate(ui, repo, amap, *pats, **opts).items()
175 if not rate:
176 if not rate:
176 return
177 return
177
178
178 if opts.get('sort'):
179 if opts.get('sort'):
179 rate.sort()
180 rate.sort()
180 else:
181 else:
181 rate.sort(key=lambda x: (-sum(x[1]), x))
182 rate.sort(key=lambda x: (-sum(x[1]), x))
182
183
183 # Be careful not to have a zero maxcount (issue833)
184 # Be careful not to have a zero maxcount (issue833)
184 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
185 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
185 maxname = max(len(k) for k, v in rate)
186 maxname = max(len(k) for k, v in rate)
186
187
187 ttywidth = ui.termwidth()
188 ttywidth = ui.termwidth()
188 ui.debug("assuming %i character terminal\n" % ttywidth)
189 ui.debug("assuming %i character terminal\n" % ttywidth)
189 width = ttywidth - maxname - 2 - 2 - 2
190 width = ttywidth - maxname - 2 - 2 - 2
190
191
191 if opts.get('diffstat'):
192 if opts.get('diffstat'):
192 width -= 15
193 width -= 15
193 def format(name, diffstat):
194 def format(name, diffstat):
194 added, removed = diffstat
195 added, removed = diffstat
195 return "%s %15s %s%s\n" % (pad(name, maxname),
196 return "%s %15s %s%s\n" % (pad(name, maxname),
196 '+%d/-%d' % (added, removed),
197 '+%d/-%d' % (added, removed),
197 ui.label('+' * charnum(added),
198 ui.label('+' * charnum(added),
198 'diffstat.inserted'),
199 'diffstat.inserted'),
199 ui.label('-' * charnum(removed),
200 ui.label('-' * charnum(removed),
200 'diffstat.deleted'))
201 'diffstat.deleted'))
201 else:
202 else:
202 width -= 6
203 width -= 6
203 def format(name, count):
204 def format(name, count):
204 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
205 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
205 '*' * charnum(sum(count)))
206 '*' * charnum(sum(count)))
206
207
207 def charnum(count):
208 def charnum(count):
208 return int(round(count * width / maxcount))
209 return int(round(count * width / maxcount))
209
210
210 for name, count in rate:
211 for name, count in rate:
211 ui.write(format(name, count))
212 ui.write(format(name, count))
@@ -1,504 +1,503 b''
1 # convert.py Foreign SCM converter
1 # convert.py Foreign SCM converter
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''import revisions from foreign VCS repositories into Mercurial'''
8 '''import revisions from foreign VCS repositories into Mercurial'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import (
13 from mercurial import (
14 cmdutil,
15 registrar,
14 registrar,
16 )
15 )
17
16
18 from . import (
17 from . import (
19 convcmd,
18 convcmd,
20 cvsps,
19 cvsps,
21 subversion,
20 subversion,
22 )
21 )
23
22
24 cmdtable = {}
23 cmdtable = {}
25 command = cmdutil.command(cmdtable)
24 command = registrar.command(cmdtable)
26 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
25 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
27 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
26 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
28 # be specifying the version(s) of Mercurial they are tested with, or
27 # be specifying the version(s) of Mercurial they are tested with, or
29 # leave the attribute unspecified.
28 # leave the attribute unspecified.
30 testedwith = 'ships-with-hg-core'
29 testedwith = 'ships-with-hg-core'
31
30
32 # Commands definition was moved elsewhere to ease demandload job.
31 # Commands definition was moved elsewhere to ease demandload job.
33
32
34 @command('convert',
33 @command('convert',
35 [('', 'authors', '',
34 [('', 'authors', '',
36 _('username mapping filename (DEPRECATED) (use --authormap instead)'),
35 _('username mapping filename (DEPRECATED) (use --authormap instead)'),
37 _('FILE')),
36 _('FILE')),
38 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
37 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
39 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
38 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
40 ('r', 'rev', [], _('import up to source revision REV'), _('REV')),
39 ('r', 'rev', [], _('import up to source revision REV'), _('REV')),
41 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
40 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
42 ('', 'filemap', '', _('remap file names using contents of file'),
41 ('', 'filemap', '', _('remap file names using contents of file'),
43 _('FILE')),
42 _('FILE')),
44 ('', 'full', None,
43 ('', 'full', None,
45 _('apply filemap changes by converting all files again')),
44 _('apply filemap changes by converting all files again')),
46 ('', 'splicemap', '', _('splice synthesized history into place'),
45 ('', 'splicemap', '', _('splice synthesized history into place'),
47 _('FILE')),
46 _('FILE')),
48 ('', 'branchmap', '', _('change branch names while converting'),
47 ('', 'branchmap', '', _('change branch names while converting'),
49 _('FILE')),
48 _('FILE')),
50 ('', 'branchsort', None, _('try to sort changesets by branches')),
49 ('', 'branchsort', None, _('try to sort changesets by branches')),
51 ('', 'datesort', None, _('try to sort changesets by date')),
50 ('', 'datesort', None, _('try to sort changesets by date')),
52 ('', 'sourcesort', None, _('preserve source changesets order')),
51 ('', 'sourcesort', None, _('preserve source changesets order')),
53 ('', 'closesort', None, _('try to reorder closed revisions'))],
52 ('', 'closesort', None, _('try to reorder closed revisions'))],
54 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
53 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
55 norepo=True)
54 norepo=True)
56 def convert(ui, src, dest=None, revmapfile=None, **opts):
55 def convert(ui, src, dest=None, revmapfile=None, **opts):
57 """convert a foreign SCM repository to a Mercurial one.
56 """convert a foreign SCM repository to a Mercurial one.
58
57
59 Accepted source formats [identifiers]:
58 Accepted source formats [identifiers]:
60
59
61 - Mercurial [hg]
60 - Mercurial [hg]
62 - CVS [cvs]
61 - CVS [cvs]
63 - Darcs [darcs]
62 - Darcs [darcs]
64 - git [git]
63 - git [git]
65 - Subversion [svn]
64 - Subversion [svn]
66 - Monotone [mtn]
65 - Monotone [mtn]
67 - GNU Arch [gnuarch]
66 - GNU Arch [gnuarch]
68 - Bazaar [bzr]
67 - Bazaar [bzr]
69 - Perforce [p4]
68 - Perforce [p4]
70
69
71 Accepted destination formats [identifiers]:
70 Accepted destination formats [identifiers]:
72
71
73 - Mercurial [hg]
72 - Mercurial [hg]
74 - Subversion [svn] (history on branches is not preserved)
73 - Subversion [svn] (history on branches is not preserved)
75
74
76 If no revision is given, all revisions will be converted.
75 If no revision is given, all revisions will be converted.
77 Otherwise, convert will only import up to the named revision
76 Otherwise, convert will only import up to the named revision
78 (given in a format understood by the source).
77 (given in a format understood by the source).
79
78
80 If no destination directory name is specified, it defaults to the
79 If no destination directory name is specified, it defaults to the
81 basename of the source with ``-hg`` appended. If the destination
80 basename of the source with ``-hg`` appended. If the destination
82 repository doesn't exist, it will be created.
81 repository doesn't exist, it will be created.
83
82
84 By default, all sources except Mercurial will use --branchsort.
83 By default, all sources except Mercurial will use --branchsort.
85 Mercurial uses --sourcesort to preserve original revision numbers
84 Mercurial uses --sourcesort to preserve original revision numbers
86 order. Sort modes have the following effects:
85 order. Sort modes have the following effects:
87
86
88 --branchsort convert from parent to child revision when possible,
87 --branchsort convert from parent to child revision when possible,
89 which means branches are usually converted one after
88 which means branches are usually converted one after
90 the other. It generates more compact repositories.
89 the other. It generates more compact repositories.
91
90
92 --datesort sort revisions by date. Converted repositories have
91 --datesort sort revisions by date. Converted repositories have
93 good-looking changelogs but are often an order of
92 good-looking changelogs but are often an order of
94 magnitude larger than the same ones generated by
93 magnitude larger than the same ones generated by
95 --branchsort.
94 --branchsort.
96
95
97 --sourcesort try to preserve source revisions order, only
96 --sourcesort try to preserve source revisions order, only
98 supported by Mercurial sources.
97 supported by Mercurial sources.
99
98
100 --closesort try to move closed revisions as close as possible
99 --closesort try to move closed revisions as close as possible
101 to parent branches, only supported by Mercurial
100 to parent branches, only supported by Mercurial
102 sources.
101 sources.
103
102
104 If ``REVMAP`` isn't given, it will be put in a default location
103 If ``REVMAP`` isn't given, it will be put in a default location
105 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
104 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
106 text file that maps each source commit ID to the destination ID
105 text file that maps each source commit ID to the destination ID
107 for that revision, like so::
106 for that revision, like so::
108
107
109 <source ID> <destination ID>
108 <source ID> <destination ID>
110
109
111 If the file doesn't exist, it's automatically created. It's
110 If the file doesn't exist, it's automatically created. It's
112 updated on each commit copied, so :hg:`convert` can be interrupted
111 updated on each commit copied, so :hg:`convert` can be interrupted
113 and can be run repeatedly to copy new commits.
112 and can be run repeatedly to copy new commits.
114
113
115 The authormap is a simple text file that maps each source commit
114 The authormap is a simple text file that maps each source commit
116 author to a destination commit author. It is handy for source SCMs
115 author to a destination commit author. It is handy for source SCMs
117 that use unix logins to identify authors (e.g.: CVS). One line per
116 that use unix logins to identify authors (e.g.: CVS). One line per
118 author mapping and the line format is::
117 author mapping and the line format is::
119
118
120 source author = destination author
119 source author = destination author
121
120
122 Empty lines and lines starting with a ``#`` are ignored.
121 Empty lines and lines starting with a ``#`` are ignored.
123
122
124 The filemap is a file that allows filtering and remapping of files
123 The filemap is a file that allows filtering and remapping of files
125 and directories. Each line can contain one of the following
124 and directories. Each line can contain one of the following
126 directives::
125 directives::
127
126
128 include path/to/file-or-dir
127 include path/to/file-or-dir
129
128
130 exclude path/to/file-or-dir
129 exclude path/to/file-or-dir
131
130
132 rename path/to/source path/to/destination
131 rename path/to/source path/to/destination
133
132
134 Comment lines start with ``#``. A specified path matches if it
133 Comment lines start with ``#``. A specified path matches if it
135 equals the full relative name of a file or one of its parent
134 equals the full relative name of a file or one of its parent
136 directories. The ``include`` or ``exclude`` directive with the
135 directories. The ``include`` or ``exclude`` directive with the
137 longest matching path applies, so line order does not matter.
136 longest matching path applies, so line order does not matter.
138
137
139 The ``include`` directive causes a file, or all files under a
138 The ``include`` directive causes a file, or all files under a
140 directory, to be included in the destination repository. The default
139 directory, to be included in the destination repository. The default
141 if there are no ``include`` statements is to include everything.
140 if there are no ``include`` statements is to include everything.
142 If there are any ``include`` statements, nothing else is included.
141 If there are any ``include`` statements, nothing else is included.
143 The ``exclude`` directive causes files or directories to
142 The ``exclude`` directive causes files or directories to
144 be omitted. The ``rename`` directive renames a file or directory if
143 be omitted. The ``rename`` directive renames a file or directory if
145 it is converted. To rename from a subdirectory into the root of
144 it is converted. To rename from a subdirectory into the root of
146 the repository, use ``.`` as the path to rename to.
145 the repository, use ``.`` as the path to rename to.
147
146
148 ``--full`` will make sure the converted changesets contain exactly
147 ``--full`` will make sure the converted changesets contain exactly
149 the right files with the right content. It will make a full
148 the right files with the right content. It will make a full
150 conversion of all files, not just the ones that have
149 conversion of all files, not just the ones that have
151 changed. Files that already are correct will not be changed. This
150 changed. Files that already are correct will not be changed. This
152 can be used to apply filemap changes when converting
151 can be used to apply filemap changes when converting
153 incrementally. This is currently only supported for Mercurial and
152 incrementally. This is currently only supported for Mercurial and
154 Subversion.
153 Subversion.
155
154
156 The splicemap is a file that allows insertion of synthetic
155 The splicemap is a file that allows insertion of synthetic
157 history, letting you specify the parents of a revision. This is
156 history, letting you specify the parents of a revision. This is
158 useful if you want to e.g. give a Subversion merge two parents, or
157 useful if you want to e.g. give a Subversion merge two parents, or
159 graft two disconnected series of history together. Each entry
158 graft two disconnected series of history together. Each entry
160 contains a key, followed by a space, followed by one or two
159 contains a key, followed by a space, followed by one or two
161 comma-separated values::
160 comma-separated values::
162
161
163 key parent1, parent2
162 key parent1, parent2
164
163
165 The key is the revision ID in the source
164 The key is the revision ID in the source
166 revision control system whose parents should be modified (same
165 revision control system whose parents should be modified (same
167 format as a key in .hg/shamap). The values are the revision IDs
166 format as a key in .hg/shamap). The values are the revision IDs
168 (in either the source or destination revision control system) that
167 (in either the source or destination revision control system) that
169 should be used as the new parents for that node. For example, if
168 should be used as the new parents for that node. For example, if
170 you have merged "release-1.0" into "trunk", then you should
169 you have merged "release-1.0" into "trunk", then you should
171 specify the revision on "trunk" as the first parent and the one on
170 specify the revision on "trunk" as the first parent and the one on
172 the "release-1.0" branch as the second.
171 the "release-1.0" branch as the second.
173
172
174 The branchmap is a file that allows you to rename a branch when it is
173 The branchmap is a file that allows you to rename a branch when it is
175 being brought in from whatever external repository. When used in
174 being brought in from whatever external repository. When used in
176 conjunction with a splicemap, it allows for a powerful combination
175 conjunction with a splicemap, it allows for a powerful combination
177 to help fix even the most badly mismanaged repositories and turn them
176 to help fix even the most badly mismanaged repositories and turn them
178 into nicely structured Mercurial repositories. The branchmap contains
177 into nicely structured Mercurial repositories. The branchmap contains
179 lines of the form::
178 lines of the form::
180
179
181 original_branch_name new_branch_name
180 original_branch_name new_branch_name
182
181
183 where "original_branch_name" is the name of the branch in the
182 where "original_branch_name" is the name of the branch in the
184 source repository, and "new_branch_name" is the name of the branch
183 source repository, and "new_branch_name" is the name of the branch
185 is the destination repository. No whitespace is allowed in the
184 is the destination repository. No whitespace is allowed in the
186 branch names. This can be used to (for instance) move code in one
185 branch names. This can be used to (for instance) move code in one
187 repository from "default" to a named branch.
186 repository from "default" to a named branch.
188
187
189 Mercurial Source
188 Mercurial Source
190 ################
189 ################
191
190
192 The Mercurial source recognizes the following configuration
191 The Mercurial source recognizes the following configuration
193 options, which you can set on the command line with ``--config``:
192 options, which you can set on the command line with ``--config``:
194
193
195 :convert.hg.ignoreerrors: ignore integrity errors when reading.
194 :convert.hg.ignoreerrors: ignore integrity errors when reading.
196 Use it to fix Mercurial repositories with missing revlogs, by
195 Use it to fix Mercurial repositories with missing revlogs, by
197 converting from and to Mercurial. Default is False.
196 converting from and to Mercurial. Default is False.
198
197
199 :convert.hg.saverev: store original revision ID in changeset
198 :convert.hg.saverev: store original revision ID in changeset
200 (forces target IDs to change). It takes a boolean argument and
199 (forces target IDs to change). It takes a boolean argument and
201 defaults to False.
200 defaults to False.
202
201
203 :convert.hg.startrev: specify the initial Mercurial revision.
202 :convert.hg.startrev: specify the initial Mercurial revision.
204 The default is 0.
203 The default is 0.
205
204
206 :convert.hg.revs: revset specifying the source revisions to convert.
205 :convert.hg.revs: revset specifying the source revisions to convert.
207
206
208 CVS Source
207 CVS Source
209 ##########
208 ##########
210
209
211 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
210 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
212 to indicate the starting point of what will be converted. Direct
211 to indicate the starting point of what will be converted. Direct
213 access to the repository files is not needed, unless of course the
212 access to the repository files is not needed, unless of course the
214 repository is ``:local:``. The conversion uses the top level
213 repository is ``:local:``. The conversion uses the top level
215 directory in the sandbox to find the CVS repository, and then uses
214 directory in the sandbox to find the CVS repository, and then uses
216 CVS rlog commands to find files to convert. This means that unless
215 CVS rlog commands to find files to convert. This means that unless
217 a filemap is given, all files under the starting directory will be
216 a filemap is given, all files under the starting directory will be
218 converted, and that any directory reorganization in the CVS
217 converted, and that any directory reorganization in the CVS
219 sandbox is ignored.
218 sandbox is ignored.
220
219
221 The following options can be used with ``--config``:
220 The following options can be used with ``--config``:
222
221
223 :convert.cvsps.cache: Set to False to disable remote log caching,
222 :convert.cvsps.cache: Set to False to disable remote log caching,
224 for testing and debugging purposes. Default is True.
223 for testing and debugging purposes. Default is True.
225
224
226 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
225 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
227 allowed between commits with identical user and log message in
226 allowed between commits with identical user and log message in
228 a single changeset. When very large files were checked in as
227 a single changeset. When very large files were checked in as
229 part of a changeset then the default may not be long enough.
228 part of a changeset then the default may not be long enough.
230 The default is 60.
229 The default is 60.
231
230
232 :convert.cvsps.mergeto: Specify a regular expression to which
231 :convert.cvsps.mergeto: Specify a regular expression to which
233 commit log messages are matched. If a match occurs, then the
232 commit log messages are matched. If a match occurs, then the
234 conversion process will insert a dummy revision merging the
233 conversion process will insert a dummy revision merging the
235 branch on which this log message occurs to the branch
234 branch on which this log message occurs to the branch
236 indicated in the regex. Default is ``{{mergetobranch
235 indicated in the regex. Default is ``{{mergetobranch
237 ([-\\w]+)}}``
236 ([-\\w]+)}}``
238
237
239 :convert.cvsps.mergefrom: Specify a regular expression to which
238 :convert.cvsps.mergefrom: Specify a regular expression to which
240 commit log messages are matched. If a match occurs, then the
239 commit log messages are matched. If a match occurs, then the
241 conversion process will add the most recent revision on the
240 conversion process will add the most recent revision on the
242 branch indicated in the regex as the second parent of the
241 branch indicated in the regex as the second parent of the
243 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
242 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
244
243
245 :convert.localtimezone: use local time (as determined by the TZ
244 :convert.localtimezone: use local time (as determined by the TZ
246 environment variable) for changeset date/times. The default
245 environment variable) for changeset date/times. The default
247 is False (use UTC).
246 is False (use UTC).
248
247
249 :hooks.cvslog: Specify a Python function to be called at the end of
248 :hooks.cvslog: Specify a Python function to be called at the end of
250 gathering the CVS log. The function is passed a list with the
249 gathering the CVS log. The function is passed a list with the
251 log entries, and can modify the entries in-place, or add or
250 log entries, and can modify the entries in-place, or add or
252 delete them.
251 delete them.
253
252
254 :hooks.cvschangesets: Specify a Python function to be called after
253 :hooks.cvschangesets: Specify a Python function to be called after
255 the changesets are calculated from the CVS log. The
254 the changesets are calculated from the CVS log. The
256 function is passed a list with the changeset entries, and can
255 function is passed a list with the changeset entries, and can
257 modify the changesets in-place, or add or delete them.
256 modify the changesets in-place, or add or delete them.
258
257
259 An additional "debugcvsps" Mercurial command allows the builtin
258 An additional "debugcvsps" Mercurial command allows the builtin
260 changeset merging code to be run without doing a conversion. Its
259 changeset merging code to be run without doing a conversion. Its
261 parameters and output are similar to that of cvsps 2.1. Please see
260 parameters and output are similar to that of cvsps 2.1. Please see
262 the command help for more details.
261 the command help for more details.
263
262
264 Subversion Source
263 Subversion Source
265 #################
264 #################
266
265
267 Subversion source detects classical trunk/branches/tags layouts.
266 Subversion source detects classical trunk/branches/tags layouts.
268 By default, the supplied ``svn://repo/path/`` source URL is
267 By default, the supplied ``svn://repo/path/`` source URL is
269 converted as a single branch. If ``svn://repo/path/trunk`` exists
268 converted as a single branch. If ``svn://repo/path/trunk`` exists
270 it replaces the default branch. If ``svn://repo/path/branches``
269 it replaces the default branch. If ``svn://repo/path/branches``
271 exists, its subdirectories are listed as possible branches. If
270 exists, its subdirectories are listed as possible branches. If
272 ``svn://repo/path/tags`` exists, it is looked for tags referencing
271 ``svn://repo/path/tags`` exists, it is looked for tags referencing
273 converted branches. Default ``trunk``, ``branches`` and ``tags``
272 converted branches. Default ``trunk``, ``branches`` and ``tags``
274 values can be overridden with following options. Set them to paths
273 values can be overridden with following options. Set them to paths
275 relative to the source URL, or leave them blank to disable auto
274 relative to the source URL, or leave them blank to disable auto
276 detection.
275 detection.
277
276
278 The following options can be set with ``--config``:
277 The following options can be set with ``--config``:
279
278
280 :convert.svn.branches: specify the directory containing branches.
279 :convert.svn.branches: specify the directory containing branches.
281 The default is ``branches``.
280 The default is ``branches``.
282
281
283 :convert.svn.tags: specify the directory containing tags. The
282 :convert.svn.tags: specify the directory containing tags. The
284 default is ``tags``.
283 default is ``tags``.
285
284
286 :convert.svn.trunk: specify the name of the trunk branch. The
285 :convert.svn.trunk: specify the name of the trunk branch. The
287 default is ``trunk``.
286 default is ``trunk``.
288
287
289 :convert.localtimezone: use local time (as determined by the TZ
288 :convert.localtimezone: use local time (as determined by the TZ
290 environment variable) for changeset date/times. The default
289 environment variable) for changeset date/times. The default
291 is False (use UTC).
290 is False (use UTC).
292
291
293 Source history can be retrieved starting at a specific revision,
292 Source history can be retrieved starting at a specific revision,
294 instead of being integrally converted. Only single branch
293 instead of being integrally converted. Only single branch
295 conversions are supported.
294 conversions are supported.
296
295
297 :convert.svn.startrev: specify start Subversion revision number.
296 :convert.svn.startrev: specify start Subversion revision number.
298 The default is 0.
297 The default is 0.
299
298
300 Git Source
299 Git Source
301 ##########
300 ##########
302
301
303 The Git importer converts commits from all reachable branches (refs
302 The Git importer converts commits from all reachable branches (refs
304 in refs/heads) and remotes (refs in refs/remotes) to Mercurial.
303 in refs/heads) and remotes (refs in refs/remotes) to Mercurial.
305 Branches are converted to bookmarks with the same name, with the
304 Branches are converted to bookmarks with the same name, with the
306 leading 'refs/heads' stripped. Git submodules are converted to Git
305 leading 'refs/heads' stripped. Git submodules are converted to Git
307 subrepos in Mercurial.
306 subrepos in Mercurial.
308
307
309 The following options can be set with ``--config``:
308 The following options can be set with ``--config``:
310
309
311 :convert.git.similarity: specify how similar files modified in a
310 :convert.git.similarity: specify how similar files modified in a
312 commit must be to be imported as renames or copies, as a
311 commit must be to be imported as renames or copies, as a
313 percentage between ``0`` (disabled) and ``100`` (files must be
312 percentage between ``0`` (disabled) and ``100`` (files must be
314 identical). For example, ``90`` means that a delete/add pair will
313 identical). For example, ``90`` means that a delete/add pair will
315 be imported as a rename if more than 90% of the file hasn't
314 be imported as a rename if more than 90% of the file hasn't
316 changed. The default is ``50``.
315 changed. The default is ``50``.
317
316
318 :convert.git.findcopiesharder: while detecting copies, look at all
317 :convert.git.findcopiesharder: while detecting copies, look at all
319 files in the working copy instead of just changed ones. This
318 files in the working copy instead of just changed ones. This
320 is very expensive for large projects, and is only effective when
319 is very expensive for large projects, and is only effective when
321 ``convert.git.similarity`` is greater than 0. The default is False.
320 ``convert.git.similarity`` is greater than 0. The default is False.
322
321
323 :convert.git.renamelimit: perform rename and copy detection up to this
322 :convert.git.renamelimit: perform rename and copy detection up to this
324 many changed files in a commit. Increasing this will make rename
323 many changed files in a commit. Increasing this will make rename
325 and copy detection more accurate but will significantly slow down
324 and copy detection more accurate but will significantly slow down
326 computation on large projects. The option is only relevant if
325 computation on large projects. The option is only relevant if
327 ``convert.git.similarity`` is greater than 0. The default is
326 ``convert.git.similarity`` is greater than 0. The default is
328 ``400``.
327 ``400``.
329
328
330 :convert.git.committeractions: list of actions to take when processing
329 :convert.git.committeractions: list of actions to take when processing
331 author and committer values.
330 author and committer values.
332
331
333 Git commits have separate author (who wrote the commit) and committer
332 Git commits have separate author (who wrote the commit) and committer
334 (who applied the commit) fields. Not all destinations support separate
333 (who applied the commit) fields. Not all destinations support separate
335 author and committer fields (including Mercurial). This config option
334 author and committer fields (including Mercurial). This config option
336 controls what to do with these author and committer fields during
335 controls what to do with these author and committer fields during
337 conversion.
336 conversion.
338
337
339 A value of ``messagedifferent`` will append a ``committer: ...``
338 A value of ``messagedifferent`` will append a ``committer: ...``
340 line to the commit message if the Git committer is different from the
339 line to the commit message if the Git committer is different from the
341 author. The prefix of that line can be specified using the syntax
340 author. The prefix of that line can be specified using the syntax
342 ``messagedifferent=<prefix>``. e.g. ``messagedifferent=git-committer:``.
341 ``messagedifferent=<prefix>``. e.g. ``messagedifferent=git-committer:``.
343 When a prefix is specified, a space will always be inserted between the
342 When a prefix is specified, a space will always be inserted between the
344 prefix and the value.
343 prefix and the value.
345
344
346 ``messagealways`` behaves like ``messagedifferent`` except it will
345 ``messagealways`` behaves like ``messagedifferent`` except it will
347 always result in a ``committer: ...`` line being appended to the commit
346 always result in a ``committer: ...`` line being appended to the commit
348 message. This value is mutually exclusive with ``messagedifferent``.
347 message. This value is mutually exclusive with ``messagedifferent``.
349
348
350 ``dropcommitter`` will remove references to the committer. Only
349 ``dropcommitter`` will remove references to the committer. Only
351 references to the author will remain. Actions that add references
350 references to the author will remain. Actions that add references
352 to the committer will have no effect when this is set.
351 to the committer will have no effect when this is set.
353
352
354 ``replaceauthor`` will replace the value of the author field with
353 ``replaceauthor`` will replace the value of the author field with
355 the committer. Other actions that add references to the committer
354 the committer. Other actions that add references to the committer
356 will still take effect when this is set.
355 will still take effect when this is set.
357
356
358 The default is ``messagedifferent``.
357 The default is ``messagedifferent``.
359
358
360 :convert.git.extrakeys: list of extra keys from commit metadata to copy to
359 :convert.git.extrakeys: list of extra keys from commit metadata to copy to
361 the destination. Some Git repositories store extra metadata in commits.
360 the destination. Some Git repositories store extra metadata in commits.
362 By default, this non-default metadata will be lost during conversion.
361 By default, this non-default metadata will be lost during conversion.
363 Setting this config option can retain that metadata. Some built-in
362 Setting this config option can retain that metadata. Some built-in
364 keys such as ``parent`` and ``branch`` are not allowed to be copied.
363 keys such as ``parent`` and ``branch`` are not allowed to be copied.
365
364
366 :convert.git.remoteprefix: remote refs are converted as bookmarks with
365 :convert.git.remoteprefix: remote refs are converted as bookmarks with
367 ``convert.git.remoteprefix`` as a prefix followed by a /. The default
366 ``convert.git.remoteprefix`` as a prefix followed by a /. The default
368 is 'remote'.
367 is 'remote'.
369
368
370 :convert.git.saverev: whether to store the original Git commit ID in the
369 :convert.git.saverev: whether to store the original Git commit ID in the
371 metadata of the destination commit. The default is True.
370 metadata of the destination commit. The default is True.
372
371
373 :convert.git.skipsubmodules: does not convert root level .gitmodules files
372 :convert.git.skipsubmodules: does not convert root level .gitmodules files
374 or files with 160000 mode indicating a submodule. Default is False.
373 or files with 160000 mode indicating a submodule. Default is False.
375
374
376 Perforce Source
375 Perforce Source
377 ###############
376 ###############
378
377
379 The Perforce (P4) importer can be given a p4 depot path or a
378 The Perforce (P4) importer can be given a p4 depot path or a
380 client specification as source. It will convert all files in the
379 client specification as source. It will convert all files in the
381 source to a flat Mercurial repository, ignoring labels, branches
380 source to a flat Mercurial repository, ignoring labels, branches
382 and integrations. Note that when a depot path is given you then
381 and integrations. Note that when a depot path is given you then
383 usually should specify a target directory, because otherwise the
382 usually should specify a target directory, because otherwise the
384 target may be named ``...-hg``.
383 target may be named ``...-hg``.
385
384
386 The following options can be set with ``--config``:
385 The following options can be set with ``--config``:
387
386
388 :convert.p4.encoding: specify the encoding to use when decoding standard
387 :convert.p4.encoding: specify the encoding to use when decoding standard
389 output of the Perforce command line tool. The default is default system
388 output of the Perforce command line tool. The default is default system
390 encoding.
389 encoding.
391
390
392 :convert.p4.startrev: specify initial Perforce revision (a
391 :convert.p4.startrev: specify initial Perforce revision (a
393 Perforce changelist number).
392 Perforce changelist number).
394
393
395 Mercurial Destination
394 Mercurial Destination
396 #####################
395 #####################
397
396
398 The Mercurial destination will recognize Mercurial subrepositories in the
397 The Mercurial destination will recognize Mercurial subrepositories in the
399 destination directory, and update the .hgsubstate file automatically if the
398 destination directory, and update the .hgsubstate file automatically if the
400 destination subrepositories contain the <dest>/<sub>/.hg/shamap file.
399 destination subrepositories contain the <dest>/<sub>/.hg/shamap file.
401 Converting a repository with subrepositories requires converting a single
400 Converting a repository with subrepositories requires converting a single
402 repository at a time, from the bottom up.
401 repository at a time, from the bottom up.
403
402
404 .. container:: verbose
403 .. container:: verbose
405
404
406 An example showing how to convert a repository with subrepositories::
405 An example showing how to convert a repository with subrepositories::
407
406
408 # so convert knows the type when it sees a non empty destination
407 # so convert knows the type when it sees a non empty destination
409 $ hg init converted
408 $ hg init converted
410
409
411 $ hg convert orig/sub1 converted/sub1
410 $ hg convert orig/sub1 converted/sub1
412 $ hg convert orig/sub2 converted/sub2
411 $ hg convert orig/sub2 converted/sub2
413 $ hg convert orig converted
412 $ hg convert orig converted
414
413
415 The following options are supported:
414 The following options are supported:
416
415
417 :convert.hg.clonebranches: dispatch source branches in separate
416 :convert.hg.clonebranches: dispatch source branches in separate
418 clones. The default is False.
417 clones. The default is False.
419
418
420 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
419 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
421 ``default``.
420 ``default``.
422
421
423 :convert.hg.usebranchnames: preserve branch names. The default is
422 :convert.hg.usebranchnames: preserve branch names. The default is
424 True.
423 True.
425
424
426 :convert.hg.sourcename: records the given string as a 'convert_source' extra
425 :convert.hg.sourcename: records the given string as a 'convert_source' extra
427 value on each commit made in the target repository. The default is None.
426 value on each commit made in the target repository. The default is None.
428
427
429 All Destinations
428 All Destinations
430 ################
429 ################
431
430
432 All destination types accept the following options:
431 All destination types accept the following options:
433
432
434 :convert.skiptags: does not convert tags from the source repo to the target
433 :convert.skiptags: does not convert tags from the source repo to the target
435 repo. The default is False.
434 repo. The default is False.
436 """
435 """
437 return convcmd.convert(ui, src, dest, revmapfile, **opts)
436 return convcmd.convert(ui, src, dest, revmapfile, **opts)
438
437
439 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
438 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
440 def debugsvnlog(ui, **opts):
439 def debugsvnlog(ui, **opts):
441 return subversion.debugsvnlog(ui, **opts)
440 return subversion.debugsvnlog(ui, **opts)
442
441
443 @command('debugcvsps',
442 @command('debugcvsps',
444 [
443 [
445 # Main options shared with cvsps-2.1
444 # Main options shared with cvsps-2.1
446 ('b', 'branches', [], _('only return changes on specified branches')),
445 ('b', 'branches', [], _('only return changes on specified branches')),
447 ('p', 'prefix', '', _('prefix to remove from file names')),
446 ('p', 'prefix', '', _('prefix to remove from file names')),
448 ('r', 'revisions', [],
447 ('r', 'revisions', [],
449 _('only return changes after or between specified tags')),
448 _('only return changes after or between specified tags')),
450 ('u', 'update-cache', None, _("update cvs log cache")),
449 ('u', 'update-cache', None, _("update cvs log cache")),
451 ('x', 'new-cache', None, _("create new cvs log cache")),
450 ('x', 'new-cache', None, _("create new cvs log cache")),
452 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
451 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
453 ('', 'root', '', _('specify cvsroot')),
452 ('', 'root', '', _('specify cvsroot')),
454 # Options specific to builtin cvsps
453 # Options specific to builtin cvsps
455 ('', 'parents', '', _('show parent changesets')),
454 ('', 'parents', '', _('show parent changesets')),
456 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
455 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
457 # Options that are ignored for compatibility with cvsps-2.1
456 # Options that are ignored for compatibility with cvsps-2.1
458 ('A', 'cvs-direct', None, _('ignored for compatibility')),
457 ('A', 'cvs-direct', None, _('ignored for compatibility')),
459 ],
458 ],
460 _('hg debugcvsps [OPTION]... [PATH]...'),
459 _('hg debugcvsps [OPTION]... [PATH]...'),
461 norepo=True)
460 norepo=True)
462 def debugcvsps(ui, *args, **opts):
461 def debugcvsps(ui, *args, **opts):
463 '''create changeset information from CVS
462 '''create changeset information from CVS
464
463
465 This command is intended as a debugging tool for the CVS to
464 This command is intended as a debugging tool for the CVS to
466 Mercurial converter, and can be used as a direct replacement for
465 Mercurial converter, and can be used as a direct replacement for
467 cvsps.
466 cvsps.
468
467
469 Hg debugcvsps reads the CVS rlog for current directory (or any
468 Hg debugcvsps reads the CVS rlog for current directory (or any
470 named directory) in the CVS repository, and converts the log to a
469 named directory) in the CVS repository, and converts the log to a
471 series of changesets based on matching commit log entries and
470 series of changesets based on matching commit log entries and
472 dates.'''
471 dates.'''
473 return cvsps.debugcvsps(ui, *args, **opts)
472 return cvsps.debugcvsps(ui, *args, **opts)
474
473
475 def kwconverted(ctx, name):
474 def kwconverted(ctx, name):
476 rev = ctx.extra().get('convert_revision', '')
475 rev = ctx.extra().get('convert_revision', '')
477 if rev.startswith('svn:'):
476 if rev.startswith('svn:'):
478 if name == 'svnrev':
477 if name == 'svnrev':
479 return str(subversion.revsplit(rev)[2])
478 return str(subversion.revsplit(rev)[2])
480 elif name == 'svnpath':
479 elif name == 'svnpath':
481 return subversion.revsplit(rev)[1]
480 return subversion.revsplit(rev)[1]
482 elif name == 'svnuuid':
481 elif name == 'svnuuid':
483 return subversion.revsplit(rev)[0]
482 return subversion.revsplit(rev)[0]
484 return rev
483 return rev
485
484
486 templatekeyword = registrar.templatekeyword()
485 templatekeyword = registrar.templatekeyword()
487
486
488 @templatekeyword('svnrev')
487 @templatekeyword('svnrev')
489 def kwsvnrev(repo, ctx, **args):
488 def kwsvnrev(repo, ctx, **args):
490 """String. Converted subversion revision number."""
489 """String. Converted subversion revision number."""
491 return kwconverted(ctx, 'svnrev')
490 return kwconverted(ctx, 'svnrev')
492
491
493 @templatekeyword('svnpath')
492 @templatekeyword('svnpath')
494 def kwsvnpath(repo, ctx, **args):
493 def kwsvnpath(repo, ctx, **args):
495 """String. Converted subversion revision project path."""
494 """String. Converted subversion revision project path."""
496 return kwconverted(ctx, 'svnpath')
495 return kwconverted(ctx, 'svnpath')
497
496
498 @templatekeyword('svnuuid')
497 @templatekeyword('svnuuid')
499 def kwsvnuuid(repo, ctx, **args):
498 def kwsvnuuid(repo, ctx, **args):
500 """String. Converted subversion revision repository identifier."""
499 """String. Converted subversion revision repository identifier."""
501 return kwconverted(ctx, 'svnuuid')
500 return kwconverted(ctx, 'svnuuid')
502
501
503 # tell hggettext to extract docstrings from these functions:
502 # tell hggettext to extract docstrings from these functions:
504 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
503 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
@@ -1,400 +1,401 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or revision with working directory. The external
11 to compare revisions, or revision with working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 The extdiff extension also allows you to configure new diff commands, so
16 The extdiff extension also allows you to configure new diff commands, so
17 you do not need to type :hg:`extdiff -p kdiff3` always. ::
17 you do not need to type :hg:`extdiff -p kdiff3` always. ::
18
18
19 [extdiff]
19 [extdiff]
20 # add new command that runs GNU diff(1) in 'context diff' mode
20 # add new command that runs GNU diff(1) in 'context diff' mode
21 cdiff = gdiff -Nprc5
21 cdiff = gdiff -Nprc5
22 ## or the old way:
22 ## or the old way:
23 #cmd.cdiff = gdiff
23 #cmd.cdiff = gdiff
24 #opts.cdiff = -Nprc5
24 #opts.cdiff = -Nprc5
25
25
26 # add new command called meld, runs meld (no need to name twice). If
26 # add new command called meld, runs meld (no need to name twice). If
27 # the meld executable is not available, the meld tool in [merge-tools]
27 # the meld executable is not available, the meld tool in [merge-tools]
28 # will be used, if available
28 # will be used, if available
29 meld =
29 meld =
30
30
31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
32 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
32 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
33 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
33 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
34 # your .vimrc
34 # your .vimrc
35 vimdiff = gvim -f "+next" \\
35 vimdiff = gvim -f "+next" \\
36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
37
37
38 Tool arguments can include variables that are expanded at runtime::
38 Tool arguments can include variables that are expanded at runtime::
39
39
40 $parent1, $plabel1 - filename, descriptive label of first parent
40 $parent1, $plabel1 - filename, descriptive label of first parent
41 $child, $clabel - filename, descriptive label of child revision
41 $child, $clabel - filename, descriptive label of child revision
42 $parent2, $plabel2 - filename, descriptive label of second parent
42 $parent2, $plabel2 - filename, descriptive label of second parent
43 $root - repository root
43 $root - repository root
44 $parent is an alias for $parent1.
44 $parent is an alias for $parent1.
45
45
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
47 sections for diff tool arguments, when none are specified in [extdiff].
47 sections for diff tool arguments, when none are specified in [extdiff].
48
48
49 ::
49 ::
50
50
51 [extdiff]
51 [extdiff]
52 kdiff3 =
52 kdiff3 =
53
53
54 [diff-tools]
54 [diff-tools]
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
56
56
57 You can use -I/-X and list of file or directory names like normal
57 You can use -I/-X and list of file or directory names like normal
58 :hg:`diff` command. The extdiff extension makes snapshots of only
58 :hg:`diff` command. The extdiff extension makes snapshots of only
59 needed files, so running the external diff program will actually be
59 needed files, so running the external diff program will actually be
60 pretty fast (at least faster than having to compare the entire tree).
60 pretty fast (at least faster than having to compare the entire tree).
61 '''
61 '''
62
62
63 from __future__ import absolute_import
63 from __future__ import absolute_import
64
64
65 import os
65 import os
66 import re
66 import re
67 import shutil
67 import shutil
68 import tempfile
68 import tempfile
69 from mercurial.i18n import _
69 from mercurial.i18n import _
70 from mercurial.node import (
70 from mercurial.node import (
71 nullid,
71 nullid,
72 short,
72 short,
73 )
73 )
74 from mercurial import (
74 from mercurial import (
75 archival,
75 archival,
76 cmdutil,
76 cmdutil,
77 commands,
77 commands,
78 error,
78 error,
79 filemerge,
79 filemerge,
80 pycompat,
80 pycompat,
81 registrar,
81 scmutil,
82 scmutil,
82 util,
83 util,
83 )
84 )
84
85
85 cmdtable = {}
86 cmdtable = {}
86 command = cmdutil.command(cmdtable)
87 command = registrar.command(cmdtable)
87 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
88 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
88 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
89 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
89 # be specifying the version(s) of Mercurial they are tested with, or
90 # be specifying the version(s) of Mercurial they are tested with, or
90 # leave the attribute unspecified.
91 # leave the attribute unspecified.
91 testedwith = 'ships-with-hg-core'
92 testedwith = 'ships-with-hg-core'
92
93
93 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
94 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
94 '''snapshot files as of some revision
95 '''snapshot files as of some revision
95 if not using snapshot, -I/-X does not work and recursive diff
96 if not using snapshot, -I/-X does not work and recursive diff
96 in tools like kdiff3 and meld displays too many files.'''
97 in tools like kdiff3 and meld displays too many files.'''
97 dirname = os.path.basename(repo.root)
98 dirname = os.path.basename(repo.root)
98 if dirname == "":
99 if dirname == "":
99 dirname = "root"
100 dirname = "root"
100 if node is not None:
101 if node is not None:
101 dirname = '%s.%s' % (dirname, short(node))
102 dirname = '%s.%s' % (dirname, short(node))
102 base = os.path.join(tmproot, dirname)
103 base = os.path.join(tmproot, dirname)
103 os.mkdir(base)
104 os.mkdir(base)
104 fnsandstat = []
105 fnsandstat = []
105
106
106 if node is not None:
107 if node is not None:
107 ui.note(_('making snapshot of %d files from rev %s\n') %
108 ui.note(_('making snapshot of %d files from rev %s\n') %
108 (len(files), short(node)))
109 (len(files), short(node)))
109 else:
110 else:
110 ui.note(_('making snapshot of %d files from working directory\n') %
111 ui.note(_('making snapshot of %d files from working directory\n') %
111 (len(files)))
112 (len(files)))
112
113
113 if files:
114 if files:
114 repo.ui.setconfig("ui", "archivemeta", False)
115 repo.ui.setconfig("ui", "archivemeta", False)
115
116
116 archival.archive(repo, base, node, 'files',
117 archival.archive(repo, base, node, 'files',
117 matchfn=scmutil.matchfiles(repo, files),
118 matchfn=scmutil.matchfiles(repo, files),
118 subrepos=listsubrepos)
119 subrepos=listsubrepos)
119
120
120 for fn in sorted(files):
121 for fn in sorted(files):
121 wfn = util.pconvert(fn)
122 wfn = util.pconvert(fn)
122 ui.note(' %s\n' % wfn)
123 ui.note(' %s\n' % wfn)
123
124
124 if node is None:
125 if node is None:
125 dest = os.path.join(base, wfn)
126 dest = os.path.join(base, wfn)
126
127
127 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
128 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
128 return dirname, fnsandstat
129 return dirname, fnsandstat
129
130
130 def dodiff(ui, repo, cmdline, pats, opts):
131 def dodiff(ui, repo, cmdline, pats, opts):
131 '''Do the actual diff:
132 '''Do the actual diff:
132
133
133 - copy to a temp structure if diffing 2 internal revisions
134 - copy to a temp structure if diffing 2 internal revisions
134 - copy to a temp structure if diffing working revision with
135 - copy to a temp structure if diffing working revision with
135 another one and more than 1 file is changed
136 another one and more than 1 file is changed
136 - just invoke the diff for a single file in the working dir
137 - just invoke the diff for a single file in the working dir
137 '''
138 '''
138
139
139 revs = opts.get('rev')
140 revs = opts.get('rev')
140 change = opts.get('change')
141 change = opts.get('change')
141 do3way = '$parent2' in cmdline
142 do3way = '$parent2' in cmdline
142
143
143 if revs and change:
144 if revs and change:
144 msg = _('cannot specify --rev and --change at the same time')
145 msg = _('cannot specify --rev and --change at the same time')
145 raise error.Abort(msg)
146 raise error.Abort(msg)
146 elif change:
147 elif change:
147 node2 = scmutil.revsingle(repo, change, None).node()
148 node2 = scmutil.revsingle(repo, change, None).node()
148 node1a, node1b = repo.changelog.parents(node2)
149 node1a, node1b = repo.changelog.parents(node2)
149 else:
150 else:
150 node1a, node2 = scmutil.revpair(repo, revs)
151 node1a, node2 = scmutil.revpair(repo, revs)
151 if not revs:
152 if not revs:
152 node1b = repo.dirstate.p2()
153 node1b = repo.dirstate.p2()
153 else:
154 else:
154 node1b = nullid
155 node1b = nullid
155
156
156 # Disable 3-way merge if there is only one parent
157 # Disable 3-way merge if there is only one parent
157 if do3way:
158 if do3way:
158 if node1b == nullid:
159 if node1b == nullid:
159 do3way = False
160 do3way = False
160
161
161 subrepos=opts.get('subrepos')
162 subrepos=opts.get('subrepos')
162
163
163 matcher = scmutil.match(repo[node2], pats, opts)
164 matcher = scmutil.match(repo[node2], pats, opts)
164
165
165 if opts.get('patch'):
166 if opts.get('patch'):
166 if subrepos:
167 if subrepos:
167 raise error.Abort(_('--patch cannot be used with --subrepos'))
168 raise error.Abort(_('--patch cannot be used with --subrepos'))
168 if node2 is None:
169 if node2 is None:
169 raise error.Abort(_('--patch requires two revisions'))
170 raise error.Abort(_('--patch requires two revisions'))
170 else:
171 else:
171 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
172 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
172 listsubrepos=subrepos)[:3])
173 listsubrepos=subrepos)[:3])
173 if do3way:
174 if do3way:
174 mod_b, add_b, rem_b = map(set,
175 mod_b, add_b, rem_b = map(set,
175 repo.status(node1b, node2, matcher,
176 repo.status(node1b, node2, matcher,
176 listsubrepos=subrepos)[:3])
177 listsubrepos=subrepos)[:3])
177 else:
178 else:
178 mod_b, add_b, rem_b = set(), set(), set()
179 mod_b, add_b, rem_b = set(), set(), set()
179 modadd = mod_a | add_a | mod_b | add_b
180 modadd = mod_a | add_a | mod_b | add_b
180 common = modadd | rem_a | rem_b
181 common = modadd | rem_a | rem_b
181 if not common:
182 if not common:
182 return 0
183 return 0
183
184
184 tmproot = tempfile.mkdtemp(prefix='extdiff.')
185 tmproot = tempfile.mkdtemp(prefix='extdiff.')
185 try:
186 try:
186 if not opts.get('patch'):
187 if not opts.get('patch'):
187 # Always make a copy of node1a (and node1b, if applicable)
188 # Always make a copy of node1a (and node1b, if applicable)
188 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
189 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
189 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
190 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
190 subrepos)[0]
191 subrepos)[0]
191 rev1a = '@%d' % repo[node1a].rev()
192 rev1a = '@%d' % repo[node1a].rev()
192 if do3way:
193 if do3way:
193 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
194 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
194 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
195 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
195 subrepos)[0]
196 subrepos)[0]
196 rev1b = '@%d' % repo[node1b].rev()
197 rev1b = '@%d' % repo[node1b].rev()
197 else:
198 else:
198 dir1b = None
199 dir1b = None
199 rev1b = ''
200 rev1b = ''
200
201
201 fnsandstat = []
202 fnsandstat = []
202
203
203 # If node2 in not the wc or there is >1 change, copy it
204 # If node2 in not the wc or there is >1 change, copy it
204 dir2root = ''
205 dir2root = ''
205 rev2 = ''
206 rev2 = ''
206 if node2:
207 if node2:
207 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
208 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
208 rev2 = '@%d' % repo[node2].rev()
209 rev2 = '@%d' % repo[node2].rev()
209 elif len(common) > 1:
210 elif len(common) > 1:
210 #we only actually need to get the files to copy back to
211 #we only actually need to get the files to copy back to
211 #the working dir in this case (because the other cases
212 #the working dir in this case (because the other cases
212 #are: diffing 2 revisions or single file -- in which case
213 #are: diffing 2 revisions or single file -- in which case
213 #the file is already directly passed to the diff tool).
214 #the file is already directly passed to the diff tool).
214 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
215 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
215 subrepos)
216 subrepos)
216 else:
217 else:
217 # This lets the diff tool open the changed file directly
218 # This lets the diff tool open the changed file directly
218 dir2 = ''
219 dir2 = ''
219 dir2root = repo.root
220 dir2root = repo.root
220
221
221 label1a = rev1a
222 label1a = rev1a
222 label1b = rev1b
223 label1b = rev1b
223 label2 = rev2
224 label2 = rev2
224
225
225 # If only one change, diff the files instead of the directories
226 # If only one change, diff the files instead of the directories
226 # Handle bogus modifies correctly by checking if the files exist
227 # Handle bogus modifies correctly by checking if the files exist
227 if len(common) == 1:
228 if len(common) == 1:
228 common_file = util.localpath(common.pop())
229 common_file = util.localpath(common.pop())
229 dir1a = os.path.join(tmproot, dir1a, common_file)
230 dir1a = os.path.join(tmproot, dir1a, common_file)
230 label1a = common_file + rev1a
231 label1a = common_file + rev1a
231 if not os.path.isfile(dir1a):
232 if not os.path.isfile(dir1a):
232 dir1a = os.devnull
233 dir1a = os.devnull
233 if do3way:
234 if do3way:
234 dir1b = os.path.join(tmproot, dir1b, common_file)
235 dir1b = os.path.join(tmproot, dir1b, common_file)
235 label1b = common_file + rev1b
236 label1b = common_file + rev1b
236 if not os.path.isfile(dir1b):
237 if not os.path.isfile(dir1b):
237 dir1b = os.devnull
238 dir1b = os.devnull
238 dir2 = os.path.join(dir2root, dir2, common_file)
239 dir2 = os.path.join(dir2root, dir2, common_file)
239 label2 = common_file + rev2
240 label2 = common_file + rev2
240 else:
241 else:
241 template = 'hg-%h.patch'
242 template = 'hg-%h.patch'
242 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
243 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
243 template=repo.vfs.reljoin(tmproot, template),
244 template=repo.vfs.reljoin(tmproot, template),
244 match=matcher)
245 match=matcher)
245 label1a = cmdutil.makefilename(repo, template, node1a)
246 label1a = cmdutil.makefilename(repo, template, node1a)
246 label2 = cmdutil.makefilename(repo, template, node2)
247 label2 = cmdutil.makefilename(repo, template, node2)
247 dir1a = repo.vfs.reljoin(tmproot, label1a)
248 dir1a = repo.vfs.reljoin(tmproot, label1a)
248 dir2 = repo.vfs.reljoin(tmproot, label2)
249 dir2 = repo.vfs.reljoin(tmproot, label2)
249 dir1b = None
250 dir1b = None
250 label1b = None
251 label1b = None
251 fnsandstat = []
252 fnsandstat = []
252
253
253 # Function to quote file/dir names in the argument string.
254 # Function to quote file/dir names in the argument string.
254 # When not operating in 3-way mode, an empty string is
255 # When not operating in 3-way mode, an empty string is
255 # returned for parent2
256 # returned for parent2
256 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
257 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
257 'plabel1': label1a, 'plabel2': label1b,
258 'plabel1': label1a, 'plabel2': label1b,
258 'clabel': label2, 'child': dir2,
259 'clabel': label2, 'child': dir2,
259 'root': repo.root}
260 'root': repo.root}
260 def quote(match):
261 def quote(match):
261 pre = match.group(2)
262 pre = match.group(2)
262 key = match.group(3)
263 key = match.group(3)
263 if not do3way and key == 'parent2':
264 if not do3way and key == 'parent2':
264 return pre
265 return pre
265 return pre + util.shellquote(replace[key])
266 return pre + util.shellquote(replace[key])
266
267
267 # Match parent2 first, so 'parent1?' will match both parent1 and parent
268 # Match parent2 first, so 'parent1?' will match both parent1 and parent
268 regex = (r'''(['"]?)([^\s'"$]*)'''
269 regex = (r'''(['"]?)([^\s'"$]*)'''
269 r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
270 r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
270 if not do3way and not re.search(regex, cmdline):
271 if not do3way and not re.search(regex, cmdline):
271 cmdline += ' $parent1 $child'
272 cmdline += ' $parent1 $child'
272 cmdline = re.sub(regex, quote, cmdline)
273 cmdline = re.sub(regex, quote, cmdline)
273
274
274 ui.debug('running %r in %s\n' % (cmdline, tmproot))
275 ui.debug('running %r in %s\n' % (cmdline, tmproot))
275 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
276 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
276
277
277 for copy_fn, working_fn, st in fnsandstat:
278 for copy_fn, working_fn, st in fnsandstat:
278 cpstat = os.lstat(copy_fn)
279 cpstat = os.lstat(copy_fn)
279 # Some tools copy the file and attributes, so mtime may not detect
280 # Some tools copy the file and attributes, so mtime may not detect
280 # all changes. A size check will detect more cases, but not all.
281 # all changes. A size check will detect more cases, but not all.
281 # The only certain way to detect every case is to diff all files,
282 # The only certain way to detect every case is to diff all files,
282 # which could be expensive.
283 # which could be expensive.
283 # copyfile() carries over the permission, so the mode check could
284 # copyfile() carries over the permission, so the mode check could
284 # be in an 'elif' branch, but for the case where the file has
285 # be in an 'elif' branch, but for the case where the file has
285 # changed without affecting mtime or size.
286 # changed without affecting mtime or size.
286 if (cpstat.st_mtime != st.st_mtime or cpstat.st_size != st.st_size
287 if (cpstat.st_mtime != st.st_mtime or cpstat.st_size != st.st_size
287 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
288 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
288 ui.debug('file changed while diffing. '
289 ui.debug('file changed while diffing. '
289 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
290 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
290 util.copyfile(copy_fn, working_fn)
291 util.copyfile(copy_fn, working_fn)
291
292
292 return 1
293 return 1
293 finally:
294 finally:
294 ui.note(_('cleaning up temp directory\n'))
295 ui.note(_('cleaning up temp directory\n'))
295 shutil.rmtree(tmproot)
296 shutil.rmtree(tmproot)
296
297
297 extdiffopts = [
298 extdiffopts = [
298 ('o', 'option', [],
299 ('o', 'option', [],
299 _('pass option to comparison program'), _('OPT')),
300 _('pass option to comparison program'), _('OPT')),
300 ('r', 'rev', [], _('revision'), _('REV')),
301 ('r', 'rev', [], _('revision'), _('REV')),
301 ('c', 'change', '', _('change made by revision'), _('REV')),
302 ('c', 'change', '', _('change made by revision'), _('REV')),
302 ('', 'patch', None, _('compare patches for two revisions'))
303 ('', 'patch', None, _('compare patches for two revisions'))
303 ] + commands.walkopts + commands.subrepoopts
304 ] + commands.walkopts + commands.subrepoopts
304
305
305 @command('extdiff',
306 @command('extdiff',
306 [('p', 'program', '', _('comparison program to run'), _('CMD')),
307 [('p', 'program', '', _('comparison program to run'), _('CMD')),
307 ] + extdiffopts,
308 ] + extdiffopts,
308 _('hg extdiff [OPT]... [FILE]...'),
309 _('hg extdiff [OPT]... [FILE]...'),
309 inferrepo=True)
310 inferrepo=True)
310 def extdiff(ui, repo, *pats, **opts):
311 def extdiff(ui, repo, *pats, **opts):
311 '''use external program to diff repository (or selected files)
312 '''use external program to diff repository (or selected files)
312
313
313 Show differences between revisions for the specified files, using
314 Show differences between revisions for the specified files, using
314 an external program. The default program used is diff, with
315 an external program. The default program used is diff, with
315 default options "-Npru".
316 default options "-Npru".
316
317
317 To select a different program, use the -p/--program option. The
318 To select a different program, use the -p/--program option. The
318 program will be passed the names of two directories to compare. To
319 program will be passed the names of two directories to compare. To
319 pass additional options to the program, use -o/--option. These
320 pass additional options to the program, use -o/--option. These
320 will be passed before the names of the directories to compare.
321 will be passed before the names of the directories to compare.
321
322
322 When two revision arguments are given, then changes are shown
323 When two revision arguments are given, then changes are shown
323 between those revisions. If only one revision is specified then
324 between those revisions. If only one revision is specified then
324 that revision is compared to the working directory, and, when no
325 that revision is compared to the working directory, and, when no
325 revisions are specified, the working directory files are compared
326 revisions are specified, the working directory files are compared
326 to its parent.'''
327 to its parent.'''
327 program = opts.get('program')
328 program = opts.get('program')
328 option = opts.get('option')
329 option = opts.get('option')
329 if not program:
330 if not program:
330 program = 'diff'
331 program = 'diff'
331 option = option or ['-Npru']
332 option = option or ['-Npru']
332 cmdline = ' '.join(map(util.shellquote, [program] + option))
333 cmdline = ' '.join(map(util.shellquote, [program] + option))
333 return dodiff(ui, repo, cmdline, pats, opts)
334 return dodiff(ui, repo, cmdline, pats, opts)
334
335
335 class savedcmd(object):
336 class savedcmd(object):
336 """use external program to diff repository (or selected files)
337 """use external program to diff repository (or selected files)
337
338
338 Show differences between revisions for the specified files, using
339 Show differences between revisions for the specified files, using
339 the following program::
340 the following program::
340
341
341 %(path)s
342 %(path)s
342
343
343 When two revision arguments are given, then changes are shown
344 When two revision arguments are given, then changes are shown
344 between those revisions. If only one revision is specified then
345 between those revisions. If only one revision is specified then
345 that revision is compared to the working directory, and, when no
346 that revision is compared to the working directory, and, when no
346 revisions are specified, the working directory files are compared
347 revisions are specified, the working directory files are compared
347 to its parent.
348 to its parent.
348 """
349 """
349
350
350 def __init__(self, path, cmdline):
351 def __init__(self, path, cmdline):
351 # We can't pass non-ASCII through docstrings (and path is
352 # We can't pass non-ASCII through docstrings (and path is
352 # in an unknown encoding anyway)
353 # in an unknown encoding anyway)
353 docpath = util.escapestr(path)
354 docpath = util.escapestr(path)
354 self.__doc__ = self.__doc__ % {'path': util.uirepr(docpath)}
355 self.__doc__ = self.__doc__ % {'path': util.uirepr(docpath)}
355 self._cmdline = cmdline
356 self._cmdline = cmdline
356
357
357 def __call__(self, ui, repo, *pats, **opts):
358 def __call__(self, ui, repo, *pats, **opts):
358 options = ' '.join(map(util.shellquote, opts['option']))
359 options = ' '.join(map(util.shellquote, opts['option']))
359 if options:
360 if options:
360 options = ' ' + options
361 options = ' ' + options
361 return dodiff(ui, repo, self._cmdline + options, pats, opts)
362 return dodiff(ui, repo, self._cmdline + options, pats, opts)
362
363
363 def uisetup(ui):
364 def uisetup(ui):
364 for cmd, path in ui.configitems('extdiff'):
365 for cmd, path in ui.configitems('extdiff'):
365 path = util.expandpath(path)
366 path = util.expandpath(path)
366 if cmd.startswith('cmd.'):
367 if cmd.startswith('cmd.'):
367 cmd = cmd[4:]
368 cmd = cmd[4:]
368 if not path:
369 if not path:
369 path = util.findexe(cmd)
370 path = util.findexe(cmd)
370 if path is None:
371 if path is None:
371 path = filemerge.findexternaltool(ui, cmd) or cmd
372 path = filemerge.findexternaltool(ui, cmd) or cmd
372 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
373 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
373 cmdline = util.shellquote(path)
374 cmdline = util.shellquote(path)
374 if diffopts:
375 if diffopts:
375 cmdline += ' ' + diffopts
376 cmdline += ' ' + diffopts
376 elif cmd.startswith('opts.'):
377 elif cmd.startswith('opts.'):
377 continue
378 continue
378 else:
379 else:
379 if path:
380 if path:
380 # case "cmd = path opts"
381 # case "cmd = path opts"
381 cmdline = path
382 cmdline = path
382 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
383 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
383 else:
384 else:
384 # case "cmd ="
385 # case "cmd ="
385 path = util.findexe(cmd)
386 path = util.findexe(cmd)
386 if path is None:
387 if path is None:
387 path = filemerge.findexternaltool(ui, cmd) or cmd
388 path = filemerge.findexternaltool(ui, cmd) or cmd
388 cmdline = util.shellquote(path)
389 cmdline = util.shellquote(path)
389 diffopts = False
390 diffopts = False
390 # look for diff arguments in [diff-tools] then [merge-tools]
391 # look for diff arguments in [diff-tools] then [merge-tools]
391 if not diffopts:
392 if not diffopts:
392 args = ui.config('diff-tools', cmd+'.diffargs') or \
393 args = ui.config('diff-tools', cmd+'.diffargs') or \
393 ui.config('merge-tools', cmd+'.diffargs')
394 ui.config('merge-tools', cmd+'.diffargs')
394 if args:
395 if args:
395 cmdline += ' ' + args
396 cmdline += ' ' + args
396 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
397 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
397 inferrepo=True)(savedcmd(path, cmdline))
398 inferrepo=True)(savedcmd(path, cmdline))
398
399
399 # tell hggettext to extract docstrings from these functions:
400 # tell hggettext to extract docstrings from these functions:
400 i18nfunctions = [savedcmd]
401 i18nfunctions = [savedcmd]
@@ -1,165 +1,166 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''pull, update and merge in one command (DEPRECATED)'''
8 '''pull, update and merge in one command (DEPRECATED)'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.node import (
13 from mercurial.node import (
14 short,
14 short,
15 )
15 )
16 from mercurial import (
16 from mercurial import (
17 cmdutil,
17 cmdutil,
18 commands,
18 commands,
19 error,
19 error,
20 exchange,
20 exchange,
21 hg,
21 hg,
22 lock,
22 lock,
23 registrar,
23 util,
24 util,
24 )
25 )
25
26
26 release = lock.release
27 release = lock.release
27 cmdtable = {}
28 cmdtable = {}
28 command = cmdutil.command(cmdtable)
29 command = registrar.command(cmdtable)
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # be specifying the version(s) of Mercurial they are tested with, or
32 # be specifying the version(s) of Mercurial they are tested with, or
32 # leave the attribute unspecified.
33 # leave the attribute unspecified.
33 testedwith = 'ships-with-hg-core'
34 testedwith = 'ships-with-hg-core'
34
35
35 @command('fetch',
36 @command('fetch',
36 [('r', 'rev', [],
37 [('r', 'rev', [],
37 _('a specific revision you would like to pull'), _('REV')),
38 _('a specific revision you would like to pull'), _('REV')),
38 ('e', 'edit', None, _('invoke editor on commit messages')),
39 ('e', 'edit', None, _('invoke editor on commit messages')),
39 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
40 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
40 ('', 'switch-parent', None, _('switch parents when merging')),
41 ('', 'switch-parent', None, _('switch parents when merging')),
41 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
42 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
42 _('hg fetch [SOURCE]'))
43 _('hg fetch [SOURCE]'))
43 def fetch(ui, repo, source='default', **opts):
44 def fetch(ui, repo, source='default', **opts):
44 '''pull changes from a remote repository, merge new changes if needed.
45 '''pull changes from a remote repository, merge new changes if needed.
45
46
46 This finds all changes from the repository at the specified path
47 This finds all changes from the repository at the specified path
47 or URL and adds them to the local repository.
48 or URL and adds them to the local repository.
48
49
49 If the pulled changes add a new branch head, the head is
50 If the pulled changes add a new branch head, the head is
50 automatically merged, and the result of the merge is committed.
51 automatically merged, and the result of the merge is committed.
51 Otherwise, the working directory is updated to include the new
52 Otherwise, the working directory is updated to include the new
52 changes.
53 changes.
53
54
54 When a merge is needed, the working directory is first updated to
55 When a merge is needed, the working directory is first updated to
55 the newly pulled changes. Local changes are then merged into the
56 the newly pulled changes. Local changes are then merged into the
56 pulled changes. To switch the merge order, use --switch-parent.
57 pulled changes. To switch the merge order, use --switch-parent.
57
58
58 See :hg:`help dates` for a list of formats valid for -d/--date.
59 See :hg:`help dates` for a list of formats valid for -d/--date.
59
60
60 Returns 0 on success.
61 Returns 0 on success.
61 '''
62 '''
62
63
63 date = opts.get('date')
64 date = opts.get('date')
64 if date:
65 if date:
65 opts['date'] = util.parsedate(date)
66 opts['date'] = util.parsedate(date)
66
67
67 parent, _p2 = repo.dirstate.parents()
68 parent, _p2 = repo.dirstate.parents()
68 branch = repo.dirstate.branch()
69 branch = repo.dirstate.branch()
69 try:
70 try:
70 branchnode = repo.branchtip(branch)
71 branchnode = repo.branchtip(branch)
71 except error.RepoLookupError:
72 except error.RepoLookupError:
72 branchnode = None
73 branchnode = None
73 if parent != branchnode:
74 if parent != branchnode:
74 raise error.Abort(_('working directory not at branch tip'),
75 raise error.Abort(_('working directory not at branch tip'),
75 hint=_("use 'hg update' to check out branch tip"))
76 hint=_("use 'hg update' to check out branch tip"))
76
77
77 wlock = lock = None
78 wlock = lock = None
78 try:
79 try:
79 wlock = repo.wlock()
80 wlock = repo.wlock()
80 lock = repo.lock()
81 lock = repo.lock()
81
82
82 cmdutil.bailifchanged(repo)
83 cmdutil.bailifchanged(repo)
83
84
84 bheads = repo.branchheads(branch)
85 bheads = repo.branchheads(branch)
85 bheads = [head for head in bheads if len(repo[head].children()) == 0]
86 bheads = [head for head in bheads if len(repo[head].children()) == 0]
86 if len(bheads) > 1:
87 if len(bheads) > 1:
87 raise error.Abort(_('multiple heads in this branch '
88 raise error.Abort(_('multiple heads in this branch '
88 '(use "hg heads ." and "hg merge" to merge)'))
89 '(use "hg heads ." and "hg merge" to merge)'))
89
90
90 other = hg.peer(repo, opts, ui.expandpath(source))
91 other = hg.peer(repo, opts, ui.expandpath(source))
91 ui.status(_('pulling from %s\n') %
92 ui.status(_('pulling from %s\n') %
92 util.hidepassword(ui.expandpath(source)))
93 util.hidepassword(ui.expandpath(source)))
93 revs = None
94 revs = None
94 if opts['rev']:
95 if opts['rev']:
95 try:
96 try:
96 revs = [other.lookup(rev) for rev in opts['rev']]
97 revs = [other.lookup(rev) for rev in opts['rev']]
97 except error.CapabilityError:
98 except error.CapabilityError:
98 err = _("other repository doesn't support revision lookup, "
99 err = _("other repository doesn't support revision lookup, "
99 "so a rev cannot be specified.")
100 "so a rev cannot be specified.")
100 raise error.Abort(err)
101 raise error.Abort(err)
101
102
102 # Are there any changes at all?
103 # Are there any changes at all?
103 modheads = exchange.pull(repo, other, heads=revs).cgresult
104 modheads = exchange.pull(repo, other, heads=revs).cgresult
104 if modheads == 0:
105 if modheads == 0:
105 return 0
106 return 0
106
107
107 # Is this a simple fast-forward along the current branch?
108 # Is this a simple fast-forward along the current branch?
108 newheads = repo.branchheads(branch)
109 newheads = repo.branchheads(branch)
109 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
110 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
110 if len(newheads) == 1 and len(newchildren):
111 if len(newheads) == 1 and len(newchildren):
111 if newchildren[0] != parent:
112 if newchildren[0] != parent:
112 return hg.update(repo, newchildren[0])
113 return hg.update(repo, newchildren[0])
113 else:
114 else:
114 return 0
115 return 0
115
116
116 # Are there more than one additional branch heads?
117 # Are there more than one additional branch heads?
117 newchildren = [n for n in newchildren if n != parent]
118 newchildren = [n for n in newchildren if n != parent]
118 newparent = parent
119 newparent = parent
119 if newchildren:
120 if newchildren:
120 newparent = newchildren[0]
121 newparent = newchildren[0]
121 hg.clean(repo, newparent)
122 hg.clean(repo, newparent)
122 newheads = [n for n in newheads if n != newparent]
123 newheads = [n for n in newheads if n != newparent]
123 if len(newheads) > 1:
124 if len(newheads) > 1:
124 ui.status(_('not merging with %d other new branch heads '
125 ui.status(_('not merging with %d other new branch heads '
125 '(use "hg heads ." and "hg merge" to merge them)\n') %
126 '(use "hg heads ." and "hg merge" to merge them)\n') %
126 (len(newheads) - 1))
127 (len(newheads) - 1))
127 return 1
128 return 1
128
129
129 if not newheads:
130 if not newheads:
130 return 0
131 return 0
131
132
132 # Otherwise, let's merge.
133 # Otherwise, let's merge.
133 err = False
134 err = False
134 if newheads:
135 if newheads:
135 # By default, we consider the repository we're pulling
136 # By default, we consider the repository we're pulling
136 # *from* as authoritative, so we merge our changes into
137 # *from* as authoritative, so we merge our changes into
137 # theirs.
138 # theirs.
138 if opts['switch_parent']:
139 if opts['switch_parent']:
139 firstparent, secondparent = newparent, newheads[0]
140 firstparent, secondparent = newparent, newheads[0]
140 else:
141 else:
141 firstparent, secondparent = newheads[0], newparent
142 firstparent, secondparent = newheads[0], newparent
142 ui.status(_('updating to %d:%s\n') %
143 ui.status(_('updating to %d:%s\n') %
143 (repo.changelog.rev(firstparent),
144 (repo.changelog.rev(firstparent),
144 short(firstparent)))
145 short(firstparent)))
145 hg.clean(repo, firstparent)
146 hg.clean(repo, firstparent)
146 ui.status(_('merging with %d:%s\n') %
147 ui.status(_('merging with %d:%s\n') %
147 (repo.changelog.rev(secondparent), short(secondparent)))
148 (repo.changelog.rev(secondparent), short(secondparent)))
148 err = hg.merge(repo, secondparent, remind=False)
149 err = hg.merge(repo, secondparent, remind=False)
149
150
150 if not err:
151 if not err:
151 # we don't translate commit messages
152 # we don't translate commit messages
152 message = (cmdutil.logmessage(ui, opts) or
153 message = (cmdutil.logmessage(ui, opts) or
153 ('Automated merge with %s' %
154 ('Automated merge with %s' %
154 util.removeauth(other.url())))
155 util.removeauth(other.url())))
155 editopt = opts.get('edit') or opts.get('force_editor')
156 editopt = opts.get('edit') or opts.get('force_editor')
156 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
157 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
157 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
158 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
158 ui.status(_('new changeset %d:%s merges remote changes '
159 ui.status(_('new changeset %d:%s merges remote changes '
159 'with local\n') % (repo.changelog.rev(n),
160 'with local\n') % (repo.changelog.rev(n),
160 short(n)))
161 short(n)))
161
162
162 return err
163 return err
163
164
164 finally:
165 finally:
165 release(lock, wlock)
166 release(lock, wlock)
@@ -1,319 +1,320 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import os
11 import os
12 import tempfile
12 import tempfile
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 cmdutil,
16 cmdutil,
17 commands,
17 commands,
18 error,
18 error,
19 match,
19 match,
20 node as hgnode,
20 node as hgnode,
21 pycompat,
21 pycompat,
22 registrar,
22 util,
23 util,
23 )
24 )
24
25
25 cmdtable = {}
26 cmdtable = {}
26 command = cmdutil.command(cmdtable)
27 command = registrar.command(cmdtable)
27 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
28 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
28 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
29 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
29 # be specifying the version(s) of Mercurial they are tested with, or
30 # be specifying the version(s) of Mercurial they are tested with, or
30 # leave the attribute unspecified.
31 # leave the attribute unspecified.
31 testedwith = 'ships-with-hg-core'
32 testedwith = 'ships-with-hg-core'
32
33
33 class gpg(object):
34 class gpg(object):
34 def __init__(self, path, key=None):
35 def __init__(self, path, key=None):
35 self.path = path
36 self.path = path
36 self.key = (key and " --local-user \"%s\"" % key) or ""
37 self.key = (key and " --local-user \"%s\"" % key) or ""
37
38
38 def sign(self, data):
39 def sign(self, data):
39 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
40 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
40 return util.filter(data, gpgcmd)
41 return util.filter(data, gpgcmd)
41
42
42 def verify(self, data, sig):
43 def verify(self, data, sig):
43 """ returns of the good and bad signatures"""
44 """ returns of the good and bad signatures"""
44 sigfile = datafile = None
45 sigfile = datafile = None
45 try:
46 try:
46 # create temporary files
47 # create temporary files
47 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
48 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
48 fp = os.fdopen(fd, pycompat.sysstr('wb'))
49 fp = os.fdopen(fd, pycompat.sysstr('wb'))
49 fp.write(sig)
50 fp.write(sig)
50 fp.close()
51 fp.close()
51 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
52 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
52 fp = os.fdopen(fd, pycompat.sysstr('wb'))
53 fp = os.fdopen(fd, pycompat.sysstr('wb'))
53 fp.write(data)
54 fp.write(data)
54 fp.close()
55 fp.close()
55 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
56 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
56 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
57 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
57 ret = util.filter("", gpgcmd)
58 ret = util.filter("", gpgcmd)
58 finally:
59 finally:
59 for f in (sigfile, datafile):
60 for f in (sigfile, datafile):
60 try:
61 try:
61 if f:
62 if f:
62 os.unlink(f)
63 os.unlink(f)
63 except OSError:
64 except OSError:
64 pass
65 pass
65 keys = []
66 keys = []
66 key, fingerprint = None, None
67 key, fingerprint = None, None
67 for l in ret.splitlines():
68 for l in ret.splitlines():
68 # see DETAILS in the gnupg documentation
69 # see DETAILS in the gnupg documentation
69 # filter the logger output
70 # filter the logger output
70 if not l.startswith("[GNUPG:]"):
71 if not l.startswith("[GNUPG:]"):
71 continue
72 continue
72 l = l[9:]
73 l = l[9:]
73 if l.startswith("VALIDSIG"):
74 if l.startswith("VALIDSIG"):
74 # fingerprint of the primary key
75 # fingerprint of the primary key
75 fingerprint = l.split()[10]
76 fingerprint = l.split()[10]
76 elif l.startswith("ERRSIG"):
77 elif l.startswith("ERRSIG"):
77 key = l.split(" ", 3)[:2]
78 key = l.split(" ", 3)[:2]
78 key.append("")
79 key.append("")
79 fingerprint = None
80 fingerprint = None
80 elif (l.startswith("GOODSIG") or
81 elif (l.startswith("GOODSIG") or
81 l.startswith("EXPSIG") or
82 l.startswith("EXPSIG") or
82 l.startswith("EXPKEYSIG") or
83 l.startswith("EXPKEYSIG") or
83 l.startswith("BADSIG")):
84 l.startswith("BADSIG")):
84 if key is not None:
85 if key is not None:
85 keys.append(key + [fingerprint])
86 keys.append(key + [fingerprint])
86 key = l.split(" ", 2)
87 key = l.split(" ", 2)
87 fingerprint = None
88 fingerprint = None
88 if key is not None:
89 if key is not None:
89 keys.append(key + [fingerprint])
90 keys.append(key + [fingerprint])
90 return keys
91 return keys
91
92
92 def newgpg(ui, **opts):
93 def newgpg(ui, **opts):
93 """create a new gpg instance"""
94 """create a new gpg instance"""
94 gpgpath = ui.config("gpg", "cmd", "gpg")
95 gpgpath = ui.config("gpg", "cmd", "gpg")
95 gpgkey = opts.get('key')
96 gpgkey = opts.get('key')
96 if not gpgkey:
97 if not gpgkey:
97 gpgkey = ui.config("gpg", "key", None)
98 gpgkey = ui.config("gpg", "key", None)
98 return gpg(gpgpath, gpgkey)
99 return gpg(gpgpath, gpgkey)
99
100
100 def sigwalk(repo):
101 def sigwalk(repo):
101 """
102 """
102 walk over every sigs, yields a couple
103 walk over every sigs, yields a couple
103 ((node, version, sig), (filename, linenumber))
104 ((node, version, sig), (filename, linenumber))
104 """
105 """
105 def parsefile(fileiter, context):
106 def parsefile(fileiter, context):
106 ln = 1
107 ln = 1
107 for l in fileiter:
108 for l in fileiter:
108 if not l:
109 if not l:
109 continue
110 continue
110 yield (l.split(" ", 2), (context, ln))
111 yield (l.split(" ", 2), (context, ln))
111 ln += 1
112 ln += 1
112
113
113 # read the heads
114 # read the heads
114 fl = repo.file(".hgsigs")
115 fl = repo.file(".hgsigs")
115 for r in reversed(fl.heads()):
116 for r in reversed(fl.heads()):
116 fn = ".hgsigs|%s" % hgnode.short(r)
117 fn = ".hgsigs|%s" % hgnode.short(r)
117 for item in parsefile(fl.read(r).splitlines(), fn):
118 for item in parsefile(fl.read(r).splitlines(), fn):
118 yield item
119 yield item
119 try:
120 try:
120 # read local signatures
121 # read local signatures
121 fn = "localsigs"
122 fn = "localsigs"
122 for item in parsefile(repo.vfs(fn), fn):
123 for item in parsefile(repo.vfs(fn), fn):
123 yield item
124 yield item
124 except IOError:
125 except IOError:
125 pass
126 pass
126
127
127 def getkeys(ui, repo, mygpg, sigdata, context):
128 def getkeys(ui, repo, mygpg, sigdata, context):
128 """get the keys who signed a data"""
129 """get the keys who signed a data"""
129 fn, ln = context
130 fn, ln = context
130 node, version, sig = sigdata
131 node, version, sig = sigdata
131 prefix = "%s:%d" % (fn, ln)
132 prefix = "%s:%d" % (fn, ln)
132 node = hgnode.bin(node)
133 node = hgnode.bin(node)
133
134
134 data = node2txt(repo, node, version)
135 data = node2txt(repo, node, version)
135 sig = binascii.a2b_base64(sig)
136 sig = binascii.a2b_base64(sig)
136 keys = mygpg.verify(data, sig)
137 keys = mygpg.verify(data, sig)
137
138
138 validkeys = []
139 validkeys = []
139 # warn for expired key and/or sigs
140 # warn for expired key and/or sigs
140 for key in keys:
141 for key in keys:
141 if key[0] == "ERRSIG":
142 if key[0] == "ERRSIG":
142 ui.write(_("%s Unknown key ID \"%s\"\n")
143 ui.write(_("%s Unknown key ID \"%s\"\n")
143 % (prefix, shortkey(ui, key[1][:15])))
144 % (prefix, shortkey(ui, key[1][:15])))
144 continue
145 continue
145 if key[0] == "BADSIG":
146 if key[0] == "BADSIG":
146 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
147 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
147 continue
148 continue
148 if key[0] == "EXPSIG":
149 if key[0] == "EXPSIG":
149 ui.write(_("%s Note: Signature has expired"
150 ui.write(_("%s Note: Signature has expired"
150 " (signed by: \"%s\")\n") % (prefix, key[2]))
151 " (signed by: \"%s\")\n") % (prefix, key[2]))
151 elif key[0] == "EXPKEYSIG":
152 elif key[0] == "EXPKEYSIG":
152 ui.write(_("%s Note: This key has expired"
153 ui.write(_("%s Note: This key has expired"
153 " (signed by: \"%s\")\n") % (prefix, key[2]))
154 " (signed by: \"%s\")\n") % (prefix, key[2]))
154 validkeys.append((key[1], key[2], key[3]))
155 validkeys.append((key[1], key[2], key[3]))
155 return validkeys
156 return validkeys
156
157
157 @command("sigs", [], _('hg sigs'))
158 @command("sigs", [], _('hg sigs'))
158 def sigs(ui, repo):
159 def sigs(ui, repo):
159 """list signed changesets"""
160 """list signed changesets"""
160 mygpg = newgpg(ui)
161 mygpg = newgpg(ui)
161 revs = {}
162 revs = {}
162
163
163 for data, context in sigwalk(repo):
164 for data, context in sigwalk(repo):
164 node, version, sig = data
165 node, version, sig = data
165 fn, ln = context
166 fn, ln = context
166 try:
167 try:
167 n = repo.lookup(node)
168 n = repo.lookup(node)
168 except KeyError:
169 except KeyError:
169 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
170 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
170 continue
171 continue
171 r = repo.changelog.rev(n)
172 r = repo.changelog.rev(n)
172 keys = getkeys(ui, repo, mygpg, data, context)
173 keys = getkeys(ui, repo, mygpg, data, context)
173 if not keys:
174 if not keys:
174 continue
175 continue
175 revs.setdefault(r, [])
176 revs.setdefault(r, [])
176 revs[r].extend(keys)
177 revs[r].extend(keys)
177 for rev in sorted(revs, reverse=True):
178 for rev in sorted(revs, reverse=True):
178 for k in revs[rev]:
179 for k in revs[rev]:
179 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
180 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
180 ui.write("%-30s %s\n" % (keystr(ui, k), r))
181 ui.write("%-30s %s\n" % (keystr(ui, k), r))
181
182
182 @command("sigcheck", [], _('hg sigcheck REV'))
183 @command("sigcheck", [], _('hg sigcheck REV'))
183 def sigcheck(ui, repo, rev):
184 def sigcheck(ui, repo, rev):
184 """verify all the signatures there may be for a particular revision"""
185 """verify all the signatures there may be for a particular revision"""
185 mygpg = newgpg(ui)
186 mygpg = newgpg(ui)
186 rev = repo.lookup(rev)
187 rev = repo.lookup(rev)
187 hexrev = hgnode.hex(rev)
188 hexrev = hgnode.hex(rev)
188 keys = []
189 keys = []
189
190
190 for data, context in sigwalk(repo):
191 for data, context in sigwalk(repo):
191 node, version, sig = data
192 node, version, sig = data
192 if node == hexrev:
193 if node == hexrev:
193 k = getkeys(ui, repo, mygpg, data, context)
194 k = getkeys(ui, repo, mygpg, data, context)
194 if k:
195 if k:
195 keys.extend(k)
196 keys.extend(k)
196
197
197 if not keys:
198 if not keys:
198 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
199 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
199 return
200 return
200
201
201 # print summary
202 # print summary
202 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
203 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
203 for key in keys:
204 for key in keys:
204 ui.write(" %s\n" % keystr(ui, key))
205 ui.write(" %s\n" % keystr(ui, key))
205
206
206 def keystr(ui, key):
207 def keystr(ui, key):
207 """associate a string to a key (username, comment)"""
208 """associate a string to a key (username, comment)"""
208 keyid, user, fingerprint = key
209 keyid, user, fingerprint = key
209 comment = ui.config("gpg", fingerprint, None)
210 comment = ui.config("gpg", fingerprint, None)
210 if comment:
211 if comment:
211 return "%s (%s)" % (user, comment)
212 return "%s (%s)" % (user, comment)
212 else:
213 else:
213 return user
214 return user
214
215
215 @command("sign",
216 @command("sign",
216 [('l', 'local', None, _('make the signature local')),
217 [('l', 'local', None, _('make the signature local')),
217 ('f', 'force', None, _('sign even if the sigfile is modified')),
218 ('f', 'force', None, _('sign even if the sigfile is modified')),
218 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
219 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
219 ('k', 'key', '',
220 ('k', 'key', '',
220 _('the key id to sign with'), _('ID')),
221 _('the key id to sign with'), _('ID')),
221 ('m', 'message', '',
222 ('m', 'message', '',
222 _('use text as commit message'), _('TEXT')),
223 _('use text as commit message'), _('TEXT')),
223 ('e', 'edit', False, _('invoke editor on commit messages')),
224 ('e', 'edit', False, _('invoke editor on commit messages')),
224 ] + commands.commitopts2,
225 ] + commands.commitopts2,
225 _('hg sign [OPTION]... [REV]...'))
226 _('hg sign [OPTION]... [REV]...'))
226 def sign(ui, repo, *revs, **opts):
227 def sign(ui, repo, *revs, **opts):
227 """add a signature for the current or given revision
228 """add a signature for the current or given revision
228
229
229 If no revision is given, the parent of the working directory is used,
230 If no revision is given, the parent of the working directory is used,
230 or tip if no revision is checked out.
231 or tip if no revision is checked out.
231
232
232 The ``gpg.cmd`` config setting can be used to specify the command
233 The ``gpg.cmd`` config setting can be used to specify the command
233 to run. A default key can be specified with ``gpg.key``.
234 to run. A default key can be specified with ``gpg.key``.
234
235
235 See :hg:`help dates` for a list of formats valid for -d/--date.
236 See :hg:`help dates` for a list of formats valid for -d/--date.
236 """
237 """
237 with repo.wlock():
238 with repo.wlock():
238 return _dosign(ui, repo, *revs, **opts)
239 return _dosign(ui, repo, *revs, **opts)
239
240
240 def _dosign(ui, repo, *revs, **opts):
241 def _dosign(ui, repo, *revs, **opts):
241 mygpg = newgpg(ui, **opts)
242 mygpg = newgpg(ui, **opts)
242 sigver = "0"
243 sigver = "0"
243 sigmessage = ""
244 sigmessage = ""
244
245
245 date = opts.get('date')
246 date = opts.get('date')
246 if date:
247 if date:
247 opts['date'] = util.parsedate(date)
248 opts['date'] = util.parsedate(date)
248
249
249 if revs:
250 if revs:
250 nodes = [repo.lookup(n) for n in revs]
251 nodes = [repo.lookup(n) for n in revs]
251 else:
252 else:
252 nodes = [node for node in repo.dirstate.parents()
253 nodes = [node for node in repo.dirstate.parents()
253 if node != hgnode.nullid]
254 if node != hgnode.nullid]
254 if len(nodes) > 1:
255 if len(nodes) > 1:
255 raise error.Abort(_('uncommitted merge - please provide a '
256 raise error.Abort(_('uncommitted merge - please provide a '
256 'specific revision'))
257 'specific revision'))
257 if not nodes:
258 if not nodes:
258 nodes = [repo.changelog.tip()]
259 nodes = [repo.changelog.tip()]
259
260
260 for n in nodes:
261 for n in nodes:
261 hexnode = hgnode.hex(n)
262 hexnode = hgnode.hex(n)
262 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
263 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
263 hgnode.short(n)))
264 hgnode.short(n)))
264 # build data
265 # build data
265 data = node2txt(repo, n, sigver)
266 data = node2txt(repo, n, sigver)
266 sig = mygpg.sign(data)
267 sig = mygpg.sign(data)
267 if not sig:
268 if not sig:
268 raise error.Abort(_("error while signing"))
269 raise error.Abort(_("error while signing"))
269 sig = binascii.b2a_base64(sig)
270 sig = binascii.b2a_base64(sig)
270 sig = sig.replace("\n", "")
271 sig = sig.replace("\n", "")
271 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
272 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
272
273
273 # write it
274 # write it
274 if opts['local']:
275 if opts['local']:
275 repo.vfs.append("localsigs", sigmessage)
276 repo.vfs.append("localsigs", sigmessage)
276 return
277 return
277
278
278 if not opts["force"]:
279 if not opts["force"]:
279 msigs = match.exact(repo.root, '', ['.hgsigs'])
280 msigs = match.exact(repo.root, '', ['.hgsigs'])
280 if any(repo.status(match=msigs, unknown=True, ignored=True)):
281 if any(repo.status(match=msigs, unknown=True, ignored=True)):
281 raise error.Abort(_("working copy of .hgsigs is changed "),
282 raise error.Abort(_("working copy of .hgsigs is changed "),
282 hint=_("please commit .hgsigs manually"))
283 hint=_("please commit .hgsigs manually"))
283
284
284 sigsfile = repo.wvfs(".hgsigs", "ab")
285 sigsfile = repo.wvfs(".hgsigs", "ab")
285 sigsfile.write(sigmessage)
286 sigsfile.write(sigmessage)
286 sigsfile.close()
287 sigsfile.close()
287
288
288 if '.hgsigs' not in repo.dirstate:
289 if '.hgsigs' not in repo.dirstate:
289 repo[None].add([".hgsigs"])
290 repo[None].add([".hgsigs"])
290
291
291 if opts["no_commit"]:
292 if opts["no_commit"]:
292 return
293 return
293
294
294 message = opts['message']
295 message = opts['message']
295 if not message:
296 if not message:
296 # we don't translate commit messages
297 # we don't translate commit messages
297 message = "\n".join(["Added signature for changeset %s"
298 message = "\n".join(["Added signature for changeset %s"
298 % hgnode.short(n)
299 % hgnode.short(n)
299 for n in nodes])
300 for n in nodes])
300 try:
301 try:
301 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
302 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
302 repo.commit(message, opts['user'], opts['date'], match=msigs,
303 repo.commit(message, opts['user'], opts['date'], match=msigs,
303 editor=editor)
304 editor=editor)
304 except ValueError as inst:
305 except ValueError as inst:
305 raise error.Abort(str(inst))
306 raise error.Abort(str(inst))
306
307
307 def shortkey(ui, key):
308 def shortkey(ui, key):
308 if len(key) != 16:
309 if len(key) != 16:
309 ui.debug("key ID \"%s\" format error\n" % key)
310 ui.debug("key ID \"%s\" format error\n" % key)
310 return key
311 return key
311
312
312 return key[-8:]
313 return key[-8:]
313
314
314 def node2txt(repo, node, ver):
315 def node2txt(repo, node, ver):
315 """map a manifest into some text"""
316 """map a manifest into some text"""
316 if ver == "0":
317 if ver == "0":
317 return "%s\n" % hgnode.hex(node)
318 return "%s\n" % hgnode.hex(node)
318 else:
319 else:
319 raise error.Abort(_("unknown signature version"))
320 raise error.Abort(_("unknown signature version"))
@@ -1,69 +1,69 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell (DEPRECATED)
8 '''command to view revision graphs from a shell (DEPRECATED)
9
9
10 The functionality of this extension has been include in core Mercurial
10 The functionality of this extension has been include in core Mercurial
11 since version 2.3. Please use :hg:`log -G ...` instead.
11 since version 2.3. Please use :hg:`log -G ...` instead.
12
12
13 This extension adds a --graph option to the incoming, outgoing and log
13 This extension adds a --graph option to the incoming, outgoing and log
14 commands. When this options is given, an ASCII representation of the
14 commands. When this options is given, an ASCII representation of the
15 revision graph is also shown.
15 revision graph is also shown.
16 '''
16 '''
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial import (
21 from mercurial import (
22 cmdutil,
23 commands,
22 commands,
23 registrar,
24 )
24 )
25
25
26 cmdtable = {}
26 cmdtable = {}
27 command = cmdutil.command(cmdtable)
27 command = registrar.command(cmdtable)
28 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
28 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
29 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # be specifying the version(s) of Mercurial they are tested with, or
30 # be specifying the version(s) of Mercurial they are tested with, or
31 # leave the attribute unspecified.
31 # leave the attribute unspecified.
32 testedwith = 'ships-with-hg-core'
32 testedwith = 'ships-with-hg-core'
33
33
34 @command('glog',
34 @command('glog',
35 [('f', 'follow', None,
35 [('f', 'follow', None,
36 _('follow changeset history, or file history across copies and renames')),
36 _('follow changeset history, or file history across copies and renames')),
37 ('', 'follow-first', None,
37 ('', 'follow-first', None,
38 _('only follow the first parent of merge changesets (DEPRECATED)')),
38 _('only follow the first parent of merge changesets (DEPRECATED)')),
39 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
39 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
40 ('C', 'copies', None, _('show copied files')),
40 ('C', 'copies', None, _('show copied files')),
41 ('k', 'keyword', [],
41 ('k', 'keyword', [],
42 _('do case-insensitive search for a given text'), _('TEXT')),
42 _('do case-insensitive search for a given text'), _('TEXT')),
43 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
43 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
44 ('', 'removed', None, _('include revisions where files were removed')),
44 ('', 'removed', None, _('include revisions where files were removed')),
45 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
45 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
46 ('u', 'user', [], _('revisions committed by user'), _('USER')),
46 ('u', 'user', [], _('revisions committed by user'), _('USER')),
47 ('', 'only-branch', [],
47 ('', 'only-branch', [],
48 _('show only changesets within the given named branch (DEPRECATED)'),
48 _('show only changesets within the given named branch (DEPRECATED)'),
49 _('BRANCH')),
49 _('BRANCH')),
50 ('b', 'branch', [],
50 ('b', 'branch', [],
51 _('show changesets within the given named branch'), _('BRANCH')),
51 _('show changesets within the given named branch'), _('BRANCH')),
52 ('P', 'prune', [],
52 ('P', 'prune', [],
53 _('do not display revision or any of its ancestors'), _('REV')),
53 _('do not display revision or any of its ancestors'), _('REV')),
54 ] + commands.logopts + commands.walkopts,
54 ] + commands.logopts + commands.walkopts,
55 _('[OPTION]... [FILE]'),
55 _('[OPTION]... [FILE]'),
56 inferrepo=True)
56 inferrepo=True)
57 def glog(ui, repo, *pats, **opts):
57 def glog(ui, repo, *pats, **opts):
58 """show revision history alongside an ASCII revision graph
58 """show revision history alongside an ASCII revision graph
59
59
60 Print a revision history alongside a revision graph drawn with
60 Print a revision history alongside a revision graph drawn with
61 ASCII characters.
61 ASCII characters.
62
62
63 Nodes printed as an @ character are parents of the working
63 Nodes printed as an @ character are parents of the working
64 directory.
64 directory.
65
65
66 This is an alias to :hg:`log -G`.
66 This is an alias to :hg:`log -G`.
67 """
67 """
68 opts['graph'] = True
68 opts['graph'] = True
69 return commands.log(ui, repo, *pats, **opts)
69 return commands.log(ui, repo, *pats, **opts)
@@ -1,350 +1,350 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''browse the repository in a graphical way
8 '''browse the repository in a graphical way
9
9
10 The hgk extension allows browsing the history of a repository in a
10 The hgk extension allows browsing the history of a repository in a
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
12 distributed with Mercurial.)
12 distributed with Mercurial.)
13
13
14 hgk consists of two parts: a Tcl script that does the displaying and
14 hgk consists of two parts: a Tcl script that does the displaying and
15 querying of information, and an extension to Mercurial named hgk.py,
15 querying of information, and an extension to Mercurial named hgk.py,
16 which provides hooks for hgk to get information. hgk can be found in
16 which provides hooks for hgk to get information. hgk can be found in
17 the contrib directory, and the extension is shipped in the hgext
17 the contrib directory, and the extension is shipped in the hgext
18 repository, and needs to be enabled.
18 repository, and needs to be enabled.
19
19
20 The :hg:`view` command will launch the hgk Tcl script. For this command
20 The :hg:`view` command will launch the hgk Tcl script. For this command
21 to work, hgk must be in your search path. Alternately, you can specify
21 to work, hgk must be in your search path. Alternately, you can specify
22 the path to hgk in your configuration file::
22 the path to hgk in your configuration file::
23
23
24 [hgk]
24 [hgk]
25 path = /location/of/hgk
25 path = /location/of/hgk
26
26
27 hgk can make use of the extdiff extension to visualize revisions.
27 hgk can make use of the extdiff extension to visualize revisions.
28 Assuming you had already configured extdiff vdiff command, just add::
28 Assuming you had already configured extdiff vdiff command, just add::
29
29
30 [hgk]
30 [hgk]
31 vdiff=vdiff
31 vdiff=vdiff
32
32
33 Revisions context menu will now display additional entries to fire
33 Revisions context menu will now display additional entries to fire
34 vdiff on hovered and selected revisions.
34 vdiff on hovered and selected revisions.
35 '''
35 '''
36
36
37 from __future__ import absolute_import
37 from __future__ import absolute_import
38
38
39 import os
39 import os
40
40
41 from mercurial.i18n import _
41 from mercurial.i18n import _
42 from mercurial.node import (
42 from mercurial.node import (
43 nullid,
43 nullid,
44 nullrev,
44 nullrev,
45 short,
45 short,
46 )
46 )
47 from mercurial import (
47 from mercurial import (
48 cmdutil,
49 commands,
48 commands,
50 obsolete,
49 obsolete,
51 patch,
50 patch,
51 registrar,
52 scmutil,
52 scmutil,
53 )
53 )
54
54
55 cmdtable = {}
55 cmdtable = {}
56 command = cmdutil.command(cmdtable)
56 command = registrar.command(cmdtable)
57 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
57 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
58 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
58 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
59 # be specifying the version(s) of Mercurial they are tested with, or
59 # be specifying the version(s) of Mercurial they are tested with, or
60 # leave the attribute unspecified.
60 # leave the attribute unspecified.
61 testedwith = 'ships-with-hg-core'
61 testedwith = 'ships-with-hg-core'
62
62
63 @command('debug-diff-tree',
63 @command('debug-diff-tree',
64 [('p', 'patch', None, _('generate patch')),
64 [('p', 'patch', None, _('generate patch')),
65 ('r', 'recursive', None, _('recursive')),
65 ('r', 'recursive', None, _('recursive')),
66 ('P', 'pretty', None, _('pretty')),
66 ('P', 'pretty', None, _('pretty')),
67 ('s', 'stdin', None, _('stdin')),
67 ('s', 'stdin', None, _('stdin')),
68 ('C', 'copy', None, _('detect copies')),
68 ('C', 'copy', None, _('detect copies')),
69 ('S', 'search', "", _('search'))],
69 ('S', 'search', "", _('search'))],
70 ('[OPTION]... NODE1 NODE2 [FILE]...'),
70 ('[OPTION]... NODE1 NODE2 [FILE]...'),
71 inferrepo=True)
71 inferrepo=True)
72 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
72 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
73 """diff trees from two commits"""
73 """diff trees from two commits"""
74 def __difftree(repo, node1, node2, files=None):
74 def __difftree(repo, node1, node2, files=None):
75 assert node2 is not None
75 assert node2 is not None
76 if files is None:
76 if files is None:
77 files = []
77 files = []
78 mmap = repo[node1].manifest()
78 mmap = repo[node1].manifest()
79 mmap2 = repo[node2].manifest()
79 mmap2 = repo[node2].manifest()
80 m = scmutil.match(repo[node1], files)
80 m = scmutil.match(repo[node1], files)
81 modified, added, removed = repo.status(node1, node2, m)[:3]
81 modified, added, removed = repo.status(node1, node2, m)[:3]
82 empty = short(nullid)
82 empty = short(nullid)
83
83
84 for f in modified:
84 for f in modified:
85 # TODO get file permissions
85 # TODO get file permissions
86 ui.write((":100664 100664 %s %s M\t%s\t%s\n") %
86 ui.write((":100664 100664 %s %s M\t%s\t%s\n") %
87 (short(mmap[f]), short(mmap2[f]), f, f))
87 (short(mmap[f]), short(mmap2[f]), f, f))
88 for f in added:
88 for f in added:
89 ui.write((":000000 100664 %s %s N\t%s\t%s\n") %
89 ui.write((":000000 100664 %s %s N\t%s\t%s\n") %
90 (empty, short(mmap2[f]), f, f))
90 (empty, short(mmap2[f]), f, f))
91 for f in removed:
91 for f in removed:
92 ui.write((":100664 000000 %s %s D\t%s\t%s\n") %
92 ui.write((":100664 000000 %s %s D\t%s\t%s\n") %
93 (short(mmap[f]), empty, f, f))
93 (short(mmap[f]), empty, f, f))
94 ##
94 ##
95
95
96 while True:
96 while True:
97 if opts['stdin']:
97 if opts['stdin']:
98 try:
98 try:
99 line = raw_input().split(' ')
99 line = raw_input().split(' ')
100 node1 = line[0]
100 node1 = line[0]
101 if len(line) > 1:
101 if len(line) > 1:
102 node2 = line[1]
102 node2 = line[1]
103 else:
103 else:
104 node2 = None
104 node2 = None
105 except EOFError:
105 except EOFError:
106 break
106 break
107 node1 = repo.lookup(node1)
107 node1 = repo.lookup(node1)
108 if node2:
108 if node2:
109 node2 = repo.lookup(node2)
109 node2 = repo.lookup(node2)
110 else:
110 else:
111 node2 = node1
111 node2 = node1
112 node1 = repo.changelog.parents(node1)[0]
112 node1 = repo.changelog.parents(node1)[0]
113 if opts['patch']:
113 if opts['patch']:
114 if opts['pretty']:
114 if opts['pretty']:
115 catcommit(ui, repo, node2, "")
115 catcommit(ui, repo, node2, "")
116 m = scmutil.match(repo[node1], files)
116 m = scmutil.match(repo[node1], files)
117 diffopts = patch.difffeatureopts(ui)
117 diffopts = patch.difffeatureopts(ui)
118 diffopts.git = True
118 diffopts.git = True
119 chunks = patch.diff(repo, node1, node2, match=m,
119 chunks = patch.diff(repo, node1, node2, match=m,
120 opts=diffopts)
120 opts=diffopts)
121 for chunk in chunks:
121 for chunk in chunks:
122 ui.write(chunk)
122 ui.write(chunk)
123 else:
123 else:
124 __difftree(repo, node1, node2, files=files)
124 __difftree(repo, node1, node2, files=files)
125 if not opts['stdin']:
125 if not opts['stdin']:
126 break
126 break
127
127
128 def catcommit(ui, repo, n, prefix, ctx=None):
128 def catcommit(ui, repo, n, prefix, ctx=None):
129 nlprefix = '\n' + prefix
129 nlprefix = '\n' + prefix
130 if ctx is None:
130 if ctx is None:
131 ctx = repo[n]
131 ctx = repo[n]
132 # use ctx.node() instead ??
132 # use ctx.node() instead ??
133 ui.write(("tree %s\n" % short(ctx.changeset()[0])))
133 ui.write(("tree %s\n" % short(ctx.changeset()[0])))
134 for p in ctx.parents():
134 for p in ctx.parents():
135 ui.write(("parent %s\n" % p))
135 ui.write(("parent %s\n" % p))
136
136
137 date = ctx.date()
137 date = ctx.date()
138 description = ctx.description().replace("\0", "")
138 description = ctx.description().replace("\0", "")
139 ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))
139 ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))
140
140
141 if 'committer' in ctx.extra():
141 if 'committer' in ctx.extra():
142 ui.write(("committer %s\n" % ctx.extra()['committer']))
142 ui.write(("committer %s\n" % ctx.extra()['committer']))
143
143
144 ui.write(("revision %d\n" % ctx.rev()))
144 ui.write(("revision %d\n" % ctx.rev()))
145 ui.write(("branch %s\n" % ctx.branch()))
145 ui.write(("branch %s\n" % ctx.branch()))
146 if obsolete.isenabled(repo, obsolete.createmarkersopt):
146 if obsolete.isenabled(repo, obsolete.createmarkersopt):
147 if ctx.obsolete():
147 if ctx.obsolete():
148 ui.write(("obsolete\n"))
148 ui.write(("obsolete\n"))
149 ui.write(("phase %s\n\n" % ctx.phasestr()))
149 ui.write(("phase %s\n\n" % ctx.phasestr()))
150
150
151 if prefix != "":
151 if prefix != "":
152 ui.write("%s%s\n" % (prefix,
152 ui.write("%s%s\n" % (prefix,
153 description.replace('\n', nlprefix).strip()))
153 description.replace('\n', nlprefix).strip()))
154 else:
154 else:
155 ui.write(description + "\n")
155 ui.write(description + "\n")
156 if prefix:
156 if prefix:
157 ui.write('\0')
157 ui.write('\0')
158
158
159 @command('debug-merge-base', [], _('REV REV'))
159 @command('debug-merge-base', [], _('REV REV'))
160 def base(ui, repo, node1, node2):
160 def base(ui, repo, node1, node2):
161 """output common ancestor information"""
161 """output common ancestor information"""
162 node1 = repo.lookup(node1)
162 node1 = repo.lookup(node1)
163 node2 = repo.lookup(node2)
163 node2 = repo.lookup(node2)
164 n = repo.changelog.ancestor(node1, node2)
164 n = repo.changelog.ancestor(node1, node2)
165 ui.write(short(n) + "\n")
165 ui.write(short(n) + "\n")
166
166
167 @command('debug-cat-file',
167 @command('debug-cat-file',
168 [('s', 'stdin', None, _('stdin'))],
168 [('s', 'stdin', None, _('stdin'))],
169 _('[OPTION]... TYPE FILE'),
169 _('[OPTION]... TYPE FILE'),
170 inferrepo=True)
170 inferrepo=True)
171 def catfile(ui, repo, type=None, r=None, **opts):
171 def catfile(ui, repo, type=None, r=None, **opts):
172 """cat a specific revision"""
172 """cat a specific revision"""
173 # in stdin mode, every line except the commit is prefixed with two
173 # in stdin mode, every line except the commit is prefixed with two
174 # spaces. This way the our caller can find the commit without magic
174 # spaces. This way the our caller can find the commit without magic
175 # strings
175 # strings
176 #
176 #
177 prefix = ""
177 prefix = ""
178 if opts['stdin']:
178 if opts['stdin']:
179 try:
179 try:
180 (type, r) = raw_input().split(' ')
180 (type, r) = raw_input().split(' ')
181 prefix = " "
181 prefix = " "
182 except EOFError:
182 except EOFError:
183 return
183 return
184
184
185 else:
185 else:
186 if not type or not r:
186 if not type or not r:
187 ui.warn(_("cat-file: type or revision not supplied\n"))
187 ui.warn(_("cat-file: type or revision not supplied\n"))
188 commands.help_(ui, 'cat-file')
188 commands.help_(ui, 'cat-file')
189
189
190 while r:
190 while r:
191 if type != "commit":
191 if type != "commit":
192 ui.warn(_("aborting hg cat-file only understands commits\n"))
192 ui.warn(_("aborting hg cat-file only understands commits\n"))
193 return 1
193 return 1
194 n = repo.lookup(r)
194 n = repo.lookup(r)
195 catcommit(ui, repo, n, prefix)
195 catcommit(ui, repo, n, prefix)
196 if opts['stdin']:
196 if opts['stdin']:
197 try:
197 try:
198 (type, r) = raw_input().split(' ')
198 (type, r) = raw_input().split(' ')
199 except EOFError:
199 except EOFError:
200 break
200 break
201 else:
201 else:
202 break
202 break
203
203
204 # git rev-tree is a confusing thing. You can supply a number of
204 # git rev-tree is a confusing thing. You can supply a number of
205 # commit sha1s on the command line, and it walks the commit history
205 # commit sha1s on the command line, and it walks the commit history
206 # telling you which commits are reachable from the supplied ones via
206 # telling you which commits are reachable from the supplied ones via
207 # a bitmask based on arg position.
207 # a bitmask based on arg position.
208 # you can specify a commit to stop at by starting the sha1 with ^
208 # you can specify a commit to stop at by starting the sha1 with ^
209 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
209 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
210 def chlogwalk():
210 def chlogwalk():
211 count = len(repo)
211 count = len(repo)
212 i = count
212 i = count
213 l = [0] * 100
213 l = [0] * 100
214 chunk = 100
214 chunk = 100
215 while True:
215 while True:
216 if chunk > i:
216 if chunk > i:
217 chunk = i
217 chunk = i
218 i = 0
218 i = 0
219 else:
219 else:
220 i -= chunk
220 i -= chunk
221
221
222 for x in xrange(chunk):
222 for x in xrange(chunk):
223 if i + x >= count:
223 if i + x >= count:
224 l[chunk - x:] = [0] * (chunk - x)
224 l[chunk - x:] = [0] * (chunk - x)
225 break
225 break
226 if full is not None:
226 if full is not None:
227 if (i + x) in repo:
227 if (i + x) in repo:
228 l[x] = repo[i + x]
228 l[x] = repo[i + x]
229 l[x].changeset() # force reading
229 l[x].changeset() # force reading
230 else:
230 else:
231 if (i + x) in repo:
231 if (i + x) in repo:
232 l[x] = 1
232 l[x] = 1
233 for x in xrange(chunk - 1, -1, -1):
233 for x in xrange(chunk - 1, -1, -1):
234 if l[x] != 0:
234 if l[x] != 0:
235 yield (i + x, full is not None and l[x] or None)
235 yield (i + x, full is not None and l[x] or None)
236 if i == 0:
236 if i == 0:
237 break
237 break
238
238
239 # calculate and return the reachability bitmask for sha
239 # calculate and return the reachability bitmask for sha
240 def is_reachable(ar, reachable, sha):
240 def is_reachable(ar, reachable, sha):
241 if len(ar) == 0:
241 if len(ar) == 0:
242 return 1
242 return 1
243 mask = 0
243 mask = 0
244 for i in xrange(len(ar)):
244 for i in xrange(len(ar)):
245 if sha in reachable[i]:
245 if sha in reachable[i]:
246 mask |= 1 << i
246 mask |= 1 << i
247
247
248 return mask
248 return mask
249
249
250 reachable = []
250 reachable = []
251 stop_sha1 = []
251 stop_sha1 = []
252 want_sha1 = []
252 want_sha1 = []
253 count = 0
253 count = 0
254
254
255 # figure out which commits they are asking for and which ones they
255 # figure out which commits they are asking for and which ones they
256 # want us to stop on
256 # want us to stop on
257 for i, arg in enumerate(args):
257 for i, arg in enumerate(args):
258 if arg.startswith('^'):
258 if arg.startswith('^'):
259 s = repo.lookup(arg[1:])
259 s = repo.lookup(arg[1:])
260 stop_sha1.append(s)
260 stop_sha1.append(s)
261 want_sha1.append(s)
261 want_sha1.append(s)
262 elif arg != 'HEAD':
262 elif arg != 'HEAD':
263 want_sha1.append(repo.lookup(arg))
263 want_sha1.append(repo.lookup(arg))
264
264
265 # calculate the graph for the supplied commits
265 # calculate the graph for the supplied commits
266 for i, n in enumerate(want_sha1):
266 for i, n in enumerate(want_sha1):
267 reachable.append(set())
267 reachable.append(set())
268 visit = [n]
268 visit = [n]
269 reachable[i].add(n)
269 reachable[i].add(n)
270 while visit:
270 while visit:
271 n = visit.pop(0)
271 n = visit.pop(0)
272 if n in stop_sha1:
272 if n in stop_sha1:
273 continue
273 continue
274 for p in repo.changelog.parents(n):
274 for p in repo.changelog.parents(n):
275 if p not in reachable[i]:
275 if p not in reachable[i]:
276 reachable[i].add(p)
276 reachable[i].add(p)
277 visit.append(p)
277 visit.append(p)
278 if p in stop_sha1:
278 if p in stop_sha1:
279 continue
279 continue
280
280
281 # walk the repository looking for commits that are in our
281 # walk the repository looking for commits that are in our
282 # reachability graph
282 # reachability graph
283 for i, ctx in chlogwalk():
283 for i, ctx in chlogwalk():
284 if i not in repo:
284 if i not in repo:
285 continue
285 continue
286 n = repo.changelog.node(i)
286 n = repo.changelog.node(i)
287 mask = is_reachable(want_sha1, reachable, n)
287 mask = is_reachable(want_sha1, reachable, n)
288 if mask:
288 if mask:
289 parentstr = ""
289 parentstr = ""
290 if parents:
290 if parents:
291 pp = repo.changelog.parents(n)
291 pp = repo.changelog.parents(n)
292 if pp[0] != nullid:
292 if pp[0] != nullid:
293 parentstr += " " + short(pp[0])
293 parentstr += " " + short(pp[0])
294 if pp[1] != nullid:
294 if pp[1] != nullid:
295 parentstr += " " + short(pp[1])
295 parentstr += " " + short(pp[1])
296 if not full:
296 if not full:
297 ui.write("%s%s\n" % (short(n), parentstr))
297 ui.write("%s%s\n" % (short(n), parentstr))
298 elif full == "commit":
298 elif full == "commit":
299 ui.write("%s%s\n" % (short(n), parentstr))
299 ui.write("%s%s\n" % (short(n), parentstr))
300 catcommit(ui, repo, n, ' ', ctx)
300 catcommit(ui, repo, n, ' ', ctx)
301 else:
301 else:
302 (p1, p2) = repo.changelog.parents(n)
302 (p1, p2) = repo.changelog.parents(n)
303 (h, h1, h2) = map(short, (n, p1, p2))
303 (h, h1, h2) = map(short, (n, p1, p2))
304 (i1, i2) = map(repo.changelog.rev, (p1, p2))
304 (i1, i2) = map(repo.changelog.rev, (p1, p2))
305
305
306 date = ctx.date()[0]
306 date = ctx.date()[0]
307 ui.write("%s %s:%s" % (date, h, mask))
307 ui.write("%s %s:%s" % (date, h, mask))
308 mask = is_reachable(want_sha1, reachable, p1)
308 mask = is_reachable(want_sha1, reachable, p1)
309 if i1 != nullrev and mask > 0:
309 if i1 != nullrev and mask > 0:
310 ui.write("%s:%s " % (h1, mask)),
310 ui.write("%s:%s " % (h1, mask)),
311 mask = is_reachable(want_sha1, reachable, p2)
311 mask = is_reachable(want_sha1, reachable, p2)
312 if i2 != nullrev and mask > 0:
312 if i2 != nullrev and mask > 0:
313 ui.write("%s:%s " % (h2, mask))
313 ui.write("%s:%s " % (h2, mask))
314 ui.write("\n")
314 ui.write("\n")
315 if maxnr and count >= maxnr:
315 if maxnr and count >= maxnr:
316 break
316 break
317 count += 1
317 count += 1
318
318
319 # git rev-list tries to order things by date, and has the ability to stop
319 # git rev-list tries to order things by date, and has the ability to stop
320 # at a given commit without walking the whole repo. TODO add the stop
320 # at a given commit without walking the whole repo. TODO add the stop
321 # parameter
321 # parameter
322 @command('debug-rev-list',
322 @command('debug-rev-list',
323 [('H', 'header', None, _('header')),
323 [('H', 'header', None, _('header')),
324 ('t', 'topo-order', None, _('topo-order')),
324 ('t', 'topo-order', None, _('topo-order')),
325 ('p', 'parents', None, _('parents')),
325 ('p', 'parents', None, _('parents')),
326 ('n', 'max-count', 0, _('max-count'))],
326 ('n', 'max-count', 0, _('max-count'))],
327 ('[OPTION]... REV...'))
327 ('[OPTION]... REV...'))
328 def revlist(ui, repo, *revs, **opts):
328 def revlist(ui, repo, *revs, **opts):
329 """print revisions"""
329 """print revisions"""
330 if opts['header']:
330 if opts['header']:
331 full = "commit"
331 full = "commit"
332 else:
332 else:
333 full = None
333 full = None
334 copy = [x for x in revs]
334 copy = [x for x in revs]
335 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
335 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
336
336
337 @command('view',
337 @command('view',
338 [('l', 'limit', '',
338 [('l', 'limit', '',
339 _('limit number of changes displayed'), _('NUM'))],
339 _('limit number of changes displayed'), _('NUM'))],
340 _('[-l LIMIT] [REVRANGE]'))
340 _('[-l LIMIT] [REVRANGE]'))
341 def view(ui, repo, *etc, **opts):
341 def view(ui, repo, *etc, **opts):
342 "start interactive history viewer"
342 "start interactive history viewer"
343 os.chdir(repo.root)
343 os.chdir(repo.root)
344 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
344 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
345 if repo.filtername is None:
345 if repo.filtername is None:
346 optstr += '--hidden'
346 optstr += '--hidden'
347
347
348 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
348 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
349 ui.debug("running %s\n" % cmd)
349 ui.debug("running %s\n" % cmd)
350 ui.system(cmd, blockedtag='hgk_view')
350 ui.system(cmd, blockedtag='hgk_view')
@@ -1,1675 +1,1676 b''
1 # histedit.py - interactive history editing for mercurial
1 # histedit.py - interactive history editing for mercurial
2 #
2 #
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """interactive history editing
7 """interactive history editing
8
8
9 With this extension installed, Mercurial gains one new command: histedit. Usage
9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 is as follows, assuming the following history::
10 is as follows, assuming the following history::
11
11
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 | Add delta
13 | Add delta
14 |
14 |
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 | Add gamma
16 | Add gamma
17 |
17 |
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 | Add beta
19 | Add beta
20 |
20 |
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 Add alpha
22 Add alpha
23
23
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 file open in your editor::
25 file open in your editor::
26
26
27 pick c561b4e977df Add beta
27 pick c561b4e977df Add beta
28 pick 030b686bedc4 Add gamma
28 pick 030b686bedc4 Add gamma
29 pick 7c2fd3b9020c Add delta
29 pick 7c2fd3b9020c Add delta
30
30
31 # Edit history between c561b4e977df and 7c2fd3b9020c
31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 #
32 #
33 # Commits are listed from least to most recent
33 # Commits are listed from least to most recent
34 #
34 #
35 # Commands:
35 # Commands:
36 # p, pick = use commit
36 # p, pick = use commit
37 # e, edit = use commit, but stop for amending
37 # e, edit = use commit, but stop for amending
38 # f, fold = use commit, but combine it with the one above
38 # f, fold = use commit, but combine it with the one above
39 # r, roll = like fold, but discard this commit's description and date
39 # r, roll = like fold, but discard this commit's description and date
40 # d, drop = remove commit from history
40 # d, drop = remove commit from history
41 # m, mess = edit commit message without changing commit content
41 # m, mess = edit commit message without changing commit content
42 #
42 #
43
43
44 In this file, lines beginning with ``#`` are ignored. You must specify a rule
44 In this file, lines beginning with ``#`` are ignored. You must specify a rule
45 for each revision in your history. For example, if you had meant to add gamma
45 for each revision in your history. For example, if you had meant to add gamma
46 before beta, and then wanted to add delta in the same revision as beta, you
46 before beta, and then wanted to add delta in the same revision as beta, you
47 would reorganize the file to look like this::
47 would reorganize the file to look like this::
48
48
49 pick 030b686bedc4 Add gamma
49 pick 030b686bedc4 Add gamma
50 pick c561b4e977df Add beta
50 pick c561b4e977df Add beta
51 fold 7c2fd3b9020c Add delta
51 fold 7c2fd3b9020c Add delta
52
52
53 # Edit history between c561b4e977df and 7c2fd3b9020c
53 # Edit history between c561b4e977df and 7c2fd3b9020c
54 #
54 #
55 # Commits are listed from least to most recent
55 # Commits are listed from least to most recent
56 #
56 #
57 # Commands:
57 # Commands:
58 # p, pick = use commit
58 # p, pick = use commit
59 # e, edit = use commit, but stop for amending
59 # e, edit = use commit, but stop for amending
60 # f, fold = use commit, but combine it with the one above
60 # f, fold = use commit, but combine it with the one above
61 # r, roll = like fold, but discard this commit's description and date
61 # r, roll = like fold, but discard this commit's description and date
62 # d, drop = remove commit from history
62 # d, drop = remove commit from history
63 # m, mess = edit commit message without changing commit content
63 # m, mess = edit commit message without changing commit content
64 #
64 #
65
65
66 At which point you close the editor and ``histedit`` starts working. When you
66 At which point you close the editor and ``histedit`` starts working. When you
67 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
67 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
68 those revisions together, offering you a chance to clean up the commit message::
68 those revisions together, offering you a chance to clean up the commit message::
69
69
70 Add beta
70 Add beta
71 ***
71 ***
72 Add delta
72 Add delta
73
73
74 Edit the commit message to your liking, then close the editor. The date used
74 Edit the commit message to your liking, then close the editor. The date used
75 for the commit will be the later of the two commits' dates. For this example,
75 for the commit will be the later of the two commits' dates. For this example,
76 let's assume that the commit message was changed to ``Add beta and delta.``
76 let's assume that the commit message was changed to ``Add beta and delta.``
77 After histedit has run and had a chance to remove any old or temporary
77 After histedit has run and had a chance to remove any old or temporary
78 revisions it needed, the history looks like this::
78 revisions it needed, the history looks like this::
79
79
80 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
80 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
81 | Add beta and delta.
81 | Add beta and delta.
82 |
82 |
83 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
83 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
84 | Add gamma
84 | Add gamma
85 |
85 |
86 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
86 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
87 Add alpha
87 Add alpha
88
88
89 Note that ``histedit`` does *not* remove any revisions (even its own temporary
89 Note that ``histedit`` does *not* remove any revisions (even its own temporary
90 ones) until after it has completed all the editing operations, so it will
90 ones) until after it has completed all the editing operations, so it will
91 probably perform several strip operations when it's done. For the above example,
91 probably perform several strip operations when it's done. For the above example,
92 it had to run strip twice. Strip can be slow depending on a variety of factors,
92 it had to run strip twice. Strip can be slow depending on a variety of factors,
93 so you might need to be a little patient. You can choose to keep the original
93 so you might need to be a little patient. You can choose to keep the original
94 revisions by passing the ``--keep`` flag.
94 revisions by passing the ``--keep`` flag.
95
95
96 The ``edit`` operation will drop you back to a command prompt,
96 The ``edit`` operation will drop you back to a command prompt,
97 allowing you to edit files freely, or even use ``hg record`` to commit
97 allowing you to edit files freely, or even use ``hg record`` to commit
98 some changes as a separate commit. When you're done, any remaining
98 some changes as a separate commit. When you're done, any remaining
99 uncommitted changes will be committed as well. When done, run ``hg
99 uncommitted changes will be committed as well. When done, run ``hg
100 histedit --continue`` to finish this step. If there are uncommitted
100 histedit --continue`` to finish this step. If there are uncommitted
101 changes, you'll be prompted for a new commit message, but the default
101 changes, you'll be prompted for a new commit message, but the default
102 commit message will be the original message for the ``edit`` ed
102 commit message will be the original message for the ``edit`` ed
103 revision, and the date of the original commit will be preserved.
103 revision, and the date of the original commit will be preserved.
104
104
105 The ``message`` operation will give you a chance to revise a commit
105 The ``message`` operation will give you a chance to revise a commit
106 message without changing the contents. It's a shortcut for doing
106 message without changing the contents. It's a shortcut for doing
107 ``edit`` immediately followed by `hg histedit --continue``.
107 ``edit`` immediately followed by `hg histedit --continue``.
108
108
109 If ``histedit`` encounters a conflict when moving a revision (while
109 If ``histedit`` encounters a conflict when moving a revision (while
110 handling ``pick`` or ``fold``), it'll stop in a similar manner to
110 handling ``pick`` or ``fold``), it'll stop in a similar manner to
111 ``edit`` with the difference that it won't prompt you for a commit
111 ``edit`` with the difference that it won't prompt you for a commit
112 message when done. If you decide at this point that you don't like how
112 message when done. If you decide at this point that you don't like how
113 much work it will be to rearrange history, or that you made a mistake,
113 much work it will be to rearrange history, or that you made a mistake,
114 you can use ``hg histedit --abort`` to abandon the new changes you
114 you can use ``hg histedit --abort`` to abandon the new changes you
115 have made and return to the state before you attempted to edit your
115 have made and return to the state before you attempted to edit your
116 history.
116 history.
117
117
118 If we clone the histedit-ed example repository above and add four more
118 If we clone the histedit-ed example repository above and add four more
119 changes, such that we have the following history::
119 changes, such that we have the following history::
120
120
121 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
121 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
122 | Add theta
122 | Add theta
123 |
123 |
124 o 5 140988835471 2009-04-27 18:04 -0500 stefan
124 o 5 140988835471 2009-04-27 18:04 -0500 stefan
125 | Add eta
125 | Add eta
126 |
126 |
127 o 4 122930637314 2009-04-27 18:04 -0500 stefan
127 o 4 122930637314 2009-04-27 18:04 -0500 stefan
128 | Add zeta
128 | Add zeta
129 |
129 |
130 o 3 836302820282 2009-04-27 18:04 -0500 stefan
130 o 3 836302820282 2009-04-27 18:04 -0500 stefan
131 | Add epsilon
131 | Add epsilon
132 |
132 |
133 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
133 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
134 | Add beta and delta.
134 | Add beta and delta.
135 |
135 |
136 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
136 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
137 | Add gamma
137 | Add gamma
138 |
138 |
139 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
139 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
140 Add alpha
140 Add alpha
141
141
142 If you run ``hg histedit --outgoing`` on the clone then it is the same
142 If you run ``hg histedit --outgoing`` on the clone then it is the same
143 as running ``hg histedit 836302820282``. If you need plan to push to a
143 as running ``hg histedit 836302820282``. If you need plan to push to a
144 repository that Mercurial does not detect to be related to the source
144 repository that Mercurial does not detect to be related to the source
145 repo, you can add a ``--force`` option.
145 repo, you can add a ``--force`` option.
146
146
147 Config
147 Config
148 ------
148 ------
149
149
150 Histedit rule lines are truncated to 80 characters by default. You
150 Histedit rule lines are truncated to 80 characters by default. You
151 can customize this behavior by setting a different length in your
151 can customize this behavior by setting a different length in your
152 configuration file::
152 configuration file::
153
153
154 [histedit]
154 [histedit]
155 linelen = 120 # truncate rule lines at 120 characters
155 linelen = 120 # truncate rule lines at 120 characters
156
156
157 ``hg histedit`` attempts to automatically choose an appropriate base
157 ``hg histedit`` attempts to automatically choose an appropriate base
158 revision to use. To change which base revision is used, define a
158 revision to use. To change which base revision is used, define a
159 revset in your configuration file::
159 revset in your configuration file::
160
160
161 [histedit]
161 [histedit]
162 defaultrev = only(.) & draft()
162 defaultrev = only(.) & draft()
163
163
164 By default each edited revision needs to be present in histedit commands.
164 By default each edited revision needs to be present in histedit commands.
165 To remove revision you need to use ``drop`` operation. You can configure
165 To remove revision you need to use ``drop`` operation. You can configure
166 the drop to be implicit for missing commits by adding::
166 the drop to be implicit for missing commits by adding::
167
167
168 [histedit]
168 [histedit]
169 dropmissing = True
169 dropmissing = True
170
170
171 By default, histedit will close the transaction after each action. For
171 By default, histedit will close the transaction after each action. For
172 performance purposes, you can configure histedit to use a single transaction
172 performance purposes, you can configure histedit to use a single transaction
173 across the entire histedit. WARNING: This setting introduces a significant risk
173 across the entire histedit. WARNING: This setting introduces a significant risk
174 of losing the work you've done in a histedit if the histedit aborts
174 of losing the work you've done in a histedit if the histedit aborts
175 unexpectedly::
175 unexpectedly::
176
176
177 [histedit]
177 [histedit]
178 singletransaction = True
178 singletransaction = True
179
179
180 """
180 """
181
181
182 from __future__ import absolute_import
182 from __future__ import absolute_import
183
183
184 import errno
184 import errno
185 import os
185 import os
186
186
187 from mercurial.i18n import _
187 from mercurial.i18n import _
188 from mercurial import (
188 from mercurial import (
189 bundle2,
189 bundle2,
190 cmdutil,
190 cmdutil,
191 context,
191 context,
192 copies,
192 copies,
193 destutil,
193 destutil,
194 discovery,
194 discovery,
195 error,
195 error,
196 exchange,
196 exchange,
197 extensions,
197 extensions,
198 hg,
198 hg,
199 lock,
199 lock,
200 merge as mergemod,
200 merge as mergemod,
201 mergeutil,
201 mergeutil,
202 node,
202 node,
203 obsolete,
203 obsolete,
204 registrar,
204 repair,
205 repair,
205 scmutil,
206 scmutil,
206 util,
207 util,
207 )
208 )
208
209
209 pickle = util.pickle
210 pickle = util.pickle
210 release = lock.release
211 release = lock.release
211 cmdtable = {}
212 cmdtable = {}
212 command = cmdutil.command(cmdtable)
213 command = registrar.command(cmdtable)
213
214
214 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
215 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
215 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
216 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
216 # be specifying the version(s) of Mercurial they are tested with, or
217 # be specifying the version(s) of Mercurial they are tested with, or
217 # leave the attribute unspecified.
218 # leave the attribute unspecified.
218 testedwith = 'ships-with-hg-core'
219 testedwith = 'ships-with-hg-core'
219
220
220 actiontable = {}
221 actiontable = {}
221 primaryactions = set()
222 primaryactions = set()
222 secondaryactions = set()
223 secondaryactions = set()
223 tertiaryactions = set()
224 tertiaryactions = set()
224 internalactions = set()
225 internalactions = set()
225
226
226 def geteditcomment(ui, first, last):
227 def geteditcomment(ui, first, last):
227 """ construct the editor comment
228 """ construct the editor comment
228 The comment includes::
229 The comment includes::
229 - an intro
230 - an intro
230 - sorted primary commands
231 - sorted primary commands
231 - sorted short commands
232 - sorted short commands
232 - sorted long commands
233 - sorted long commands
233 - additional hints
234 - additional hints
234
235
235 Commands are only included once.
236 Commands are only included once.
236 """
237 """
237 intro = _("""Edit history between %s and %s
238 intro = _("""Edit history between %s and %s
238
239
239 Commits are listed from least to most recent
240 Commits are listed from least to most recent
240
241
241 You can reorder changesets by reordering the lines
242 You can reorder changesets by reordering the lines
242
243
243 Commands:
244 Commands:
244 """)
245 """)
245 actions = []
246 actions = []
246 def addverb(v):
247 def addverb(v):
247 a = actiontable[v]
248 a = actiontable[v]
248 lines = a.message.split("\n")
249 lines = a.message.split("\n")
249 if len(a.verbs):
250 if len(a.verbs):
250 v = ', '.join(sorted(a.verbs, key=lambda v: len(v)))
251 v = ', '.join(sorted(a.verbs, key=lambda v: len(v)))
251 actions.append(" %s = %s" % (v, lines[0]))
252 actions.append(" %s = %s" % (v, lines[0]))
252 actions.extend([' %s' for l in lines[1:]])
253 actions.extend([' %s' for l in lines[1:]])
253
254
254 for v in (
255 for v in (
255 sorted(primaryactions) +
256 sorted(primaryactions) +
256 sorted(secondaryactions) +
257 sorted(secondaryactions) +
257 sorted(tertiaryactions)
258 sorted(tertiaryactions)
258 ):
259 ):
259 addverb(v)
260 addverb(v)
260 actions.append('')
261 actions.append('')
261
262
262 hints = []
263 hints = []
263 if ui.configbool('histedit', 'dropmissing'):
264 if ui.configbool('histedit', 'dropmissing'):
264 hints.append("Deleting a changeset from the list "
265 hints.append("Deleting a changeset from the list "
265 "will DISCARD it from the edited history!")
266 "will DISCARD it from the edited history!")
266
267
267 lines = (intro % (first, last)).split('\n') + actions + hints
268 lines = (intro % (first, last)).split('\n') + actions + hints
268
269
269 return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
270 return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
270
271
271 class histeditstate(object):
272 class histeditstate(object):
272 def __init__(self, repo, parentctxnode=None, actions=None, keep=None,
273 def __init__(self, repo, parentctxnode=None, actions=None, keep=None,
273 topmost=None, replacements=None, lock=None, wlock=None):
274 topmost=None, replacements=None, lock=None, wlock=None):
274 self.repo = repo
275 self.repo = repo
275 self.actions = actions
276 self.actions = actions
276 self.keep = keep
277 self.keep = keep
277 self.topmost = topmost
278 self.topmost = topmost
278 self.parentctxnode = parentctxnode
279 self.parentctxnode = parentctxnode
279 self.lock = lock
280 self.lock = lock
280 self.wlock = wlock
281 self.wlock = wlock
281 self.backupfile = None
282 self.backupfile = None
282 self.tr = None
283 self.tr = None
283 if replacements is None:
284 if replacements is None:
284 self.replacements = []
285 self.replacements = []
285 else:
286 else:
286 self.replacements = replacements
287 self.replacements = replacements
287
288
288 def read(self):
289 def read(self):
289 """Load histedit state from disk and set fields appropriately."""
290 """Load histedit state from disk and set fields appropriately."""
290 try:
291 try:
291 state = self.repo.vfs.read('histedit-state')
292 state = self.repo.vfs.read('histedit-state')
292 except IOError as err:
293 except IOError as err:
293 if err.errno != errno.ENOENT:
294 if err.errno != errno.ENOENT:
294 raise
295 raise
295 cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
296 cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
296
297
297 if state.startswith('v1\n'):
298 if state.startswith('v1\n'):
298 data = self._load()
299 data = self._load()
299 parentctxnode, rules, keep, topmost, replacements, backupfile = data
300 parentctxnode, rules, keep, topmost, replacements, backupfile = data
300 else:
301 else:
301 data = pickle.loads(state)
302 data = pickle.loads(state)
302 parentctxnode, rules, keep, topmost, replacements = data
303 parentctxnode, rules, keep, topmost, replacements = data
303 backupfile = None
304 backupfile = None
304
305
305 self.parentctxnode = parentctxnode
306 self.parentctxnode = parentctxnode
306 rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
307 rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
307 actions = parserules(rules, self)
308 actions = parserules(rules, self)
308 self.actions = actions
309 self.actions = actions
309 self.keep = keep
310 self.keep = keep
310 self.topmost = topmost
311 self.topmost = topmost
311 self.replacements = replacements
312 self.replacements = replacements
312 self.backupfile = backupfile
313 self.backupfile = backupfile
313
314
314 def write(self, tr=None):
315 def write(self, tr=None):
315 if tr:
316 if tr:
316 tr.addfilegenerator('histedit-state', ('histedit-state',),
317 tr.addfilegenerator('histedit-state', ('histedit-state',),
317 self._write, location='plain')
318 self._write, location='plain')
318 else:
319 else:
319 with self.repo.vfs("histedit-state", "w") as f:
320 with self.repo.vfs("histedit-state", "w") as f:
320 self._write(f)
321 self._write(f)
321
322
322 def _write(self, fp):
323 def _write(self, fp):
323 fp.write('v1\n')
324 fp.write('v1\n')
324 fp.write('%s\n' % node.hex(self.parentctxnode))
325 fp.write('%s\n' % node.hex(self.parentctxnode))
325 fp.write('%s\n' % node.hex(self.topmost))
326 fp.write('%s\n' % node.hex(self.topmost))
326 fp.write('%s\n' % self.keep)
327 fp.write('%s\n' % self.keep)
327 fp.write('%d\n' % len(self.actions))
328 fp.write('%d\n' % len(self.actions))
328 for action in self.actions:
329 for action in self.actions:
329 fp.write('%s\n' % action.tostate())
330 fp.write('%s\n' % action.tostate())
330 fp.write('%d\n' % len(self.replacements))
331 fp.write('%d\n' % len(self.replacements))
331 for replacement in self.replacements:
332 for replacement in self.replacements:
332 fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
333 fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
333 for r in replacement[1])))
334 for r in replacement[1])))
334 backupfile = self.backupfile
335 backupfile = self.backupfile
335 if not backupfile:
336 if not backupfile:
336 backupfile = ''
337 backupfile = ''
337 fp.write('%s\n' % backupfile)
338 fp.write('%s\n' % backupfile)
338
339
339 def _load(self):
340 def _load(self):
340 fp = self.repo.vfs('histedit-state', 'r')
341 fp = self.repo.vfs('histedit-state', 'r')
341 lines = [l[:-1] for l in fp.readlines()]
342 lines = [l[:-1] for l in fp.readlines()]
342
343
343 index = 0
344 index = 0
344 lines[index] # version number
345 lines[index] # version number
345 index += 1
346 index += 1
346
347
347 parentctxnode = node.bin(lines[index])
348 parentctxnode = node.bin(lines[index])
348 index += 1
349 index += 1
349
350
350 topmost = node.bin(lines[index])
351 topmost = node.bin(lines[index])
351 index += 1
352 index += 1
352
353
353 keep = lines[index] == 'True'
354 keep = lines[index] == 'True'
354 index += 1
355 index += 1
355
356
356 # Rules
357 # Rules
357 rules = []
358 rules = []
358 rulelen = int(lines[index])
359 rulelen = int(lines[index])
359 index += 1
360 index += 1
360 for i in xrange(rulelen):
361 for i in xrange(rulelen):
361 ruleaction = lines[index]
362 ruleaction = lines[index]
362 index += 1
363 index += 1
363 rule = lines[index]
364 rule = lines[index]
364 index += 1
365 index += 1
365 rules.append((ruleaction, rule))
366 rules.append((ruleaction, rule))
366
367
367 # Replacements
368 # Replacements
368 replacements = []
369 replacements = []
369 replacementlen = int(lines[index])
370 replacementlen = int(lines[index])
370 index += 1
371 index += 1
371 for i in xrange(replacementlen):
372 for i in xrange(replacementlen):
372 replacement = lines[index]
373 replacement = lines[index]
373 original = node.bin(replacement[:40])
374 original = node.bin(replacement[:40])
374 succ = [node.bin(replacement[i:i + 40]) for i in
375 succ = [node.bin(replacement[i:i + 40]) for i in
375 range(40, len(replacement), 40)]
376 range(40, len(replacement), 40)]
376 replacements.append((original, succ))
377 replacements.append((original, succ))
377 index += 1
378 index += 1
378
379
379 backupfile = lines[index]
380 backupfile = lines[index]
380 index += 1
381 index += 1
381
382
382 fp.close()
383 fp.close()
383
384
384 return parentctxnode, rules, keep, topmost, replacements, backupfile
385 return parentctxnode, rules, keep, topmost, replacements, backupfile
385
386
386 def clear(self):
387 def clear(self):
387 if self.inprogress():
388 if self.inprogress():
388 self.repo.vfs.unlink('histedit-state')
389 self.repo.vfs.unlink('histedit-state')
389
390
390 def inprogress(self):
391 def inprogress(self):
391 return self.repo.vfs.exists('histedit-state')
392 return self.repo.vfs.exists('histedit-state')
392
393
393
394
394 class histeditaction(object):
395 class histeditaction(object):
395 def __init__(self, state, node):
396 def __init__(self, state, node):
396 self.state = state
397 self.state = state
397 self.repo = state.repo
398 self.repo = state.repo
398 self.node = node
399 self.node = node
399
400
400 @classmethod
401 @classmethod
401 def fromrule(cls, state, rule):
402 def fromrule(cls, state, rule):
402 """Parses the given rule, returning an instance of the histeditaction.
403 """Parses the given rule, returning an instance of the histeditaction.
403 """
404 """
404 rulehash = rule.strip().split(' ', 1)[0]
405 rulehash = rule.strip().split(' ', 1)[0]
405 try:
406 try:
406 rev = node.bin(rulehash)
407 rev = node.bin(rulehash)
407 except TypeError:
408 except TypeError:
408 raise error.ParseError("invalid changeset %s" % rulehash)
409 raise error.ParseError("invalid changeset %s" % rulehash)
409 return cls(state, rev)
410 return cls(state, rev)
410
411
411 def verify(self, prev, expected, seen):
412 def verify(self, prev, expected, seen):
412 """ Verifies semantic correctness of the rule"""
413 """ Verifies semantic correctness of the rule"""
413 repo = self.repo
414 repo = self.repo
414 ha = node.hex(self.node)
415 ha = node.hex(self.node)
415 try:
416 try:
416 self.node = repo[ha].node()
417 self.node = repo[ha].node()
417 except error.RepoError:
418 except error.RepoError:
418 raise error.ParseError(_('unknown changeset %s listed')
419 raise error.ParseError(_('unknown changeset %s listed')
419 % ha[:12])
420 % ha[:12])
420 if self.node is not None:
421 if self.node is not None:
421 self._verifynodeconstraints(prev, expected, seen)
422 self._verifynodeconstraints(prev, expected, seen)
422
423
423 def _verifynodeconstraints(self, prev, expected, seen):
424 def _verifynodeconstraints(self, prev, expected, seen):
424 # by default command need a node in the edited list
425 # by default command need a node in the edited list
425 if self.node not in expected:
426 if self.node not in expected:
426 raise error.ParseError(_('%s "%s" changeset was not a candidate')
427 raise error.ParseError(_('%s "%s" changeset was not a candidate')
427 % (self.verb, node.short(self.node)),
428 % (self.verb, node.short(self.node)),
428 hint=_('only use listed changesets'))
429 hint=_('only use listed changesets'))
429 # and only one command per node
430 # and only one command per node
430 if self.node in seen:
431 if self.node in seen:
431 raise error.ParseError(_('duplicated command for changeset %s') %
432 raise error.ParseError(_('duplicated command for changeset %s') %
432 node.short(self.node))
433 node.short(self.node))
433
434
434 def torule(self):
435 def torule(self):
435 """build a histedit rule line for an action
436 """build a histedit rule line for an action
436
437
437 by default lines are in the form:
438 by default lines are in the form:
438 <hash> <rev> <summary>
439 <hash> <rev> <summary>
439 """
440 """
440 ctx = self.repo[self.node]
441 ctx = self.repo[self.node]
441 summary = _getsummary(ctx)
442 summary = _getsummary(ctx)
442 line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
443 line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
443 # trim to 75 columns by default so it's not stupidly wide in my editor
444 # trim to 75 columns by default so it's not stupidly wide in my editor
444 # (the 5 more are left for verb)
445 # (the 5 more are left for verb)
445 maxlen = self.repo.ui.configint('histedit', 'linelen', default=80)
446 maxlen = self.repo.ui.configint('histedit', 'linelen', default=80)
446 maxlen = max(maxlen, 22) # avoid truncating hash
447 maxlen = max(maxlen, 22) # avoid truncating hash
447 return util.ellipsis(line, maxlen)
448 return util.ellipsis(line, maxlen)
448
449
449 def tostate(self):
450 def tostate(self):
450 """Print an action in format used by histedit state files
451 """Print an action in format used by histedit state files
451 (the first line is a verb, the remainder is the second)
452 (the first line is a verb, the remainder is the second)
452 """
453 """
453 return "%s\n%s" % (self.verb, node.hex(self.node))
454 return "%s\n%s" % (self.verb, node.hex(self.node))
454
455
455 def run(self):
456 def run(self):
456 """Runs the action. The default behavior is simply apply the action's
457 """Runs the action. The default behavior is simply apply the action's
457 rulectx onto the current parentctx."""
458 rulectx onto the current parentctx."""
458 self.applychange()
459 self.applychange()
459 self.continuedirty()
460 self.continuedirty()
460 return self.continueclean()
461 return self.continueclean()
461
462
462 def applychange(self):
463 def applychange(self):
463 """Applies the changes from this action's rulectx onto the current
464 """Applies the changes from this action's rulectx onto the current
464 parentctx, but does not commit them."""
465 parentctx, but does not commit them."""
465 repo = self.repo
466 repo = self.repo
466 rulectx = repo[self.node]
467 rulectx = repo[self.node]
467 repo.ui.pushbuffer(error=True, labeled=True)
468 repo.ui.pushbuffer(error=True, labeled=True)
468 hg.update(repo, self.state.parentctxnode, quietempty=True)
469 hg.update(repo, self.state.parentctxnode, quietempty=True)
469 stats = applychanges(repo.ui, repo, rulectx, {})
470 stats = applychanges(repo.ui, repo, rulectx, {})
470 if stats and stats[3] > 0:
471 if stats and stats[3] > 0:
471 buf = repo.ui.popbuffer()
472 buf = repo.ui.popbuffer()
472 repo.ui.write(*buf)
473 repo.ui.write(*buf)
473 raise error.InterventionRequired(
474 raise error.InterventionRequired(
474 _('Fix up the change (%s %s)') %
475 _('Fix up the change (%s %s)') %
475 (self.verb, node.short(self.node)),
476 (self.verb, node.short(self.node)),
476 hint=_('hg histedit --continue to resume'))
477 hint=_('hg histedit --continue to resume'))
477 else:
478 else:
478 repo.ui.popbuffer()
479 repo.ui.popbuffer()
479
480
480 def continuedirty(self):
481 def continuedirty(self):
481 """Continues the action when changes have been applied to the working
482 """Continues the action when changes have been applied to the working
482 copy. The default behavior is to commit the dirty changes."""
483 copy. The default behavior is to commit the dirty changes."""
483 repo = self.repo
484 repo = self.repo
484 rulectx = repo[self.node]
485 rulectx = repo[self.node]
485
486
486 editor = self.commiteditor()
487 editor = self.commiteditor()
487 commit = commitfuncfor(repo, rulectx)
488 commit = commitfuncfor(repo, rulectx)
488
489
489 commit(text=rulectx.description(), user=rulectx.user(),
490 commit(text=rulectx.description(), user=rulectx.user(),
490 date=rulectx.date(), extra=rulectx.extra(), editor=editor)
491 date=rulectx.date(), extra=rulectx.extra(), editor=editor)
491
492
492 def commiteditor(self):
493 def commiteditor(self):
493 """The editor to be used to edit the commit message."""
494 """The editor to be used to edit the commit message."""
494 return False
495 return False
495
496
    def continueclean(self):
        """Continues the action when the working copy is clean. The default
        behavior is to accept the current commit as the new version of the
        rulectx.

        Returns a (context, replacements) pair, where replacements maps
        the old node to the tuple of nodes that replace it.
        """
        ctx = self.repo['.']
        if ctx.node() == self.state.parentctxnode:
            # The working copy parent did not move: nothing was committed
            # for this rule, so the changeset is dropped (empty successor
            # tuple).
            self.repo.ui.warn(_('%s: skipping changeset (no changes)\n') %
                              node.short(self.node))
            return ctx, [(self.node, tuple())]
        if ctx.node() == self.node:
            # Nothing changed
            return ctx, []
        # The rule produced a new commit: record old -> new replacement.
        return ctx, [(self.node, (ctx.node(),))]
509
510
def commitfuncfor(repo, src):
    """Build a commit function for the replacement of <src>

    This function ensure we apply the same treatment to all changesets.

    - Add a 'histedit_source' entry in extra.

    Note that fold has its own separated logic because its handling is a bit
    different and not easily factored out of the fold method.
    """
    # Capture the phase now so the replacement is never more public than
    # the changeset it replaces.
    phasemin = src.phase()

    def commitfunc(**kwargs):
        overrides = {('phases', 'new-commit'): phasemin}
        with repo.ui.configoverride(overrides, 'histedit'):
            # Copy before mutating: the caller's extra dict must not be
            # modified in place.
            extra = dict(kwargs.get('extra', {}))
            extra['histedit_source'] = src.hex()
            kwargs['extra'] = extra
            return repo.commit(**kwargs)

    return commitfunc
529
530
def applychanges(ui, repo, ctx, opts):
    """Merge changeset from ctx (only) in the current working directory

    Returns the merge stats from graft, or None when the change could be
    reverted into place without a merge.
    """
    wcpar = repo.dirstate.parents()[0]
    if ctx.p1().node() == wcpar:
        # edits are "in place" we do not need to make any merge,
        # just applies changes on parent for editing
        cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
        stats = None
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                              'histedit')
            stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
        finally:
            # Always clear the override, even if graft raised.
            repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
    return stats
547
548
def collapse(repo, first, last, commitopts, skipprompt=False):
    """collapse the set of revisions from first to last as new one.

    Expected commit options are:
    - message
    - date
    - username
    Commit message is edited in all cases.

    This function works in memory.

    Returns the node of the new commit, or None when the range
    first::last is empty. Raises error.ParseError when any changeset in
    the range is public (immutable).
    """
    ctxs = list(repo.set('%d::%d', first, last))
    if not ctxs:
        return None
    for c in ctxs:
        if not c.mutable():
            raise error.ParseError(
                _("cannot fold into public change %s") % node.short(c.node()))
    base = first.parents()[0]

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, last)

    # prune files which were reverted by the updates
    files = [f for f in files if not cmdutil.samefile(f, last, base)]
    # commit version of these files as defined by head
    headmf = last.manifest()

    def filectxfn(repo, ctx, path):
        # Files absent from the head manifest were deleted by the range;
        # returning None records the deletion in the memctx.
        if path in headmf:
            fctx = last[path]
            flags = fctx.flags()
            mctx = context.memfilectx(repo,
                                      fctx.path(), fctx.data(),
                                      islink='l' in flags,
                                      isexec='x' in flags,
                                      copied=copied.get(path))
            return mctx
        return None

    if commitopts.get('message'):
        message = commitopts['message']
    else:
        message = first.description()
    user = commitopts.get('user')
    date = commitopts.get('date')
    extra = commitopts.get('extra')

    # The folded commit sits where `first` sat: same parents.
    parents = (first.p1().node(), first.p2().node())
    editor = None
    if not skipprompt:
        editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
    new = context.memctx(repo,
                         parents=parents,
                         text=message,
                         files=files,
                         filectxfn=filectxfn,
                         user=user,
                         date=date,
                         extra=extra,
                         editor=editor)
    return repo.commitctx(new)
614
615
615 def _isdirtywc(repo):
616 def _isdirtywc(repo):
616 return repo[None].dirty(missing=True)
617 return repo[None].dirty(missing=True)
617
618
def abortdirty():
    """Abort the current histedit step because the working copy is dirty."""
    raise error.Abort(_('working copy has pending changes'),
                      hint=_('amend, commit, or revert them and run histedit '
                             '--continue, or abort with histedit --abort'))
622
623
def action(verbs, message, priority=False, internal=False):
    """Class decorator registering a histedit action under *verbs*.

    verbs is a list of names, the first being the canonical verb. The
    canonical verb is added to exactly one of the module-level sets
    (primaryactions, internalactions, secondaryactions, tertiaryactions),
    and every name in verbs is mapped to the class in actiontable.
    The class also gets verb/verbs/message attributes.
    """
    def wrap(cls):
        # An action cannot be both user-prioritized and internal-only.
        assert not priority or not internal
        verb = verbs[0]
        if priority:
            primaryactions.add(verb)
        elif internal:
            internalactions.add(verb)
        elif len(verbs) > 1:
            # Has at least one abbreviation.
            secondaryactions.add(verb)
        else:
            tertiaryactions.add(verb)

        cls.verb = verb
        cls.verbs = verbs
        cls.message = message
        for verb in verbs:
            actiontable[verb] = cls
        return cls
    return wrap
643
644
@action(['pick', 'p'],
        _('use commit'),
        priority=True)
class pick(histeditaction):
    def run(self):
        """Apply the picked changeset.

        When the changeset already sits on the new parent there is
        nothing to redo and it is reused as-is; otherwise fall back
        to the generic histeditaction behavior.
        """
        ctx = self.repo[self.node]
        if ctx.parents()[0].node() != self.state.parentctxnode:
            return super(pick, self).run()
        self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
        return ctx, []
655
656
@action(['edit', 'e'],
        _('use commit, but stop for amending'),
        priority=True)
class edit(histeditaction):
    def run(self):
        """Check out the parent, apply the change, then stop for the user."""
        repo = self.repo
        rulectx = repo[self.node]
        hg.update(repo, self.state.parentctxnode, quietempty=True)
        applychanges(repo.ui, repo, rulectx, {})
        # Hand control back to the user; histedit resumes via --continue.
        raise error.InterventionRequired(
            _('Editing (%s), you may commit or record as needed now.')
            % node.short(self.node),
            hint=_('hg histedit --continue to resume'))

    def commiteditor(self):
        # Always fire up an editor when committing the edited change.
        return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
672
673
@action(['fold', 'f'],
        _('use commit, but combine it with the one above'))
class fold(histeditaction):
    def verify(self, prev, expected, seen):
        """ Verifies semantic correctness of the fold rule"""
        super(fold, self).verify(prev, expected, seen)
        repo = self.repo
        if not prev:
            c = repo[self.node].parents()[0]
        elif not prev.verb in ('pick', 'base'):
            # Folding onto another fold/roll is checked when that rule
            # eventually resolves; nothing to verify here.
            return
        else:
            c = repo[prev.node]
        if not c.mutable():
            raise error.ParseError(
                _("cannot fold into public change %s") % node.short(c.node()))


    def continuedirty(self):
        """Commit pending working-copy changes as a temporary revision
        that the following continueclean() pass will fold."""
        repo = self.repo
        rulectx = repo[self.node]

        commit = commitfuncfor(repo, rulectx)
        commit(text='fold-temp-revision %s' % node.short(self.node),
               user=rulectx.user(), date=rulectx.date(),
               extra=rulectx.extra())

    def continueclean(self):
        """Fold the commits created since the parent into one changeset."""
        repo = self.repo
        ctx = repo['.']
        rulectx = repo[self.node]
        parentctxnode = self.state.parentctxnode
        if ctx.node() == parentctxnode:
            # Nothing was committed on top of the parent: the fold target
            # collapses into the parent itself.
            repo.ui.warn(_('%s: empty changeset\n') %
                         node.short(self.node))
            return ctx, [(self.node, (parentctxnode,))]

        # Everything committed between the parent and the working copy
        # is part of the fold.
        parentctx = repo[parentctxnode]
        newcommits = set(c.node() for c in repo.set('(%d::. - %d)', parentctx,
                                                    parentctx))
        if not newcommits:
            repo.ui.warn(_('%s: cannot fold - working copy is not a '
                           'descendant of previous commit %s\n') %
                         (node.short(self.node), node.short(parentctxnode)))
            return ctx, [(self.node, (ctx.node(),))]

        middlecommits = newcommits.copy()
        middlecommits.discard(ctx.node())

        return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
                               middlecommits)

    def skipprompt(self):
        """Returns true if the rule should skip the message editor.

        For example, 'fold' wants to show an editor, but 'rollup'
        doesn't want to.
        """
        return False

    def mergedescs(self):
        """Returns true if the rule should merge messages of multiple changes.

        This exists mainly so that 'rollup' rules can be a subclass of
        'fold'.
        """
        return True

    def firstdate(self):
        """Returns true if the rule should preserve the date of the first
        change.

        This exists mainly so that 'rollup' rules can be a subclass of
        'fold'.
        """
        return False

    def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
        """Collapse ctx and newnode into a single changeset and return the
        resulting (context, replacements) pair."""
        parent = ctx.parents()[0].node()
        repo.ui.pushbuffer()
        hg.update(repo, parent)
        repo.ui.popbuffer()
        ### prepare new commit data
        commitopts = {}
        commitopts['user'] = ctx.user()
        # commit message
        if not self.mergedescs():
            newmessage = ctx.description()
        else:
            # Join all folded descriptions with a '***' separator.
            newmessage = '\n***\n'.join(
                [ctx.description()] +
                [repo[r].description() for r in internalchanges] +
                [oldctx.description()]) + '\n'
        commitopts['message'] = newmessage
        # date
        if self.firstdate():
            commitopts['date'] = ctx.date()
        else:
            commitopts['date'] = max(ctx.date(), oldctx.date())
        extra = ctx.extra().copy()
        # histedit_source
        # note: ctx is likely a temporary commit but that the best we can do
        # here. This is sufficient to solve issue3681 anyway.
        extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
        commitopts['extra'] = extra
        # Never commit with a phase more public than either source.
        phasemin = max(ctx.phase(), oldctx.phase())
        overrides = {('phases', 'new-commit'): phasemin}
        with repo.ui.configoverride(overrides, 'histedit'):
            n = collapse(repo, ctx, repo[newnode], commitopts,
                         skipprompt=self.skipprompt())
        if n is None:
            return ctx, []
        repo.ui.pushbuffer()
        hg.update(repo, n)
        repo.ui.popbuffer()
        # All folded nodes are replaced by the collapsed commit n.
        replacements = [(oldctx.node(), (newnode,)),
                        (ctx.node(), (n,)),
                        (newnode, (n,)),
                        ]
        for ich in internalchanges:
            replacements.append((ich, (n,)))
        return repo[n], replacements
795
796
class base(histeditaction):
    """Action checking out a changeset that is NOT part of the edited set,
    to serve as the new base of the history being rewritten."""

    def run(self):
        if self.repo['.'].node() != self.node:
            mergemod.update(self.repo, self.node, False, True)
            # branchmerge, force)
        return self.continueclean()

    def continuedirty(self):
        # A base switch must start from a clean working copy.
        abortdirty()

    def continueclean(self):
        # The base produces no replacement: it only moves the parent.
        basectx = self.repo['.']
        return basectx, []

    def _verifynodeconstraints(self, prev, expected, seen):
        # base can only be use with a node not in the edited set
        if self.node in expected:
            msg = _('%s "%s" changeset was an edited list candidate')
            raise error.ParseError(
                msg % (self.verb, node.short(self.node)),
                hint=_('base must only use unlisted changesets'))
818
819
@action(['_multifold'],
        _(
    """fold subclass used for when multiple folds happen in a row

    We only want to fire the editor for the folded message once when
    (say) four changes are folded down into a single change. This is
    similar to rollup, but we should preserve both messages so that
    when the last fold operation runs we can show the user all the
    commit messages in their editor.
    """),
        internal=True)
class _multifold(fold):
    def skipprompt(self):
        # Intermediate folds never prompt; only the last fold in the run
        # (a plain 'fold') fires the editor.
        return True
833
834
@action(["roll", "r"],
        _("like fold, but discard this commit's description and date"))
class rollup(fold):
    def mergedescs(self):
        # Discard the rolled-up descriptions; keep only the first one.
        return False

    def skipprompt(self):
        # No editor: the surviving message is used unchanged.
        return True

    def firstdate(self):
        # Keep the date of the first (kept) changeset.
        return True
845
846
@action(["drop", "d"],
        _('remove commit from history'))
class drop(histeditaction):
    def run(self):
        # Stay on the current parent; mapping the dropped node to an empty
        # successor tuple records its removal.
        parentctx = self.repo[self.state.parentctxnode]
        return parentctx, [(self.node, tuple())]
852
853
@action(["mess", "m"],
        _('edit commit message without changing commit content'),
        priority=True)
class message(histeditaction):
    def commiteditor(self):
        # Always pop up an editor so the user can rewrite the message.
        return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
859
860
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code

    Returns the node of the single root of the outgoing set; aborts when
    there are no outgoing changesets or the outgoing set has several
    roots (ambiguous target).
    """
    if opts is None:
        opts = {}
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if 1 < len(roots):
        msg = _('there are ambiguous outgoing revisions')
        hint = _("see 'hg help histedit' for more detail")
        raise error.Abort(msg, hint=hint)
    return repo.lookup(roots[0])
885
886
886
887
@command('histedit',
    [('', 'commands', '',
      _('read history edits from the specified file'), _('FILE')),
     ('c', 'continue', False, _('continue an edit already in progress')),
     ('', 'edit-plan', False, _('edit remaining actions list')),
     ('k', 'keep', False,
      _("don't strip old nodes after edit is complete")),
     ('', 'abort', False, _('abort an edit in progress')),
     ('o', 'outgoing', False, _('changesets not found in destination')),
     ('f', 'force', False,
      _('force outgoing even for unrelated repositories')),
     ('r', 'rev', [], _('first revision to be edited'), _('REV'))],
    _("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"))
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command lets you edit a linear series of changesets (up to
    and including the working directory, which should be clean).
    You can:

    - `pick` to [re]order a changeset

    - `drop` to omit changeset

    - `mess` to reword the changeset commit message

    - `fold` to combine it with the preceding changeset (using the later date)

    - `roll` like fold, but discarding this commit's description and date

    - `edit` to edit this changeset (preserving date)

    There are a number of ways to select the root changeset:

    - Specify ANCESTOR directly

    - Use --outgoing -- it will be the first linear changeset not
      included in destination. (See :hg:`help config.paths.default-push`)

    - Otherwise, the value from the "histedit.defaultrev" config option
      is used as a revset to select the base revision when ANCESTOR is not
      specified. The first revision returned by the revset is used. By
      default, this selects the editable history that is unique to the
      ancestry of the working directory.

    .. container:: verbose

       If you use --outgoing, this command will abort if there are ambiguous
       outgoing revisions. For example, if there are multiple branches
       containing outgoing revisions.

       Use "min(outgoing() and ::.)" or similar revset specification
       instead of --outgoing to specify edit target revision exactly in
       such ambiguous situation. See :hg:`help revsets` for detail about
       selecting revisions.

    .. container:: verbose

       Examples:

         - A number of changes have been made.
           Revision 3 is no longer needed.

           Start history editing from revision 3::

             hg histedit -r 3

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

           Additional information about the possible actions
           to take appears below the list of revisions.

           To remove revision 3 from the history,
           its action (at the beginning of the relevant line)
           is changed to 'drop'::

             drop 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

         - A number of changes have been made.
           Revision 2 and 4 need to be swapped.

           Start history editing from revision 2::

             hg histedit -r 2

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 252a1af424ad 2 Blorb a morgwazzle
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog

           To swap revision 2 and 4, its lines are swapped
           in the editor::

             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 252a1af424ad 2 Blorb a morgwazzle

    Returns 0 on success, 1 if user intervention is required (not only
    for intentional "edit" command, but also for resolving unexpected
    conflicts).
    """
    state = histeditstate(repo)
    try:
        # Hold both the working-copy lock and the store lock for the
        # whole operation; released in reverse order below.
        state.wlock = repo.wlock()
        state.lock = repo.lock()
        _histedit(ui, repo, state, *freeargs, **opts)
    finally:
        release(state.lock, state.wlock)
1004
1005
1005 goalcontinue = 'continue'
1006 goalcontinue = 'continue'
1006 goalabort = 'abort'
1007 goalabort = 'abort'
1007 goaleditplan = 'edit-plan'
1008 goaleditplan = 'edit-plan'
1008 goalnew = 'new'
1009 goalnew = 'new'
1009
1010
1010 def _getgoal(opts):
1011 def _getgoal(opts):
1011 if opts.get('continue'):
1012 if opts.get('continue'):
1012 return goalcontinue
1013 return goalcontinue
1013 if opts.get('abort'):
1014 if opts.get('abort'):
1014 return goalabort
1015 return goalabort
1015 if opts.get('edit_plan'):
1016 if opts.get('edit_plan'):
1016 return goaleditplan
1017 return goaleditplan
1017 return goalnew
1018 return goalnew
1018
1019
1019 def _readfile(ui, path):
1020 def _readfile(ui, path):
1020 if path == '-':
1021 if path == '-':
1021 with ui.timeblockedsection('histedit'):
1022 with ui.timeblockedsection('histedit'):
1022 return ui.fin.read()
1023 return ui.fin.read()
1023 else:
1024 else:
1024 with open(path, 'rb') as f:
1025 with open(path, 'rb') as f:
1025 return f.read()
1026 return f.read()
1026
1027
1027 def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
1028 def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
1028 # TODO only abort if we try to histedit mq patches, not just
1029 # TODO only abort if we try to histedit mq patches, not just
1029 # blanket if mq patches are applied somewhere
1030 # blanket if mq patches are applied somewhere
1030 mq = getattr(repo, 'mq', None)
1031 mq = getattr(repo, 'mq', None)
1031 if mq and mq.applied:
1032 if mq and mq.applied:
1032 raise error.Abort(_('source has mq patches applied'))
1033 raise error.Abort(_('source has mq patches applied'))
1033
1034
1034 # basic argument incompatibility processing
1035 # basic argument incompatibility processing
1035 outg = opts.get('outgoing')
1036 outg = opts.get('outgoing')
1036 editplan = opts.get('edit_plan')
1037 editplan = opts.get('edit_plan')
1037 abort = opts.get('abort')
1038 abort = opts.get('abort')
1038 force = opts.get('force')
1039 force = opts.get('force')
1039 if force and not outg:
1040 if force and not outg:
1040 raise error.Abort(_('--force only allowed with --outgoing'))
1041 raise error.Abort(_('--force only allowed with --outgoing'))
1041 if goal == 'continue':
1042 if goal == 'continue':
1042 if any((outg, abort, revs, freeargs, rules, editplan)):
1043 if any((outg, abort, revs, freeargs, rules, editplan)):
1043 raise error.Abort(_('no arguments allowed with --continue'))
1044 raise error.Abort(_('no arguments allowed with --continue'))
1044 elif goal == 'abort':
1045 elif goal == 'abort':
1045 if any((outg, revs, freeargs, rules, editplan)):
1046 if any((outg, revs, freeargs, rules, editplan)):
1046 raise error.Abort(_('no arguments allowed with --abort'))
1047 raise error.Abort(_('no arguments allowed with --abort'))
1047 elif goal == 'edit-plan':
1048 elif goal == 'edit-plan':
1048 if any((outg, revs, freeargs)):
1049 if any((outg, revs, freeargs)):
1049 raise error.Abort(_('only --commands argument allowed with '
1050 raise error.Abort(_('only --commands argument allowed with '
1050 '--edit-plan'))
1051 '--edit-plan'))
1051 else:
1052 else:
1052 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
1053 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
1053 raise error.Abort(_('history edit already in progress, try '
1054 raise error.Abort(_('history edit already in progress, try '
1054 '--continue or --abort'))
1055 '--continue or --abort'))
1055 if outg:
1056 if outg:
1056 if revs:
1057 if revs:
1057 raise error.Abort(_('no revisions allowed with --outgoing'))
1058 raise error.Abort(_('no revisions allowed with --outgoing'))
1058 if len(freeargs) > 1:
1059 if len(freeargs) > 1:
1059 raise error.Abort(
1060 raise error.Abort(
1060 _('only one repo argument allowed with --outgoing'))
1061 _('only one repo argument allowed with --outgoing'))
1061 else:
1062 else:
1062 revs.extend(freeargs)
1063 revs.extend(freeargs)
1063 if len(revs) == 0:
1064 if len(revs) == 0:
1064 defaultrev = destutil.desthistedit(ui, repo)
1065 defaultrev = destutil.desthistedit(ui, repo)
1065 if defaultrev is not None:
1066 if defaultrev is not None:
1066 revs.append(defaultrev)
1067 revs.append(defaultrev)
1067
1068
1068 if len(revs) != 1:
1069 if len(revs) != 1:
1069 raise error.Abort(
1070 raise error.Abort(
1070 _('histedit requires exactly one ancestor revision'))
1071 _('histedit requires exactly one ancestor revision'))
1071
1072
1072 def _histedit(ui, repo, state, *freeargs, **opts):
1073 def _histedit(ui, repo, state, *freeargs, **opts):
1073 goal = _getgoal(opts)
1074 goal = _getgoal(opts)
1074 revs = opts.get('rev', [])
1075 revs = opts.get('rev', [])
1075 rules = opts.get('commands', '')
1076 rules = opts.get('commands', '')
1076 state.keep = opts.get('keep', False)
1077 state.keep = opts.get('keep', False)
1077
1078
1078 _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)
1079 _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)
1079
1080
1080 # rebuild state
1081 # rebuild state
1081 if goal == goalcontinue:
1082 if goal == goalcontinue:
1082 state.read()
1083 state.read()
1083 state = bootstrapcontinue(ui, state, opts)
1084 state = bootstrapcontinue(ui, state, opts)
1084 elif goal == goaleditplan:
1085 elif goal == goaleditplan:
1085 _edithisteditplan(ui, repo, state, rules)
1086 _edithisteditplan(ui, repo, state, rules)
1086 return
1087 return
1087 elif goal == goalabort:
1088 elif goal == goalabort:
1088 _aborthistedit(ui, repo, state)
1089 _aborthistedit(ui, repo, state)
1089 return
1090 return
1090 else:
1091 else:
1091 # goal == goalnew
1092 # goal == goalnew
1092 _newhistedit(ui, repo, state, revs, freeargs, opts)
1093 _newhistedit(ui, repo, state, revs, freeargs, opts)
1093
1094
1094 _continuehistedit(ui, repo, state)
1095 _continuehistedit(ui, repo, state)
1095 _finishhistedit(ui, repo, state)
1096 _finishhistedit(ui, repo, state)
1096
1097
1097 def _continuehistedit(ui, repo, state):
1098 def _continuehistedit(ui, repo, state):
1098 """This function runs after either:
1099 """This function runs after either:
1099 - bootstrapcontinue (if the goal is 'continue')
1100 - bootstrapcontinue (if the goal is 'continue')
1100 - _newhistedit (if the goal is 'new')
1101 - _newhistedit (if the goal is 'new')
1101 """
1102 """
1102 # preprocess rules so that we can hide inner folds from the user
1103 # preprocess rules so that we can hide inner folds from the user
1103 # and only show one editor
1104 # and only show one editor
1104 actions = state.actions[:]
1105 actions = state.actions[:]
1105 for idx, (action, nextact) in enumerate(
1106 for idx, (action, nextact) in enumerate(
1106 zip(actions, actions[1:] + [None])):
1107 zip(actions, actions[1:] + [None])):
1107 if action.verb == 'fold' and nextact and nextact.verb == 'fold':
1108 if action.verb == 'fold' and nextact and nextact.verb == 'fold':
1108 state.actions[idx].__class__ = _multifold
1109 state.actions[idx].__class__ = _multifold
1109
1110
1110 total = len(state.actions)
1111 total = len(state.actions)
1111 pos = 0
1112 pos = 0
1112 state.tr = None
1113 state.tr = None
1113
1114
1114 # Force an initial state file write, so the user can run --abort/continue
1115 # Force an initial state file write, so the user can run --abort/continue
1115 # even if there's an exception before the first transaction serialize.
1116 # even if there's an exception before the first transaction serialize.
1116 state.write()
1117 state.write()
1117 try:
1118 try:
1118 # Don't use singletransaction by default since it rolls the entire
1119 # Don't use singletransaction by default since it rolls the entire
1119 # transaction back if an unexpected exception happens (like a
1120 # transaction back if an unexpected exception happens (like a
1120 # pretxncommit hook throws, or the user aborts the commit msg editor).
1121 # pretxncommit hook throws, or the user aborts the commit msg editor).
1121 if ui.configbool("histedit", "singletransaction", False):
1122 if ui.configbool("histedit", "singletransaction", False):
1122 # Don't use a 'with' for the transaction, since actions may close
1123 # Don't use a 'with' for the transaction, since actions may close
1123 # and reopen a transaction. For example, if the action executes an
1124 # and reopen a transaction. For example, if the action executes an
1124 # external process it may choose to commit the transaction first.
1125 # external process it may choose to commit the transaction first.
1125 state.tr = repo.transaction('histedit')
1126 state.tr = repo.transaction('histedit')
1126
1127
1127 while state.actions:
1128 while state.actions:
1128 state.write(tr=state.tr)
1129 state.write(tr=state.tr)
1129 actobj = state.actions[0]
1130 actobj = state.actions[0]
1130 pos += 1
1131 pos += 1
1131 ui.progress(_("editing"), pos, actobj.torule(),
1132 ui.progress(_("editing"), pos, actobj.torule(),
1132 _('changes'), total)
1133 _('changes'), total)
1133 ui.debug('histedit: processing %s %s\n' % (actobj.verb,\
1134 ui.debug('histedit: processing %s %s\n' % (actobj.verb,\
1134 actobj.torule()))
1135 actobj.torule()))
1135 parentctx, replacement_ = actobj.run()
1136 parentctx, replacement_ = actobj.run()
1136 state.parentctxnode = parentctx.node()
1137 state.parentctxnode = parentctx.node()
1137 state.replacements.extend(replacement_)
1138 state.replacements.extend(replacement_)
1138 state.actions.pop(0)
1139 state.actions.pop(0)
1139
1140
1140 if state.tr is not None:
1141 if state.tr is not None:
1141 state.tr.close()
1142 state.tr.close()
1142 except error.InterventionRequired:
1143 except error.InterventionRequired:
1143 if state.tr is not None:
1144 if state.tr is not None:
1144 state.tr.close()
1145 state.tr.close()
1145 raise
1146 raise
1146 except Exception:
1147 except Exception:
1147 if state.tr is not None:
1148 if state.tr is not None:
1148 state.tr.abort()
1149 state.tr.abort()
1149 raise
1150 raise
1150
1151
1151 state.write()
1152 state.write()
1152 ui.progress(_("editing"), None)
1153 ui.progress(_("editing"), None)
1153
1154
1154 def _finishhistedit(ui, repo, state):
1155 def _finishhistedit(ui, repo, state):
1155 """This action runs when histedit is finishing its session"""
1156 """This action runs when histedit is finishing its session"""
1156 repo.ui.pushbuffer()
1157 repo.ui.pushbuffer()
1157 hg.update(repo, state.parentctxnode, quietempty=True)
1158 hg.update(repo, state.parentctxnode, quietempty=True)
1158 repo.ui.popbuffer()
1159 repo.ui.popbuffer()
1159
1160
1160 mapping, tmpnodes, created, ntm = processreplacement(state)
1161 mapping, tmpnodes, created, ntm = processreplacement(state)
1161 if mapping:
1162 if mapping:
1162 for prec, succs in mapping.iteritems():
1163 for prec, succs in mapping.iteritems():
1163 if not succs:
1164 if not succs:
1164 ui.debug('histedit: %s is dropped\n' % node.short(prec))
1165 ui.debug('histedit: %s is dropped\n' % node.short(prec))
1165 else:
1166 else:
1166 ui.debug('histedit: %s is replaced by %s\n' % (
1167 ui.debug('histedit: %s is replaced by %s\n' % (
1167 node.short(prec), node.short(succs[0])))
1168 node.short(prec), node.short(succs[0])))
1168 if len(succs) > 1:
1169 if len(succs) > 1:
1169 m = 'histedit: %s'
1170 m = 'histedit: %s'
1170 for n in succs[1:]:
1171 for n in succs[1:]:
1171 ui.debug(m % node.short(n))
1172 ui.debug(m % node.short(n))
1172
1173
1173 safecleanupnode(ui, repo, 'temp', tmpnodes)
1174 safecleanupnode(ui, repo, 'temp', tmpnodes)
1174
1175
1175 if not state.keep:
1176 if not state.keep:
1176 if mapping:
1177 if mapping:
1177 movebookmarks(ui, repo, mapping, state.topmost, ntm)
1178 movebookmarks(ui, repo, mapping, state.topmost, ntm)
1178 # TODO update mq state
1179 # TODO update mq state
1179 safecleanupnode(ui, repo, 'replaced', mapping)
1180 safecleanupnode(ui, repo, 'replaced', mapping)
1180
1181
1181 state.clear()
1182 state.clear()
1182 if os.path.exists(repo.sjoin('undo')):
1183 if os.path.exists(repo.sjoin('undo')):
1183 os.unlink(repo.sjoin('undo'))
1184 os.unlink(repo.sjoin('undo'))
1184 if repo.vfs.exists('histedit-last-edit.txt'):
1185 if repo.vfs.exists('histedit-last-edit.txt'):
1185 repo.vfs.unlink('histedit-last-edit.txt')
1186 repo.vfs.unlink('histedit-last-edit.txt')
1186
1187
1187 def _aborthistedit(ui, repo, state):
1188 def _aborthistedit(ui, repo, state):
1188 try:
1189 try:
1189 state.read()
1190 state.read()
1190 __, leafs, tmpnodes, __ = processreplacement(state)
1191 __, leafs, tmpnodes, __ = processreplacement(state)
1191 ui.debug('restore wc to old parent %s\n'
1192 ui.debug('restore wc to old parent %s\n'
1192 % node.short(state.topmost))
1193 % node.short(state.topmost))
1193
1194
1194 # Recover our old commits if necessary
1195 # Recover our old commits if necessary
1195 if not state.topmost in repo and state.backupfile:
1196 if not state.topmost in repo and state.backupfile:
1196 backupfile = repo.vfs.join(state.backupfile)
1197 backupfile = repo.vfs.join(state.backupfile)
1197 f = hg.openpath(ui, backupfile)
1198 f = hg.openpath(ui, backupfile)
1198 gen = exchange.readbundle(ui, f, backupfile)
1199 gen = exchange.readbundle(ui, f, backupfile)
1199 with repo.transaction('histedit.abort') as tr:
1200 with repo.transaction('histedit.abort') as tr:
1200 if not isinstance(gen, bundle2.unbundle20):
1201 if not isinstance(gen, bundle2.unbundle20):
1201 gen.apply(repo, 'histedit', 'bundle:' + backupfile)
1202 gen.apply(repo, 'histedit', 'bundle:' + backupfile)
1202 if isinstance(gen, bundle2.unbundle20):
1203 if isinstance(gen, bundle2.unbundle20):
1203 bundle2.applybundle(repo, gen, tr,
1204 bundle2.applybundle(repo, gen, tr,
1204 source='histedit',
1205 source='histedit',
1205 url='bundle:' + backupfile)
1206 url='bundle:' + backupfile)
1206
1207
1207 os.remove(backupfile)
1208 os.remove(backupfile)
1208
1209
1209 # check whether we should update away
1210 # check whether we should update away
1210 if repo.unfiltered().revs('parents() and (%n or %ln::)',
1211 if repo.unfiltered().revs('parents() and (%n or %ln::)',
1211 state.parentctxnode, leafs | tmpnodes):
1212 state.parentctxnode, leafs | tmpnodes):
1212 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
1213 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
1213 cleanupnode(ui, repo, 'created', tmpnodes)
1214 cleanupnode(ui, repo, 'created', tmpnodes)
1214 cleanupnode(ui, repo, 'temp', leafs)
1215 cleanupnode(ui, repo, 'temp', leafs)
1215 except Exception:
1216 except Exception:
1216 if state.inprogress():
1217 if state.inprogress():
1217 ui.warn(_('warning: encountered an exception during histedit '
1218 ui.warn(_('warning: encountered an exception during histedit '
1218 '--abort; the repository may not have been completely '
1219 '--abort; the repository may not have been completely '
1219 'cleaned up\n'))
1220 'cleaned up\n'))
1220 raise
1221 raise
1221 finally:
1222 finally:
1222 state.clear()
1223 state.clear()
1223
1224
1224 def _edithisteditplan(ui, repo, state, rules):
1225 def _edithisteditplan(ui, repo, state, rules):
1225 state.read()
1226 state.read()
1226 if not rules:
1227 if not rules:
1227 comment = geteditcomment(ui,
1228 comment = geteditcomment(ui,
1228 node.short(state.parentctxnode),
1229 node.short(state.parentctxnode),
1229 node.short(state.topmost))
1230 node.short(state.topmost))
1230 rules = ruleeditor(repo, ui, state.actions, comment)
1231 rules = ruleeditor(repo, ui, state.actions, comment)
1231 else:
1232 else:
1232 rules = _readfile(ui, rules)
1233 rules = _readfile(ui, rules)
1233 actions = parserules(rules, state)
1234 actions = parserules(rules, state)
1234 ctxs = [repo[act.node] \
1235 ctxs = [repo[act.node] \
1235 for act in state.actions if act.node]
1236 for act in state.actions if act.node]
1236 warnverifyactions(ui, repo, actions, state, ctxs)
1237 warnverifyactions(ui, repo, actions, state, ctxs)
1237 state.actions = actions
1238 state.actions = actions
1238 state.write()
1239 state.write()
1239
1240
1240 def _newhistedit(ui, repo, state, revs, freeargs, opts):
1241 def _newhistedit(ui, repo, state, revs, freeargs, opts):
1241 outg = opts.get('outgoing')
1242 outg = opts.get('outgoing')
1242 rules = opts.get('commands', '')
1243 rules = opts.get('commands', '')
1243 force = opts.get('force')
1244 force = opts.get('force')
1244
1245
1245 cmdutil.checkunfinished(repo)
1246 cmdutil.checkunfinished(repo)
1246 cmdutil.bailifchanged(repo)
1247 cmdutil.bailifchanged(repo)
1247
1248
1248 topmost, empty = repo.dirstate.parents()
1249 topmost, empty = repo.dirstate.parents()
1249 if outg:
1250 if outg:
1250 if freeargs:
1251 if freeargs:
1251 remote = freeargs[0]
1252 remote = freeargs[0]
1252 else:
1253 else:
1253 remote = None
1254 remote = None
1254 root = findoutgoing(ui, repo, remote, force, opts)
1255 root = findoutgoing(ui, repo, remote, force, opts)
1255 else:
1256 else:
1256 rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
1257 rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
1257 if len(rr) != 1:
1258 if len(rr) != 1:
1258 raise error.Abort(_('The specified revisions must have '
1259 raise error.Abort(_('The specified revisions must have '
1259 'exactly one common root'))
1260 'exactly one common root'))
1260 root = rr[0].node()
1261 root = rr[0].node()
1261
1262
1262 revs = between(repo, root, topmost, state.keep)
1263 revs = between(repo, root, topmost, state.keep)
1263 if not revs:
1264 if not revs:
1264 raise error.Abort(_('%s is not an ancestor of working directory') %
1265 raise error.Abort(_('%s is not an ancestor of working directory') %
1265 node.short(root))
1266 node.short(root))
1266
1267
1267 ctxs = [repo[r] for r in revs]
1268 ctxs = [repo[r] for r in revs]
1268 if not rules:
1269 if not rules:
1269 comment = geteditcomment(ui, node.short(root), node.short(topmost))
1270 comment = geteditcomment(ui, node.short(root), node.short(topmost))
1270 actions = [pick(state, r) for r in revs]
1271 actions = [pick(state, r) for r in revs]
1271 rules = ruleeditor(repo, ui, actions, comment)
1272 rules = ruleeditor(repo, ui, actions, comment)
1272 else:
1273 else:
1273 rules = _readfile(ui, rules)
1274 rules = _readfile(ui, rules)
1274 actions = parserules(rules, state)
1275 actions = parserules(rules, state)
1275 warnverifyactions(ui, repo, actions, state, ctxs)
1276 warnverifyactions(ui, repo, actions, state, ctxs)
1276
1277
1277 parentctxnode = repo[root].parents()[0].node()
1278 parentctxnode = repo[root].parents()[0].node()
1278
1279
1279 state.parentctxnode = parentctxnode
1280 state.parentctxnode = parentctxnode
1280 state.actions = actions
1281 state.actions = actions
1281 state.topmost = topmost
1282 state.topmost = topmost
1282 state.replacements = []
1283 state.replacements = []
1283
1284
1284 # Create a backup so we can always abort completely.
1285 # Create a backup so we can always abort completely.
1285 backupfile = None
1286 backupfile = None
1286 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1287 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1287 backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
1288 backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
1288 'histedit')
1289 'histedit')
1289 state.backupfile = backupfile
1290 state.backupfile = backupfile
1290
1291
1291 def _getsummary(ctx):
1292 def _getsummary(ctx):
1292 # a common pattern is to extract the summary but default to the empty
1293 # a common pattern is to extract the summary but default to the empty
1293 # string
1294 # string
1294 summary = ctx.description() or ''
1295 summary = ctx.description() or ''
1295 if summary:
1296 if summary:
1296 summary = summary.splitlines()[0]
1297 summary = summary.splitlines()[0]
1297 return summary
1298 return summary
1298
1299
1299 def bootstrapcontinue(ui, state, opts):
1300 def bootstrapcontinue(ui, state, opts):
1300 repo = state.repo
1301 repo = state.repo
1301
1302
1302 ms = mergemod.mergestate.read(repo)
1303 ms = mergemod.mergestate.read(repo)
1303 mergeutil.checkunresolved(ms)
1304 mergeutil.checkunresolved(ms)
1304
1305
1305 if state.actions:
1306 if state.actions:
1306 actobj = state.actions.pop(0)
1307 actobj = state.actions.pop(0)
1307
1308
1308 if _isdirtywc(repo):
1309 if _isdirtywc(repo):
1309 actobj.continuedirty()
1310 actobj.continuedirty()
1310 if _isdirtywc(repo):
1311 if _isdirtywc(repo):
1311 abortdirty()
1312 abortdirty()
1312
1313
1313 parentctx, replacements = actobj.continueclean()
1314 parentctx, replacements = actobj.continueclean()
1314
1315
1315 state.parentctxnode = parentctx.node()
1316 state.parentctxnode = parentctx.node()
1316 state.replacements.extend(replacements)
1317 state.replacements.extend(replacements)
1317
1318
1318 return state
1319 return state
1319
1320
1320 def between(repo, old, new, keep):
1321 def between(repo, old, new, keep):
1321 """select and validate the set of revision to edit
1322 """select and validate the set of revision to edit
1322
1323
1323 When keep is false, the specified set can't have children."""
1324 When keep is false, the specified set can't have children."""
1324 ctxs = list(repo.set('%n::%n', old, new))
1325 ctxs = list(repo.set('%n::%n', old, new))
1325 if ctxs and not keep:
1326 if ctxs and not keep:
1326 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
1327 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
1327 repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
1328 repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
1328 raise error.Abort(_('can only histedit a changeset together '
1329 raise error.Abort(_('can only histedit a changeset together '
1329 'with all its descendants'))
1330 'with all its descendants'))
1330 if repo.revs('(%ld) and merge()', ctxs):
1331 if repo.revs('(%ld) and merge()', ctxs):
1331 raise error.Abort(_('cannot edit history that contains merges'))
1332 raise error.Abort(_('cannot edit history that contains merges'))
1332 root = ctxs[0] # list is already sorted by repo.set
1333 root = ctxs[0] # list is already sorted by repo.set
1333 if not root.mutable():
1334 if not root.mutable():
1334 raise error.Abort(_('cannot edit public changeset: %s') % root,
1335 raise error.Abort(_('cannot edit public changeset: %s') % root,
1335 hint=_("see 'hg help phases' for details"))
1336 hint=_("see 'hg help phases' for details"))
1336 return [c.node() for c in ctxs]
1337 return [c.node() for c in ctxs]
1337
1338
1338 def ruleeditor(repo, ui, actions, editcomment=""):
1339 def ruleeditor(repo, ui, actions, editcomment=""):
1339 """open an editor to edit rules
1340 """open an editor to edit rules
1340
1341
1341 rules are in the format [ [act, ctx], ...] like in state.rules
1342 rules are in the format [ [act, ctx], ...] like in state.rules
1342 """
1343 """
1343 if repo.ui.configbool("experimental", "histedit.autoverb"):
1344 if repo.ui.configbool("experimental", "histedit.autoverb"):
1344 newact = util.sortdict()
1345 newact = util.sortdict()
1345 for act in actions:
1346 for act in actions:
1346 ctx = repo[act.node]
1347 ctx = repo[act.node]
1347 summary = _getsummary(ctx)
1348 summary = _getsummary(ctx)
1348 fword = summary.split(' ', 1)[0].lower()
1349 fword = summary.split(' ', 1)[0].lower()
1349 added = False
1350 added = False
1350
1351
1351 # if it doesn't end with the special character '!' just skip this
1352 # if it doesn't end with the special character '!' just skip this
1352 if fword.endswith('!'):
1353 if fword.endswith('!'):
1353 fword = fword[:-1]
1354 fword = fword[:-1]
1354 if fword in primaryactions | secondaryactions | tertiaryactions:
1355 if fword in primaryactions | secondaryactions | tertiaryactions:
1355 act.verb = fword
1356 act.verb = fword
1356 # get the target summary
1357 # get the target summary
1357 tsum = summary[len(fword) + 1:].lstrip()
1358 tsum = summary[len(fword) + 1:].lstrip()
1358 # safe but slow: reverse iterate over the actions so we
1359 # safe but slow: reverse iterate over the actions so we
1359 # don't clash on two commits having the same summary
1360 # don't clash on two commits having the same summary
1360 for na, l in reversed(list(newact.iteritems())):
1361 for na, l in reversed(list(newact.iteritems())):
1361 actx = repo[na.node]
1362 actx = repo[na.node]
1362 asum = _getsummary(actx)
1363 asum = _getsummary(actx)
1363 if asum == tsum:
1364 if asum == tsum:
1364 added = True
1365 added = True
1365 l.append(act)
1366 l.append(act)
1366 break
1367 break
1367
1368
1368 if not added:
1369 if not added:
1369 newact[act] = []
1370 newact[act] = []
1370
1371
1371 # copy over and flatten the new list
1372 # copy over and flatten the new list
1372 actions = []
1373 actions = []
1373 for na, l in newact.iteritems():
1374 for na, l in newact.iteritems():
1374 actions.append(na)
1375 actions.append(na)
1375 actions += l
1376 actions += l
1376
1377
1377 rules = '\n'.join([act.torule() for act in actions])
1378 rules = '\n'.join([act.torule() for act in actions])
1378 rules += '\n\n'
1379 rules += '\n\n'
1379 rules += editcomment
1380 rules += editcomment
1380 rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'},
1381 rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'},
1381 repopath=repo.path)
1382 repopath=repo.path)
1382
1383
1383 # Save edit rules in .hg/histedit-last-edit.txt in case
1384 # Save edit rules in .hg/histedit-last-edit.txt in case
1384 # the user needs to ask for help after something
1385 # the user needs to ask for help after something
1385 # surprising happens.
1386 # surprising happens.
1386 f = open(repo.vfs.join('histedit-last-edit.txt'), 'w')
1387 f = open(repo.vfs.join('histedit-last-edit.txt'), 'w')
1387 f.write(rules)
1388 f.write(rules)
1388 f.close()
1389 f.close()
1389
1390
1390 return rules
1391 return rules
1391
1392
1392 def parserules(rules, state):
1393 def parserules(rules, state):
1393 """Read the histedit rules string and return list of action objects """
1394 """Read the histedit rules string and return list of action objects """
1394 rules = [l for l in (r.strip() for r in rules.splitlines())
1395 rules = [l for l in (r.strip() for r in rules.splitlines())
1395 if l and not l.startswith('#')]
1396 if l and not l.startswith('#')]
1396 actions = []
1397 actions = []
1397 for r in rules:
1398 for r in rules:
1398 if ' ' not in r:
1399 if ' ' not in r:
1399 raise error.ParseError(_('malformed line "%s"') % r)
1400 raise error.ParseError(_('malformed line "%s"') % r)
1400 verb, rest = r.split(' ', 1)
1401 verb, rest = r.split(' ', 1)
1401
1402
1402 if verb not in actiontable:
1403 if verb not in actiontable:
1403 raise error.ParseError(_('unknown action "%s"') % verb)
1404 raise error.ParseError(_('unknown action "%s"') % verb)
1404
1405
1405 action = actiontable[verb].fromrule(state, rest)
1406 action = actiontable[verb].fromrule(state, rest)
1406 actions.append(action)
1407 actions.append(action)
1407 return actions
1408 return actions
1408
1409
1409 def warnverifyactions(ui, repo, actions, state, ctxs):
1410 def warnverifyactions(ui, repo, actions, state, ctxs):
1410 try:
1411 try:
1411 verifyactions(actions, state, ctxs)
1412 verifyactions(actions, state, ctxs)
1412 except error.ParseError:
1413 except error.ParseError:
1413 if repo.vfs.exists('histedit-last-edit.txt'):
1414 if repo.vfs.exists('histedit-last-edit.txt'):
1414 ui.warn(_('warning: histedit rules saved '
1415 ui.warn(_('warning: histedit rules saved '
1415 'to: .hg/histedit-last-edit.txt\n'))
1416 'to: .hg/histedit-last-edit.txt\n'))
1416 raise
1417 raise
1417
1418
1418 def verifyactions(actions, state, ctxs):
1419 def verifyactions(actions, state, ctxs):
1419 """Verify that there exists exactly one action per given changeset and
1420 """Verify that there exists exactly one action per given changeset and
1420 other constraints.
1421 other constraints.
1421
1422
1422 Will abort if there are to many or too few rules, a malformed rule,
1423 Will abort if there are to many or too few rules, a malformed rule,
1423 or a rule on a changeset outside of the user-given range.
1424 or a rule on a changeset outside of the user-given range.
1424 """
1425 """
1425 expected = set(c.node() for c in ctxs)
1426 expected = set(c.node() for c in ctxs)
1426 seen = set()
1427 seen = set()
1427 prev = None
1428 prev = None
1428 for action in actions:
1429 for action in actions:
1429 action.verify(prev, expected, seen)
1430 action.verify(prev, expected, seen)
1430 prev = action
1431 prev = action
1431 if action.node is not None:
1432 if action.node is not None:
1432 seen.add(action.node)
1433 seen.add(action.node)
1433 missing = sorted(expected - seen) # sort to stabilize output
1434 missing = sorted(expected - seen) # sort to stabilize output
1434
1435
1435 if state.repo.ui.configbool('histedit', 'dropmissing'):
1436 if state.repo.ui.configbool('histedit', 'dropmissing'):
1436 if len(actions) == 0:
1437 if len(actions) == 0:
1437 raise error.ParseError(_('no rules provided'),
1438 raise error.ParseError(_('no rules provided'),
1438 hint=_('use strip extension to remove commits'))
1439 hint=_('use strip extension to remove commits'))
1439
1440
1440 drops = [drop(state, n) for n in missing]
1441 drops = [drop(state, n) for n in missing]
1441 # put the in the beginning so they execute immediately and
1442 # put the in the beginning so they execute immediately and
1442 # don't show in the edit-plan in the future
1443 # don't show in the edit-plan in the future
1443 actions[:0] = drops
1444 actions[:0] = drops
1444 elif missing:
1445 elif missing:
1445 raise error.ParseError(_('missing rules for changeset %s') %
1446 raise error.ParseError(_('missing rules for changeset %s') %
1446 node.short(missing[0]),
1447 node.short(missing[0]),
1447 hint=_('use "drop %s" to discard, see also: '
1448 hint=_('use "drop %s" to discard, see also: '
1448 "'hg help -e histedit.config'")
1449 "'hg help -e histedit.config'")
1449 % node.short(missing[0]))
1450 % node.short(missing[0]))
1450
1451
1451 def adjustreplacementsfrommarkers(repo, oldreplacements):
1452 def adjustreplacementsfrommarkers(repo, oldreplacements):
1452 """Adjust replacements from obsolescence markers
1453 """Adjust replacements from obsolescence markers
1453
1454
1454 Replacements structure is originally generated based on
1455 Replacements structure is originally generated based on
1455 histedit's state and does not account for changes that are
1456 histedit's state and does not account for changes that are
1456 not recorded there. This function fixes that by adding
1457 not recorded there. This function fixes that by adding
1457 data read from obsolescence markers"""
1458 data read from obsolescence markers"""
1458 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1459 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1459 return oldreplacements
1460 return oldreplacements
1460
1461
1461 unfi = repo.unfiltered()
1462 unfi = repo.unfiltered()
1462 nm = unfi.changelog.nodemap
1463 nm = unfi.changelog.nodemap
1463 obsstore = repo.obsstore
1464 obsstore = repo.obsstore
1464 newreplacements = list(oldreplacements)
1465 newreplacements = list(oldreplacements)
1465 oldsuccs = [r[1] for r in oldreplacements]
1466 oldsuccs = [r[1] for r in oldreplacements]
1466 # successors that have already been added to succstocheck once
1467 # successors that have already been added to succstocheck once
1467 seensuccs = set().union(*oldsuccs) # create a set from an iterable of tuples
1468 seensuccs = set().union(*oldsuccs) # create a set from an iterable of tuples
1468 succstocheck = list(seensuccs)
1469 succstocheck = list(seensuccs)
1469 while succstocheck:
1470 while succstocheck:
1470 n = succstocheck.pop()
1471 n = succstocheck.pop()
1471 missing = nm.get(n) is None
1472 missing = nm.get(n) is None
1472 markers = obsstore.successors.get(n, ())
1473 markers = obsstore.successors.get(n, ())
1473 if missing and not markers:
1474 if missing and not markers:
1474 # dead end, mark it as such
1475 # dead end, mark it as such
1475 newreplacements.append((n, ()))
1476 newreplacements.append((n, ()))
1476 for marker in markers:
1477 for marker in markers:
1477 nsuccs = marker[1]
1478 nsuccs = marker[1]
1478 newreplacements.append((n, nsuccs))
1479 newreplacements.append((n, nsuccs))
1479 for nsucc in nsuccs:
1480 for nsucc in nsuccs:
1480 if nsucc not in seensuccs:
1481 if nsucc not in seensuccs:
1481 seensuccs.add(nsucc)
1482 seensuccs.add(nsucc)
1482 succstocheck.append(nsucc)
1483 succstocheck.append(nsucc)
1483
1484
1484 return newreplacements
1485 return newreplacements
1485
1486
1486 def processreplacement(state):
1487 def processreplacement(state):
1487 """process the list of replacements to return
1488 """process the list of replacements to return
1488
1489
1489 1) the final mapping between original and created nodes
1490 1) the final mapping between original and created nodes
1490 2) the list of temporary node created by histedit
1491 2) the list of temporary node created by histedit
1491 3) the list of new commit created by histedit"""
1492 3) the list of new commit created by histedit"""
1492 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
1493 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
1493 allsuccs = set()
1494 allsuccs = set()
1494 replaced = set()
1495 replaced = set()
1495 fullmapping = {}
1496 fullmapping = {}
1496 # initialize basic set
1497 # initialize basic set
1497 # fullmapping records all operations recorded in replacement
1498 # fullmapping records all operations recorded in replacement
1498 for rep in replacements:
1499 for rep in replacements:
1499 allsuccs.update(rep[1])
1500 allsuccs.update(rep[1])
1500 replaced.add(rep[0])
1501 replaced.add(rep[0])
1501 fullmapping.setdefault(rep[0], set()).update(rep[1])
1502 fullmapping.setdefault(rep[0], set()).update(rep[1])
1502 new = allsuccs - replaced
1503 new = allsuccs - replaced
1503 tmpnodes = allsuccs & replaced
1504 tmpnodes = allsuccs & replaced
1504 # Reduce content fullmapping into direct relation between original nodes
1505 # Reduce content fullmapping into direct relation between original nodes
1505 # and final node created during history edition
1506 # and final node created during history edition
1506 # Dropped changeset are replaced by an empty list
1507 # Dropped changeset are replaced by an empty list
1507 toproceed = set(fullmapping)
1508 toproceed = set(fullmapping)
1508 final = {}
1509 final = {}
1509 while toproceed:
1510 while toproceed:
1510 for x in list(toproceed):
1511 for x in list(toproceed):
1511 succs = fullmapping[x]
1512 succs = fullmapping[x]
1512 for s in list(succs):
1513 for s in list(succs):
1513 if s in toproceed:
1514 if s in toproceed:
1514 # non final node with unknown closure
1515 # non final node with unknown closure
1515 # We can't process this now
1516 # We can't process this now
1516 break
1517 break
1517 elif s in final:
1518 elif s in final:
1518 # non final node, replace with closure
1519 # non final node, replace with closure
1519 succs.remove(s)
1520 succs.remove(s)
1520 succs.update(final[s])
1521 succs.update(final[s])
1521 else:
1522 else:
1522 final[x] = succs
1523 final[x] = succs
1523 toproceed.remove(x)
1524 toproceed.remove(x)
1524 # remove tmpnodes from final mapping
1525 # remove tmpnodes from final mapping
1525 for n in tmpnodes:
1526 for n in tmpnodes:
1526 del final[n]
1527 del final[n]
1527 # we expect all changes involved in final to exist in the repo
1528 # we expect all changes involved in final to exist in the repo
1528 # turn `final` into list (topologically sorted)
1529 # turn `final` into list (topologically sorted)
1529 nm = state.repo.changelog.nodemap
1530 nm = state.repo.changelog.nodemap
1530 for prec, succs in final.items():
1531 for prec, succs in final.items():
1531 final[prec] = sorted(succs, key=nm.get)
1532 final[prec] = sorted(succs, key=nm.get)
1532
1533
1533 # computed topmost element (necessary for bookmark)
1534 # computed topmost element (necessary for bookmark)
1534 if new:
1535 if new:
1535 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
1536 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
1536 elif not final:
1537 elif not final:
1537 # Nothing rewritten at all. we won't need `newtopmost`
1538 # Nothing rewritten at all. we won't need `newtopmost`
1538 # It is the same as `oldtopmost` and `processreplacement` know it
1539 # It is the same as `oldtopmost` and `processreplacement` know it
1539 newtopmost = None
1540 newtopmost = None
1540 else:
1541 else:
1541 # every body died. The newtopmost is the parent of the root.
1542 # every body died. The newtopmost is the parent of the root.
1542 r = state.repo.changelog.rev
1543 r = state.repo.changelog.rev
1543 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
1544 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
1544
1545
1545 return final, tmpnodes, new, newtopmost
1546 return final, tmpnodes, new, newtopmost
1546
1547
1547 def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
1548 def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
1548 """Move bookmark from old to newly created node"""
1549 """Move bookmark from old to newly created node"""
1549 if not mapping:
1550 if not mapping:
1550 # if nothing got rewritten there is not purpose for this function
1551 # if nothing got rewritten there is not purpose for this function
1551 return
1552 return
1552 moves = []
1553 moves = []
1553 for bk, old in sorted(repo._bookmarks.iteritems()):
1554 for bk, old in sorted(repo._bookmarks.iteritems()):
1554 if old == oldtopmost:
1555 if old == oldtopmost:
1555 # special case ensure bookmark stay on tip.
1556 # special case ensure bookmark stay on tip.
1556 #
1557 #
1557 # This is arguably a feature and we may only want that for the
1558 # This is arguably a feature and we may only want that for the
1558 # active bookmark. But the behavior is kept compatible with the old
1559 # active bookmark. But the behavior is kept compatible with the old
1559 # version for now.
1560 # version for now.
1560 moves.append((bk, newtopmost))
1561 moves.append((bk, newtopmost))
1561 continue
1562 continue
1562 base = old
1563 base = old
1563 new = mapping.get(base, None)
1564 new = mapping.get(base, None)
1564 if new is None:
1565 if new is None:
1565 continue
1566 continue
1566 while not new:
1567 while not new:
1567 # base is killed, trying with parent
1568 # base is killed, trying with parent
1568 base = repo[base].p1().node()
1569 base = repo[base].p1().node()
1569 new = mapping.get(base, (base,))
1570 new = mapping.get(base, (base,))
1570 # nothing to move
1571 # nothing to move
1571 moves.append((bk, new[-1]))
1572 moves.append((bk, new[-1]))
1572 if moves:
1573 if moves:
1573 lock = tr = None
1574 lock = tr = None
1574 try:
1575 try:
1575 lock = repo.lock()
1576 lock = repo.lock()
1576 tr = repo.transaction('histedit')
1577 tr = repo.transaction('histedit')
1577 marks = repo._bookmarks
1578 marks = repo._bookmarks
1578 for mark, new in moves:
1579 for mark, new in moves:
1579 old = marks[mark]
1580 old = marks[mark]
1580 ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
1581 ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
1581 % (mark, node.short(old), node.short(new)))
1582 % (mark, node.short(old), node.short(new)))
1582 marks[mark] = new
1583 marks[mark] = new
1583 marks.recordchange(tr)
1584 marks.recordchange(tr)
1584 tr.close()
1585 tr.close()
1585 finally:
1586 finally:
1586 release(tr, lock)
1587 release(tr, lock)
1587
1588
1588 def cleanupnode(ui, repo, name, nodes):
1589 def cleanupnode(ui, repo, name, nodes):
1589 """strip a group of nodes from the repository
1590 """strip a group of nodes from the repository
1590
1591
1591 The set of node to strip may contains unknown nodes."""
1592 The set of node to strip may contains unknown nodes."""
1592 ui.debug('should strip %s nodes %s\n' %
1593 ui.debug('should strip %s nodes %s\n' %
1593 (name, ', '.join([node.short(n) for n in nodes])))
1594 (name, ', '.join([node.short(n) for n in nodes])))
1594 with repo.lock():
1595 with repo.lock():
1595 # do not let filtering get in the way of the cleanse
1596 # do not let filtering get in the way of the cleanse
1596 # we should probably get rid of obsolescence marker created during the
1597 # we should probably get rid of obsolescence marker created during the
1597 # histedit, but we currently do not have such information.
1598 # histedit, but we currently do not have such information.
1598 repo = repo.unfiltered()
1599 repo = repo.unfiltered()
1599 # Find all nodes that need to be stripped
1600 # Find all nodes that need to be stripped
1600 # (we use %lr instead of %ln to silently ignore unknown items)
1601 # (we use %lr instead of %ln to silently ignore unknown items)
1601 nm = repo.changelog.nodemap
1602 nm = repo.changelog.nodemap
1602 nodes = sorted(n for n in nodes if n in nm)
1603 nodes = sorted(n for n in nodes if n in nm)
1603 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
1604 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
1604 for c in roots:
1605 for c in roots:
1605 # We should process node in reverse order to strip tip most first.
1606 # We should process node in reverse order to strip tip most first.
1606 # but this trigger a bug in changegroup hook.
1607 # but this trigger a bug in changegroup hook.
1607 # This would reduce bundle overhead
1608 # This would reduce bundle overhead
1608 repair.strip(ui, repo, c)
1609 repair.strip(ui, repo, c)
1609
1610
1610 def safecleanupnode(ui, repo, name, nodes):
1611 def safecleanupnode(ui, repo, name, nodes):
1611 """strip or obsolete nodes
1612 """strip or obsolete nodes
1612
1613
1613 nodes could be either a set or dict which maps to replacements.
1614 nodes could be either a set or dict which maps to replacements.
1614 nodes could be unknown (outside the repo).
1615 nodes could be unknown (outside the repo).
1615 """
1616 """
1616 supportsmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
1617 supportsmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
1617 if supportsmarkers:
1618 if supportsmarkers:
1618 if util.safehasattr(nodes, 'get'):
1619 if util.safehasattr(nodes, 'get'):
1619 # nodes is a dict-like mapping
1620 # nodes is a dict-like mapping
1620 # use unfiltered repo for successors in case they are hidden
1621 # use unfiltered repo for successors in case they are hidden
1621 urepo = repo.unfiltered()
1622 urepo = repo.unfiltered()
1622 def getmarker(prec):
1623 def getmarker(prec):
1623 succs = tuple(urepo[n] for n in nodes.get(prec, ()))
1624 succs = tuple(urepo[n] for n in nodes.get(prec, ()))
1624 return (repo[prec], succs)
1625 return (repo[prec], succs)
1625 else:
1626 else:
1626 # nodes is a set-like
1627 # nodes is a set-like
1627 def getmarker(prec):
1628 def getmarker(prec):
1628 return (repo[prec], ())
1629 return (repo[prec], ())
1629 # sort by revision number because it sound "right"
1630 # sort by revision number because it sound "right"
1630 sortednodes = sorted([n for n in nodes if n in repo],
1631 sortednodes = sorted([n for n in nodes if n in repo],
1631 key=repo.changelog.rev)
1632 key=repo.changelog.rev)
1632 markers = [getmarker(t) for t in sortednodes]
1633 markers = [getmarker(t) for t in sortednodes]
1633 if markers:
1634 if markers:
1634 obsolete.createmarkers(repo, markers, operation='histedit')
1635 obsolete.createmarkers(repo, markers, operation='histedit')
1635 else:
1636 else:
1636 return cleanupnode(ui, repo, name, nodes)
1637 return cleanupnode(ui, repo, name, nodes)
1637
1638
1638 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
1639 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
1639 if isinstance(nodelist, str):
1640 if isinstance(nodelist, str):
1640 nodelist = [nodelist]
1641 nodelist = [nodelist]
1641 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
1642 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
1642 state = histeditstate(repo)
1643 state = histeditstate(repo)
1643 state.read()
1644 state.read()
1644 histedit_nodes = {action.node for action
1645 histedit_nodes = {action.node for action
1645 in state.actions if action.node}
1646 in state.actions if action.node}
1646 common_nodes = histedit_nodes & set(nodelist)
1647 common_nodes = histedit_nodes & set(nodelist)
1647 if common_nodes:
1648 if common_nodes:
1648 raise error.Abort(_("histedit in progress, can't strip %s")
1649 raise error.Abort(_("histedit in progress, can't strip %s")
1649 % ', '.join(node.short(x) for x in common_nodes))
1650 % ', '.join(node.short(x) for x in common_nodes))
1650 return orig(ui, repo, nodelist, *args, **kwargs)
1651 return orig(ui, repo, nodelist, *args, **kwargs)
1651
1652
1652 extensions.wrapfunction(repair, 'strip', stripwrapper)
1653 extensions.wrapfunction(repair, 'strip', stripwrapper)
1653
1654
1654 def summaryhook(ui, repo):
1655 def summaryhook(ui, repo):
1655 if not os.path.exists(repo.vfs.join('histedit-state')):
1656 if not os.path.exists(repo.vfs.join('histedit-state')):
1656 return
1657 return
1657 state = histeditstate(repo)
1658 state = histeditstate(repo)
1658 state.read()
1659 state.read()
1659 if state.actions:
1660 if state.actions:
1660 # i18n: column positioning for "hg summary"
1661 # i18n: column positioning for "hg summary"
1661 ui.write(_('hist: %s (histedit --continue)\n') %
1662 ui.write(_('hist: %s (histedit --continue)\n') %
1662 (ui.label(_('%d remaining'), 'histedit.remaining') %
1663 (ui.label(_('%d remaining'), 'histedit.remaining') %
1663 len(state.actions)))
1664 len(state.actions)))
1664
1665
1665 def extsetup(ui):
1666 def extsetup(ui):
1666 cmdutil.summaryhooks.add('histedit', summaryhook)
1667 cmdutil.summaryhooks.add('histedit', summaryhook)
1667 cmdutil.unfinishedstates.append(
1668 cmdutil.unfinishedstates.append(
1668 ['histedit-state', False, True, _('histedit in progress'),
1669 ['histedit-state', False, True, _('histedit in progress'),
1669 _("use 'hg histedit --continue' or 'hg histedit --abort'")])
1670 _("use 'hg histedit --continue' or 'hg histedit --abort'")])
1670 cmdutil.afterresolvedstates.append(
1671 cmdutil.afterresolvedstates.append(
1671 ['histedit-state', _('hg histedit --continue')])
1672 ['histedit-state', _('hg histedit --continue')])
1672 if ui.configbool("experimental", "histeditng"):
1673 if ui.configbool("experimental", "histeditng"):
1673 globals()['base'] = action(['base', 'b'],
1674 globals()['base'] = action(['base', 'b'],
1674 _('checkout changeset and apply further changesets from there')
1675 _('checkout changeset and apply further changesets from there')
1675 )(base)
1676 )(base)
@@ -1,503 +1,504 b''
1 # journal.py
1 # journal.py
2 #
2 #
3 # Copyright 2014-2016 Facebook, Inc.
3 # Copyright 2014-2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """track previous positions of bookmarks (EXPERIMENTAL)
7 """track previous positions of bookmarks (EXPERIMENTAL)
8
8
9 This extension adds a new command: `hg journal`, which shows you where
9 This extension adds a new command: `hg journal`, which shows you where
10 bookmarks were previously located.
10 bookmarks were previously located.
11
11
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import collections
16 import collections
17 import errno
17 import errno
18 import os
18 import os
19 import weakref
19 import weakref
20
20
21 from mercurial.i18n import _
21 from mercurial.i18n import _
22
22
23 from mercurial import (
23 from mercurial import (
24 bookmarks,
24 bookmarks,
25 cmdutil,
25 cmdutil,
26 commands,
26 commands,
27 dispatch,
27 dispatch,
28 error,
28 error,
29 extensions,
29 extensions,
30 hg,
30 hg,
31 localrepo,
31 localrepo,
32 lock,
32 lock,
33 node,
33 node,
34 registrar,
34 util,
35 util,
35 )
36 )
36
37
37 from . import share
38 from . import share
38
39
39 cmdtable = {}
40 cmdtable = {}
40 command = cmdutil.command(cmdtable)
41 command = registrar.command(cmdtable)
41
42
42 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
43 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
43 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
44 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
44 # be specifying the version(s) of Mercurial they are tested with, or
45 # be specifying the version(s) of Mercurial they are tested with, or
45 # leave the attribute unspecified.
46 # leave the attribute unspecified.
46 testedwith = 'ships-with-hg-core'
47 testedwith = 'ships-with-hg-core'
47
48
48 # storage format version; increment when the format changes
49 # storage format version; increment when the format changes
49 storageversion = 0
50 storageversion = 0
50
51
51 # namespaces
52 # namespaces
52 bookmarktype = 'bookmark'
53 bookmarktype = 'bookmark'
53 wdirparenttype = 'wdirparent'
54 wdirparenttype = 'wdirparent'
54 # In a shared repository, what shared feature name is used
55 # In a shared repository, what shared feature name is used
55 # to indicate this namespace is shared with the source?
56 # to indicate this namespace is shared with the source?
56 sharednamespaces = {
57 sharednamespaces = {
57 bookmarktype: hg.sharedbookmarks,
58 bookmarktype: hg.sharedbookmarks,
58 }
59 }
59
60
60 # Journal recording, register hooks and storage object
61 # Journal recording, register hooks and storage object
61 def extsetup(ui):
62 def extsetup(ui):
62 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
63 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
63 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
64 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
64 extensions.wrapfunction(
65 extensions.wrapfunction(
65 localrepo.localrepository.dirstate, 'func', wrapdirstate)
66 localrepo.localrepository.dirstate, 'func', wrapdirstate)
66 extensions.wrapfunction(hg, 'postshare', wrappostshare)
67 extensions.wrapfunction(hg, 'postshare', wrappostshare)
67 extensions.wrapfunction(hg, 'copystore', unsharejournal)
68 extensions.wrapfunction(hg, 'copystore', unsharejournal)
68
69
69 def reposetup(ui, repo):
70 def reposetup(ui, repo):
70 if repo.local():
71 if repo.local():
71 repo.journal = journalstorage(repo)
72 repo.journal = journalstorage(repo)
72
73
73 def runcommand(orig, lui, repo, cmd, fullargs, *args):
74 def runcommand(orig, lui, repo, cmd, fullargs, *args):
74 """Track the command line options for recording in the journal"""
75 """Track the command line options for recording in the journal"""
75 journalstorage.recordcommand(*fullargs)
76 journalstorage.recordcommand(*fullargs)
76 return orig(lui, repo, cmd, fullargs, *args)
77 return orig(lui, repo, cmd, fullargs, *args)
77
78
78 # hooks to record dirstate changes
79 # hooks to record dirstate changes
79 def wrapdirstate(orig, repo):
80 def wrapdirstate(orig, repo):
80 """Make journal storage available to the dirstate object"""
81 """Make journal storage available to the dirstate object"""
81 dirstate = orig(repo)
82 dirstate = orig(repo)
82 if util.safehasattr(repo, 'journal'):
83 if util.safehasattr(repo, 'journal'):
83 dirstate.journalstorage = repo.journal
84 dirstate.journalstorage = repo.journal
84 dirstate.addparentchangecallback('journal', recorddirstateparents)
85 dirstate.addparentchangecallback('journal', recorddirstateparents)
85 return dirstate
86 return dirstate
86
87
87 def recorddirstateparents(dirstate, old, new):
88 def recorddirstateparents(dirstate, old, new):
88 """Records all dirstate parent changes in the journal."""
89 """Records all dirstate parent changes in the journal."""
89 old = list(old)
90 old = list(old)
90 new = list(new)
91 new = list(new)
91 if util.safehasattr(dirstate, 'journalstorage'):
92 if util.safehasattr(dirstate, 'journalstorage'):
92 # only record two hashes if there was a merge
93 # only record two hashes if there was a merge
93 oldhashes = old[:1] if old[1] == node.nullid else old
94 oldhashes = old[:1] if old[1] == node.nullid else old
94 newhashes = new[:1] if new[1] == node.nullid else new
95 newhashes = new[:1] if new[1] == node.nullid else new
95 dirstate.journalstorage.record(
96 dirstate.journalstorage.record(
96 wdirparenttype, '.', oldhashes, newhashes)
97 wdirparenttype, '.', oldhashes, newhashes)
97
98
98 # hooks to record bookmark changes (both local and remote)
99 # hooks to record bookmark changes (both local and remote)
99 def recordbookmarks(orig, store, fp):
100 def recordbookmarks(orig, store, fp):
100 """Records all bookmark changes in the journal."""
101 """Records all bookmark changes in the journal."""
101 repo = store._repo
102 repo = store._repo
102 if util.safehasattr(repo, 'journal'):
103 if util.safehasattr(repo, 'journal'):
103 oldmarks = bookmarks.bmstore(repo)
104 oldmarks = bookmarks.bmstore(repo)
104 for mark, value in store.iteritems():
105 for mark, value in store.iteritems():
105 oldvalue = oldmarks.get(mark, node.nullid)
106 oldvalue = oldmarks.get(mark, node.nullid)
106 if value != oldvalue:
107 if value != oldvalue:
107 repo.journal.record(bookmarktype, mark, oldvalue, value)
108 repo.journal.record(bookmarktype, mark, oldvalue, value)
108 return orig(store, fp)
109 return orig(store, fp)
109
110
110 # shared repository support
111 # shared repository support
111 def _readsharedfeatures(repo):
112 def _readsharedfeatures(repo):
112 """A set of shared features for this repository"""
113 """A set of shared features for this repository"""
113 try:
114 try:
114 return set(repo.vfs.read('shared').splitlines())
115 return set(repo.vfs.read('shared').splitlines())
115 except IOError as inst:
116 except IOError as inst:
116 if inst.errno != errno.ENOENT:
117 if inst.errno != errno.ENOENT:
117 raise
118 raise
118 return set()
119 return set()
119
120
120 def _mergeentriesiter(*iterables, **kwargs):
121 def _mergeentriesiter(*iterables, **kwargs):
121 """Given a set of sorted iterables, yield the next entry in merged order
122 """Given a set of sorted iterables, yield the next entry in merged order
122
123
123 Note that by default entries go from most recent to oldest.
124 Note that by default entries go from most recent to oldest.
124 """
125 """
125 order = kwargs.pop('order', max)
126 order = kwargs.pop('order', max)
126 iterables = [iter(it) for it in iterables]
127 iterables = [iter(it) for it in iterables]
127 # this tracks still active iterables; iterables are deleted as they are
128 # this tracks still active iterables; iterables are deleted as they are
128 # exhausted, which is why this is a dictionary and why each entry also
129 # exhausted, which is why this is a dictionary and why each entry also
129 # stores the key. Entries are mutable so we can store the next value each
130 # stores the key. Entries are mutable so we can store the next value each
130 # time.
131 # time.
131 iterable_map = {}
132 iterable_map = {}
132 for key, it in enumerate(iterables):
133 for key, it in enumerate(iterables):
133 try:
134 try:
134 iterable_map[key] = [next(it), key, it]
135 iterable_map[key] = [next(it), key, it]
135 except StopIteration:
136 except StopIteration:
136 # empty entry, can be ignored
137 # empty entry, can be ignored
137 pass
138 pass
138
139
139 while iterable_map:
140 while iterable_map:
140 value, key, it = order(iterable_map.itervalues())
141 value, key, it = order(iterable_map.itervalues())
141 yield value
142 yield value
142 try:
143 try:
143 iterable_map[key][0] = next(it)
144 iterable_map[key][0] = next(it)
144 except StopIteration:
145 except StopIteration:
145 # this iterable is empty, remove it from consideration
146 # this iterable is empty, remove it from consideration
146 del iterable_map[key]
147 del iterable_map[key]
147
148
148 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
149 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
149 """Mark this shared working copy as sharing journal information"""
150 """Mark this shared working copy as sharing journal information"""
150 with destrepo.wlock():
151 with destrepo.wlock():
151 orig(sourcerepo, destrepo, **kwargs)
152 orig(sourcerepo, destrepo, **kwargs)
152 with destrepo.vfs('shared', 'a') as fp:
153 with destrepo.vfs('shared', 'a') as fp:
153 fp.write('journal\n')
154 fp.write('journal\n')
154
155
155 def unsharejournal(orig, ui, repo, repopath):
156 def unsharejournal(orig, ui, repo, repopath):
156 """Copy shared journal entries into this repo when unsharing"""
157 """Copy shared journal entries into this repo when unsharing"""
157 if (repo.path == repopath and repo.shared() and
158 if (repo.path == repopath and repo.shared() and
158 util.safehasattr(repo, 'journal')):
159 util.safehasattr(repo, 'journal')):
159 sharedrepo = share._getsrcrepo(repo)
160 sharedrepo = share._getsrcrepo(repo)
160 sharedfeatures = _readsharedfeatures(repo)
161 sharedfeatures = _readsharedfeatures(repo)
161 if sharedrepo and sharedfeatures > {'journal'}:
162 if sharedrepo and sharedfeatures > {'journal'}:
162 # there is a shared repository and there are shared journal entries
163 # there is a shared repository and there are shared journal entries
163 # to copy. move shared date over from source to destination but
164 # to copy. move shared date over from source to destination but
164 # move the local file first
165 # move the local file first
165 if repo.vfs.exists('namejournal'):
166 if repo.vfs.exists('namejournal'):
166 journalpath = repo.vfs.join('namejournal')
167 journalpath = repo.vfs.join('namejournal')
167 util.rename(journalpath, journalpath + '.bak')
168 util.rename(journalpath, journalpath + '.bak')
168 storage = repo.journal
169 storage = repo.journal
169 local = storage._open(
170 local = storage._open(
170 repo.vfs, filename='namejournal.bak', _newestfirst=False)
171 repo.vfs, filename='namejournal.bak', _newestfirst=False)
171 shared = (
172 shared = (
172 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
173 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
173 if sharednamespaces.get(e.namespace) in sharedfeatures)
174 if sharednamespaces.get(e.namespace) in sharedfeatures)
174 for entry in _mergeentriesiter(local, shared, order=min):
175 for entry in _mergeentriesiter(local, shared, order=min):
175 storage._write(repo.vfs, entry)
176 storage._write(repo.vfs, entry)
176
177
177 return orig(ui, repo, repopath)
178 return orig(ui, repo, repopath)
178
179
179 class journalentry(collections.namedtuple(
180 class journalentry(collections.namedtuple(
180 u'journalentry',
181 u'journalentry',
181 u'timestamp user command namespace name oldhashes newhashes')):
182 u'timestamp user command namespace name oldhashes newhashes')):
182 """Individual journal entry
183 """Individual journal entry
183
184
184 * timestamp: a mercurial (time, timezone) tuple
185 * timestamp: a mercurial (time, timezone) tuple
185 * user: the username that ran the command
186 * user: the username that ran the command
186 * namespace: the entry namespace, an opaque string
187 * namespace: the entry namespace, an opaque string
187 * name: the name of the changed item, opaque string with meaning in the
188 * name: the name of the changed item, opaque string with meaning in the
188 namespace
189 namespace
189 * command: the hg command that triggered this record
190 * command: the hg command that triggered this record
190 * oldhashes: a tuple of one or more binary hashes for the old location
191 * oldhashes: a tuple of one or more binary hashes for the old location
191 * newhashes: a tuple of one or more binary hashes for the new location
192 * newhashes: a tuple of one or more binary hashes for the new location
192
193
193 Handles serialisation from and to the storage format. Fields are
194 Handles serialisation from and to the storage format. Fields are
194 separated by newlines, hashes are written out in hex separated by commas,
195 separated by newlines, hashes are written out in hex separated by commas,
195 timestamp and timezone are separated by a space.
196 timestamp and timezone are separated by a space.
196
197
197 """
198 """
198 @classmethod
199 @classmethod
199 def fromstorage(cls, line):
200 def fromstorage(cls, line):
200 (time, user, command, namespace, name,
201 (time, user, command, namespace, name,
201 oldhashes, newhashes) = line.split('\n')
202 oldhashes, newhashes) = line.split('\n')
202 timestamp, tz = time.split()
203 timestamp, tz = time.split()
203 timestamp, tz = float(timestamp), int(tz)
204 timestamp, tz = float(timestamp), int(tz)
204 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
205 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
205 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
206 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
206 return cls(
207 return cls(
207 (timestamp, tz), user, command, namespace, name,
208 (timestamp, tz), user, command, namespace, name,
208 oldhashes, newhashes)
209 oldhashes, newhashes)
209
210
210 def __str__(self):
211 def __str__(self):
211 """String representation for storage"""
212 """String representation for storage"""
212 time = ' '.join(map(str, self.timestamp))
213 time = ' '.join(map(str, self.timestamp))
213 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
214 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
214 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
215 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
215 return '\n'.join((
216 return '\n'.join((
216 time, self.user, self.command, self.namespace, self.name,
217 time, self.user, self.command, self.namespace, self.name,
217 oldhashes, newhashes))
218 oldhashes, newhashes))
218
219
219 class journalstorage(object):
220 class journalstorage(object):
220 """Storage for journal entries
221 """Storage for journal entries
221
222
222 Entries are divided over two files; one with entries that pertain to the
223 Entries are divided over two files; one with entries that pertain to the
223 local working copy *only*, and one with entries that are shared across
224 local working copy *only*, and one with entries that are shared across
224 multiple working copies when shared using the share extension.
225 multiple working copies when shared using the share extension.
225
226
226 Entries are stored with NUL bytes as separators. See the journalentry
227 Entries are stored with NUL bytes as separators. See the journalentry
227 class for the per-entry structure.
228 class for the per-entry structure.
228
229
229 The file format starts with an integer version, delimited by a NUL.
230 The file format starts with an integer version, delimited by a NUL.
230
231
231 This storage uses a dedicated lock; this makes it easier to avoid issues
232 This storage uses a dedicated lock; this makes it easier to avoid issues
232 with adding entries that added when the regular wlock is unlocked (e.g.
233 with adding entries that added when the regular wlock is unlocked (e.g.
233 the dirstate).
234 the dirstate).
234
235
235 """
236 """
236 _currentcommand = ()
237 _currentcommand = ()
237 _lockref = None
238 _lockref = None
238
239
239 def __init__(self, repo):
240 def __init__(self, repo):
240 self.user = util.getuser()
241 self.user = util.getuser()
241 self.ui = repo.ui
242 self.ui = repo.ui
242 self.vfs = repo.vfs
243 self.vfs = repo.vfs
243
244
244 # is this working copy using a shared storage?
245 # is this working copy using a shared storage?
245 self.sharedfeatures = self.sharedvfs = None
246 self.sharedfeatures = self.sharedvfs = None
246 if repo.shared():
247 if repo.shared():
247 features = _readsharedfeatures(repo)
248 features = _readsharedfeatures(repo)
248 sharedrepo = share._getsrcrepo(repo)
249 sharedrepo = share._getsrcrepo(repo)
249 if sharedrepo is not None and 'journal' in features:
250 if sharedrepo is not None and 'journal' in features:
250 self.sharedvfs = sharedrepo.vfs
251 self.sharedvfs = sharedrepo.vfs
251 self.sharedfeatures = features
252 self.sharedfeatures = features
252
253
253 # track the current command for recording in journal entries
254 # track the current command for recording in journal entries
254 @property
255 @property
255 def command(self):
256 def command(self):
256 commandstr = ' '.join(
257 commandstr = ' '.join(
257 map(util.shellquote, journalstorage._currentcommand))
258 map(util.shellquote, journalstorage._currentcommand))
258 if '\n' in commandstr:
259 if '\n' in commandstr:
259 # truncate multi-line commands
260 # truncate multi-line commands
260 commandstr = commandstr.partition('\n')[0] + ' ...'
261 commandstr = commandstr.partition('\n')[0] + ' ...'
261 return commandstr
262 return commandstr
262
263
263 @classmethod
264 @classmethod
264 def recordcommand(cls, *fullargs):
265 def recordcommand(cls, *fullargs):
265 """Set the current hg arguments, stored with recorded entries"""
266 """Set the current hg arguments, stored with recorded entries"""
266 # Set the current command on the class because we may have started
267 # Set the current command on the class because we may have started
267 # with a non-local repo (cloning for example).
268 # with a non-local repo (cloning for example).
268 cls._currentcommand = fullargs
269 cls._currentcommand = fullargs
269
270
270 def _currentlock(self, lockref):
271 def _currentlock(self, lockref):
271 """Returns the lock if it's held, or None if it's not.
272 """Returns the lock if it's held, or None if it's not.
272
273
273 (This is copied from the localrepo class)
274 (This is copied from the localrepo class)
274 """
275 """
275 if lockref is None:
276 if lockref is None:
276 return None
277 return None
277 l = lockref()
278 l = lockref()
278 if l is None or not l.held:
279 if l is None or not l.held:
279 return None
280 return None
280 return l
281 return l
281
282
282 def jlock(self, vfs):
283 def jlock(self, vfs):
283 """Create a lock for the journal file"""
284 """Create a lock for the journal file"""
284 if self._currentlock(self._lockref) is not None:
285 if self._currentlock(self._lockref) is not None:
285 raise error.Abort(_('journal lock does not support nesting'))
286 raise error.Abort(_('journal lock does not support nesting'))
286 desc = _('journal of %s') % vfs.base
287 desc = _('journal of %s') % vfs.base
287 try:
288 try:
288 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
289 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
289 except error.LockHeld as inst:
290 except error.LockHeld as inst:
290 self.ui.warn(
291 self.ui.warn(
291 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
292 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
292 # default to 600 seconds timeout
293 # default to 600 seconds timeout
293 l = lock.lock(
294 l = lock.lock(
294 vfs, 'namejournal.lock',
295 vfs, 'namejournal.lock',
295 int(self.ui.config("ui", "timeout", "600")), desc=desc)
296 int(self.ui.config("ui", "timeout", "600")), desc=desc)
296 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
297 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
297 self._lockref = weakref.ref(l)
298 self._lockref = weakref.ref(l)
298 return l
299 return l
299
300
300 def record(self, namespace, name, oldhashes, newhashes):
301 def record(self, namespace, name, oldhashes, newhashes):
301 """Record a new journal entry
302 """Record a new journal entry
302
303
303 * namespace: an opaque string; this can be used to filter on the type
304 * namespace: an opaque string; this can be used to filter on the type
304 of recorded entries.
305 of recorded entries.
305 * name: the name defining this entry; for bookmarks, this is the
306 * name: the name defining this entry; for bookmarks, this is the
306 bookmark name. Can be filtered on when retrieving entries.
307 bookmark name. Can be filtered on when retrieving entries.
307 * oldhashes and newhashes: each a single binary hash, or a list of
308 * oldhashes and newhashes: each a single binary hash, or a list of
308 binary hashes. These represent the old and new position of the named
309 binary hashes. These represent the old and new position of the named
309 item.
310 item.
310
311
311 """
312 """
312 if not isinstance(oldhashes, list):
313 if not isinstance(oldhashes, list):
313 oldhashes = [oldhashes]
314 oldhashes = [oldhashes]
314 if not isinstance(newhashes, list):
315 if not isinstance(newhashes, list):
315 newhashes = [newhashes]
316 newhashes = [newhashes]
316
317
317 entry = journalentry(
318 entry = journalentry(
318 util.makedate(), self.user, self.command, namespace, name,
319 util.makedate(), self.user, self.command, namespace, name,
319 oldhashes, newhashes)
320 oldhashes, newhashes)
320
321
321 vfs = self.vfs
322 vfs = self.vfs
322 if self.sharedvfs is not None:
323 if self.sharedvfs is not None:
323 # write to the shared repository if this feature is being
324 # write to the shared repository if this feature is being
324 # shared between working copies.
325 # shared between working copies.
325 if sharednamespaces.get(namespace) in self.sharedfeatures:
326 if sharednamespaces.get(namespace) in self.sharedfeatures:
326 vfs = self.sharedvfs
327 vfs = self.sharedvfs
327
328
328 self._write(vfs, entry)
329 self._write(vfs, entry)
329
330
330 def _write(self, vfs, entry):
331 def _write(self, vfs, entry):
331 with self.jlock(vfs):
332 with self.jlock(vfs):
332 version = None
333 version = None
333 # open file in amend mode to ensure it is created if missing
334 # open file in amend mode to ensure it is created if missing
334 with vfs('namejournal', mode='a+b', atomictemp=True) as f:
335 with vfs('namejournal', mode='a+b', atomictemp=True) as f:
335 f.seek(0, os.SEEK_SET)
336 f.seek(0, os.SEEK_SET)
336 # Read just enough bytes to get a version number (up to 2
337 # Read just enough bytes to get a version number (up to 2
337 # digits plus separator)
338 # digits plus separator)
338 version = f.read(3).partition('\0')[0]
339 version = f.read(3).partition('\0')[0]
339 if version and version != str(storageversion):
340 if version and version != str(storageversion):
340 # different version of the storage. Exit early (and not
341 # different version of the storage. Exit early (and not
341 # write anything) if this is not a version we can handle or
342 # write anything) if this is not a version we can handle or
342 # the file is corrupt. In future, perhaps rotate the file
343 # the file is corrupt. In future, perhaps rotate the file
343 # instead?
344 # instead?
344 self.ui.warn(
345 self.ui.warn(
345 _("unsupported journal file version '%s'\n") % version)
346 _("unsupported journal file version '%s'\n") % version)
346 return
347 return
347 if not version:
348 if not version:
348 # empty file, write version first
349 # empty file, write version first
349 f.write(str(storageversion) + '\0')
350 f.write(str(storageversion) + '\0')
350 f.seek(0, os.SEEK_END)
351 f.seek(0, os.SEEK_END)
351 f.write(str(entry) + '\0')
352 f.write(str(entry) + '\0')
352
353
353 def filtered(self, namespace=None, name=None):
354 def filtered(self, namespace=None, name=None):
354 """Yield all journal entries with the given namespace or name
355 """Yield all journal entries with the given namespace or name
355
356
356 Both the namespace and the name are optional; if neither is given all
357 Both the namespace and the name are optional; if neither is given all
357 entries in the journal are produced.
358 entries in the journal are produced.
358
359
359 Matching supports regular expressions by using the `re:` prefix
360 Matching supports regular expressions by using the `re:` prefix
360 (use `literal:` to match names or namespaces that start with `re:`)
361 (use `literal:` to match names or namespaces that start with `re:`)
361
362
362 """
363 """
363 if namespace is not None:
364 if namespace is not None:
364 namespace = util.stringmatcher(namespace)[-1]
365 namespace = util.stringmatcher(namespace)[-1]
365 if name is not None:
366 if name is not None:
366 name = util.stringmatcher(name)[-1]
367 name = util.stringmatcher(name)[-1]
367 for entry in self:
368 for entry in self:
368 if namespace is not None and not namespace(entry.namespace):
369 if namespace is not None and not namespace(entry.namespace):
369 continue
370 continue
370 if name is not None and not name(entry.name):
371 if name is not None and not name(entry.name):
371 continue
372 continue
372 yield entry
373 yield entry
373
374
374 def __iter__(self):
375 def __iter__(self):
375 """Iterate over the storage
376 """Iterate over the storage
376
377
377 Yields journalentry instances for each contained journal record.
378 Yields journalentry instances for each contained journal record.
378
379
379 """
380 """
380 local = self._open(self.vfs)
381 local = self._open(self.vfs)
381
382
382 if self.sharedvfs is None:
383 if self.sharedvfs is None:
383 return local
384 return local
384
385
385 # iterate over both local and shared entries, but only those
386 # iterate over both local and shared entries, but only those
386 # shared entries that are among the currently shared features
387 # shared entries that are among the currently shared features
387 shared = (
388 shared = (
388 e for e in self._open(self.sharedvfs)
389 e for e in self._open(self.sharedvfs)
389 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
390 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
390 return _mergeentriesiter(local, shared)
391 return _mergeentriesiter(local, shared)
391
392
392 def _open(self, vfs, filename='namejournal', _newestfirst=True):
393 def _open(self, vfs, filename='namejournal', _newestfirst=True):
393 if not vfs.exists(filename):
394 if not vfs.exists(filename):
394 return
395 return
395
396
396 with vfs(filename) as f:
397 with vfs(filename) as f:
397 raw = f.read()
398 raw = f.read()
398
399
399 lines = raw.split('\0')
400 lines = raw.split('\0')
400 version = lines and lines[0]
401 version = lines and lines[0]
401 if version != str(storageversion):
402 if version != str(storageversion):
402 version = version or _('not available')
403 version = version or _('not available')
403 raise error.Abort(_("unknown journal file version '%s'") % version)
404 raise error.Abort(_("unknown journal file version '%s'") % version)
404
405
405 # Skip the first line, it's a version number. Normally we iterate over
406 # Skip the first line, it's a version number. Normally we iterate over
406 # these in reverse order to list newest first; only when copying across
407 # these in reverse order to list newest first; only when copying across
407 # a shared storage do we forgo reversing.
408 # a shared storage do we forgo reversing.
408 lines = lines[1:]
409 lines = lines[1:]
409 if _newestfirst:
410 if _newestfirst:
410 lines = reversed(lines)
411 lines = reversed(lines)
411 for line in lines:
412 for line in lines:
412 if not line:
413 if not line:
413 continue
414 continue
414 yield journalentry.fromstorage(line)
415 yield journalentry.fromstorage(line)
415
416
416 # journal reading
417 # journal reading
417 # log options that don't make sense for journal
418 # log options that don't make sense for journal
418 _ignoreopts = ('no-merges', 'graph')
419 _ignoreopts = ('no-merges', 'graph')
419 @command(
420 @command(
420 'journal', [
421 'journal', [
421 ('', 'all', None, 'show history for all names'),
422 ('', 'all', None, 'show history for all names'),
422 ('c', 'commits', None, 'show commit metadata'),
423 ('c', 'commits', None, 'show commit metadata'),
423 ] + [opt for opt in commands.logopts if opt[1] not in _ignoreopts],
424 ] + [opt for opt in commands.logopts if opt[1] not in _ignoreopts],
424 '[OPTION]... [BOOKMARKNAME]')
425 '[OPTION]... [BOOKMARKNAME]')
425 def journal(ui, repo, *args, **opts):
426 def journal(ui, repo, *args, **opts):
426 """show the previous position of bookmarks and the working copy
427 """show the previous position of bookmarks and the working copy
427
428
428 The journal is used to see the previous commits that bookmarks and the
429 The journal is used to see the previous commits that bookmarks and the
429 working copy pointed to. By default the previous locations for the working
430 working copy pointed to. By default the previous locations for the working
430 copy. Passing a bookmark name will show all the previous positions of
431 copy. Passing a bookmark name will show all the previous positions of
431 that bookmark. Use the --all switch to show previous locations for all
432 that bookmark. Use the --all switch to show previous locations for all
432 bookmarks and the working copy; each line will then include the bookmark
433 bookmarks and the working copy; each line will then include the bookmark
433 name, or '.' for the working copy, as well.
434 name, or '.' for the working copy, as well.
434
435
435 If `name` starts with `re:`, the remainder of the name is treated as
436 If `name` starts with `re:`, the remainder of the name is treated as
436 a regular expression. To match a name that actually starts with `re:`,
437 a regular expression. To match a name that actually starts with `re:`,
437 use the prefix `literal:`.
438 use the prefix `literal:`.
438
439
439 By default hg journal only shows the commit hash and the command that was
440 By default hg journal only shows the commit hash and the command that was
440 running at that time. -v/--verbose will show the prior hash, the user, and
441 running at that time. -v/--verbose will show the prior hash, the user, and
441 the time at which it happened.
442 the time at which it happened.
442
443
443 Use -c/--commits to output log information on each commit hash; at this
444 Use -c/--commits to output log information on each commit hash; at this
444 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
445 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
445 switches to alter the log output for these.
446 switches to alter the log output for these.
446
447
447 `hg journal -T json` can be used to produce machine readable output.
448 `hg journal -T json` can be used to produce machine readable output.
448
449
449 """
450 """
450 name = '.'
451 name = '.'
451 if opts.get('all'):
452 if opts.get('all'):
452 if args:
453 if args:
453 raise error.Abort(
454 raise error.Abort(
454 _("You can't combine --all and filtering on a name"))
455 _("You can't combine --all and filtering on a name"))
455 name = None
456 name = None
456 if args:
457 if args:
457 name = args[0]
458 name = args[0]
458
459
459 fm = ui.formatter('journal', opts)
460 fm = ui.formatter('journal', opts)
460
461
461 if opts.get("template") != "json":
462 if opts.get("template") != "json":
462 if name is None:
463 if name is None:
463 displayname = _('the working copy and bookmarks')
464 displayname = _('the working copy and bookmarks')
464 else:
465 else:
465 displayname = "'%s'" % name
466 displayname = "'%s'" % name
466 ui.status(_("previous locations of %s:\n") % displayname)
467 ui.status(_("previous locations of %s:\n") % displayname)
467
468
468 limit = cmdutil.loglimit(opts)
469 limit = cmdutil.loglimit(opts)
469 entry = None
470 entry = None
470 for count, entry in enumerate(repo.journal.filtered(name=name)):
471 for count, entry in enumerate(repo.journal.filtered(name=name)):
471 if count == limit:
472 if count == limit:
472 break
473 break
473 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
474 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
474 name='node', sep=',')
475 name='node', sep=',')
475 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
476 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
476 name='node', sep=',')
477 name='node', sep=',')
477
478
478 fm.startitem()
479 fm.startitem()
479 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
480 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
480 fm.write('newhashes', '%s', newhashesstr)
481 fm.write('newhashes', '%s', newhashesstr)
481 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
482 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
482 fm.condwrite(
483 fm.condwrite(
483 opts.get('all') or name.startswith('re:'),
484 opts.get('all') or name.startswith('re:'),
484 'name', ' %-8s', entry.name)
485 'name', ' %-8s', entry.name)
485
486
486 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
487 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
487 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
488 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
488 fm.write('command', ' %s\n', entry.command)
489 fm.write('command', ' %s\n', entry.command)
489
490
490 if opts.get("commits"):
491 if opts.get("commits"):
491 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
492 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
492 for hash in entry.newhashes:
493 for hash in entry.newhashes:
493 try:
494 try:
494 ctx = repo[hash]
495 ctx = repo[hash]
495 displayer.show(ctx)
496 displayer.show(ctx)
496 except error.RepoLookupError as e:
497 except error.RepoLookupError as e:
497 fm.write('repolookuperror', "%s\n\n", str(e))
498 fm.write('repolookuperror', "%s\n\n", str(e))
498 displayer.close()
499 displayer.close()
499
500
500 fm.end()
501 fm.end()
501
502
502 if entry is None:
503 if entry is None:
503 ui.status(_("no recorded locations\n"))
504 ui.status(_("no recorded locations\n"))
@@ -1,760 +1,760 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific you are in your filename patterns the less you
57 The more specific you are in your filename patterns the less you
58 lose speed in huge repositories.
58 lose speed in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
85
85
86 from __future__ import absolute_import
86 from __future__ import absolute_import
87
87
88 import os
88 import os
89 import re
89 import re
90 import tempfile
90 import tempfile
91
91
92 from mercurial.i18n import _
92 from mercurial.i18n import _
93 from mercurial.hgweb import webcommands
93 from mercurial.hgweb import webcommands
94
94
95 from mercurial import (
95 from mercurial import (
96 cmdutil,
96 cmdutil,
97 commands,
97 commands,
98 context,
98 context,
99 dispatch,
99 dispatch,
100 error,
100 error,
101 extensions,
101 extensions,
102 filelog,
102 filelog,
103 localrepo,
103 localrepo,
104 match,
104 match,
105 patch,
105 patch,
106 pathutil,
106 pathutil,
107 registrar,
107 registrar,
108 scmutil,
108 scmutil,
109 templatefilters,
109 templatefilters,
110 util,
110 util,
111 )
111 )
112
112
113 cmdtable = {}
113 cmdtable = {}
114 command = cmdutil.command(cmdtable)
114 command = registrar.command(cmdtable)
115 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
115 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
116 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
116 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
117 # be specifying the version(s) of Mercurial they are tested with, or
117 # be specifying the version(s) of Mercurial they are tested with, or
118 # leave the attribute unspecified.
118 # leave the attribute unspecified.
119 testedwith = 'ships-with-hg-core'
119 testedwith = 'ships-with-hg-core'
120
120
121 # hg commands that do not act on keywords
121 # hg commands that do not act on keywords
122 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
122 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
123 ' outgoing push tip verify convert email glog')
123 ' outgoing push tip verify convert email glog')
124
124
125 # hg commands that trigger expansion only when writing to working dir,
125 # hg commands that trigger expansion only when writing to working dir,
126 # not when reading filelog, and unexpand when reading from working dir
126 # not when reading filelog, and unexpand when reading from working dir
127 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
127 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
128 ' unshelve rebase graft backout histedit fetch')
128 ' unshelve rebase graft backout histedit fetch')
129
129
130 # names of extensions using dorecord
130 # names of extensions using dorecord
131 recordextensions = 'record'
131 recordextensions = 'record'
132
132
133 colortable = {
133 colortable = {
134 'kwfiles.enabled': 'green bold',
134 'kwfiles.enabled': 'green bold',
135 'kwfiles.deleted': 'cyan bold underline',
135 'kwfiles.deleted': 'cyan bold underline',
136 'kwfiles.enabledunknown': 'green',
136 'kwfiles.enabledunknown': 'green',
137 'kwfiles.ignored': 'bold',
137 'kwfiles.ignored': 'bold',
138 'kwfiles.ignoredunknown': 'none'
138 'kwfiles.ignoredunknown': 'none'
139 }
139 }
140
140
141 templatefilter = registrar.templatefilter()
141 templatefilter = registrar.templatefilter()
142
142
143 # date like in cvs' $Date
143 # date like in cvs' $Date
144 @templatefilter('utcdate')
144 @templatefilter('utcdate')
145 def utcdate(text):
145 def utcdate(text):
146 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
146 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
147 '''
147 '''
148 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
148 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
149 # date like in svn's $Date
149 # date like in svn's $Date
150 @templatefilter('svnisodate')
150 @templatefilter('svnisodate')
151 def svnisodate(text):
151 def svnisodate(text):
152 '''Date. Returns a date in this format: "2009-08-18 13:00:13
152 '''Date. Returns a date in this format: "2009-08-18 13:00:13
153 +0200 (Tue, 18 Aug 2009)".
153 +0200 (Tue, 18 Aug 2009)".
154 '''
154 '''
155 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
155 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
156 # date like in svn's $Id
156 # date like in svn's $Id
157 @templatefilter('svnutcdate')
157 @templatefilter('svnutcdate')
158 def svnutcdate(text):
158 def svnutcdate(text):
159 '''Date. Returns a UTC-date in this format: "2009-08-18
159 '''Date. Returns a UTC-date in this format: "2009-08-18
160 11:00:13Z".
160 11:00:13Z".
161 '''
161 '''
162 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
162 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
163
163
164 # make keyword tools accessible
164 # make keyword tools accessible
165 kwtools = {'templater': None, 'hgcmd': ''}
165 kwtools = {'templater': None, 'hgcmd': ''}
166
166
167 def _defaultkwmaps(ui):
167 def _defaultkwmaps(ui):
168 '''Returns default keywordmaps according to keywordset configuration.'''
168 '''Returns default keywordmaps according to keywordset configuration.'''
169 templates = {
169 templates = {
170 'Revision': '{node|short}',
170 'Revision': '{node|short}',
171 'Author': '{author|user}',
171 'Author': '{author|user}',
172 }
172 }
173 kwsets = ({
173 kwsets = ({
174 'Date': '{date|utcdate}',
174 'Date': '{date|utcdate}',
175 'RCSfile': '{file|basename},v',
175 'RCSfile': '{file|basename},v',
176 'RCSFile': '{file|basename},v', # kept for backwards compatibility
176 'RCSFile': '{file|basename},v', # kept for backwards compatibility
177 # with hg-keyword
177 # with hg-keyword
178 'Source': '{root}/{file},v',
178 'Source': '{root}/{file},v',
179 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
179 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
180 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
180 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
181 }, {
181 }, {
182 'Date': '{date|svnisodate}',
182 'Date': '{date|svnisodate}',
183 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
183 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
184 'LastChangedRevision': '{node|short}',
184 'LastChangedRevision': '{node|short}',
185 'LastChangedBy': '{author|user}',
185 'LastChangedBy': '{author|user}',
186 'LastChangedDate': '{date|svnisodate}',
186 'LastChangedDate': '{date|svnisodate}',
187 })
187 })
188 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
188 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
189 return templates
189 return templates
190
190
191 def _shrinktext(text, subfunc):
191 def _shrinktext(text, subfunc):
192 '''Helper for keyword expansion removal in text.
192 '''Helper for keyword expansion removal in text.
193 Depending on subfunc also returns number of substitutions.'''
193 Depending on subfunc also returns number of substitutions.'''
194 return subfunc(r'$\1$', text)
194 return subfunc(r'$\1$', text)
195
195
196 def _preselect(wstatus, changed):
196 def _preselect(wstatus, changed):
197 '''Retrieves modified and added files from a working directory state
197 '''Retrieves modified and added files from a working directory state
198 and returns the subset of each contained in given changed files
198 and returns the subset of each contained in given changed files
199 retrieved from a change context.'''
199 retrieved from a change context.'''
200 modified = [f for f in wstatus.modified if f in changed]
200 modified = [f for f in wstatus.modified if f in changed]
201 added = [f for f in wstatus.added if f in changed]
201 added = [f for f in wstatus.added if f in changed]
202 return modified, added
202 return modified, added
203
203
204
204
class kwtemplater(object):
    '''
    Sets up keyword templates, corresponding keyword regex, and
    provides keyword substitution functions.
    '''

    def __init__(self, ui, repo, inc, exc):
        # inc/exc: include/exclude patterns from the [keyword] config
        # section, compiled into a single matcher rooted at repo.root.
        self.ui = ui
        self.repo = repo
        self.match = match.match(repo.root, '', [], inc, exc)
        # restrict: True while a command from the module-level
        # 'restricted' list is running — NOTE(review): 'restricted' is
        # defined outside this chunk; verify its contents there.
        self.restrict = kwtools['hgcmd'] in restricted.split()
        self.postcommit = False

        kwmaps = self.ui.configitems('keywordmaps')
        if kwmaps: # override default templates
            self.templates = dict(kwmaps)
        else:
            self.templates = _defaultkwmaps(self.ui)

    @util.propertycache
    def escape(self):
        '''Returns bar-separated and escaped keywords.'''
        # regex-escaped so keyword names with metacharacters stay literal
        return '|'.join(map(re.escape, self.templates.keys()))

    @util.propertycache
    def rekw(self):
        '''Returns regex for unexpanded keywords.'''
        # matches e.g. '$Id$'; group 1 is the keyword name
        return re.compile(r'\$(%s)\$' % self.escape)

    @util.propertycache
    def rekwexp(self):
        '''Returns regex for expanded keywords.'''
        # matches e.g. '$Id: value $'; value may not span lines or
        # contain '$'
        return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)

    def substitute(self, data, path, ctx, subfunc):
        '''Replaces keywords in data with expanded template.

        subfunc is rekw.sub/rekwexp.subn etc.; with subn the caller also
        receives the substitution count.'''
        def kwsub(mobj):
            kw = mobj.group(1)
            # render the configured template for this keyword against ctx
            ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
                                             self.templates[kw], '', False)
            # capture templater output instead of writing it to the ui
            self.ui.pushbuffer()
            ct.show(ctx, root=self.repo.root, file=path)
            # only the first line is embedded in the keyword
            ekw = templatefilters.firstline(self.ui.popbuffer())
            return '$%s: %s $' % (kw, ekw)
        return subfunc(kwsub, data)

    def linkctx(self, path, fileid):
        '''Similar to filelog.linkrev, but returns a changectx.'''
        return self.repo.filectx(path, fileid=fileid).changectx()

    def expand(self, path, node, data):
        '''Returns data with keywords expanded.'''
        # never expand while a restricted command runs, for unmatched
        # files, or for binary content
        if not self.restrict and self.match(path) and not util.binary(data):
            ctx = self.linkctx(path, node)
            return self.substitute(data, path, ctx, self.rekw.sub)
        return data

    def iskwfile(self, cand, ctx):
        '''Returns subset of candidates which are configured for keyword
        expansion but are not symbolic links.'''
        return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]

    def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
        '''Overwrites selected files expanding/shrinking keywords.

        lookup: resolve each file's own changectx via linkctx.
        expand: True to expand keywords, False to shrink them.
        rekw: force matching on unexpanded keywords.'''
        if self.restrict or lookup or self.postcommit: # exclude kw_copy
            candidates = self.iskwfile(candidates, ctx)
        if not candidates:
            return
        kwcmd = self.restrict and lookup # kwexpand/kwshrink
        # manifest is only needed when file revisions are read below
        if self.restrict or expand and lookup:
            mf = ctx.manifest()
        if self.restrict or rekw:
            re_kw = self.rekw
        else:
            re_kw = self.rekwexp
        if expand:
            msg = _('overwriting %s expanding keywords\n')
        else:
            msg = _('overwriting %s shrinking keywords\n')
        for f in candidates:
            if self.restrict:
                # read stored (unexpanded) data from the filelog
                data = self.repo.file(f).read(mf[f])
            else:
                data = self.repo.wread(f)
            if util.binary(data):
                continue
            if expand:
                parents = ctx.parents()
                if lookup:
                    # NOTE: rebinds ctx per file; subsequent iterations
                    # intentionally use this file's linked changectx
                    ctx = self.linkctx(f, mf[f])
                elif self.restrict and len(parents) > 1:
                    # merge commit
                    # in case of conflict f is in modified state during
                    # merge, even if f does not differ from f in parent
                    for p in parents:
                        if f in p and not p[f].cmp(ctx[f]):
                            ctx = p[f].changectx()
                            break
                data, found = self.substitute(data, f, ctx, re_kw.subn)
            elif self.restrict:
                # shrinking is unnecessary in restricted mode; just probe
                found = re_kw.search(data)
            else:
                data, found = _shrinktext(data, re_kw.subn)
            if found:
                self.ui.note(msg % f)
                fp = self.repo.wvfs(f, "wb", atomictemp=True)
                fp.write(data)
                fp.close()
                if kwcmd:
                    self.repo.dirstate.normal(f)
                elif self.postcommit:
                    self.repo.dirstate.normallookup(f)

    def shrink(self, fname, text):
        '''Returns text with all keyword substitutions removed.'''
        if self.match(fname) and not util.binary(text):
            return _shrinktext(text, self.rekwexp.sub)
        return text

    def shrinklines(self, fname, lines):
        '''Returns lines with keyword substitutions removed.'''
        if self.match(fname):
            text = ''.join(lines)
            if not util.binary(text):
                # keepends=True preserves original line terminators
                return _shrinktext(text, self.rekwexp.sub).splitlines(True)
        return lines

    def wread(self, fname, data):
        '''If in restricted mode returns data read from wdir with
        keyword substitutions removed.'''
        if self.restrict:
            return self.shrink(fname, data)
        return data
338
338
class kwfilelog(filelog.filelog):
    '''
    filelog subclass hooking read, add and cmp so that revision data is
    "stored" with unexpanded keywords and expanded only when read back.
    '''
    def __init__(self, opener, kwt, path):
        super(kwfilelog, self).__init__(opener, path)
        self.kwt = kwt
        self.path = path

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        data = super(kwfilelog, self).read(node)
        if not self.renamed(node):
            data = self.kwt.expand(self.path, node, data)
        return data

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        shrunk = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).add(shrunk, meta, tr, link, p1, p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        shrunk = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).cmp(node, shrunk)
365
365
def _status(ui, repo, wctx, kwt, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if not kwt:
        # keyword processing is inactive: either the configured patterns
        # cannot match, or no patterns were configured at all
        if ui.configitems('keyword'):
            raise error.Abort(_('[keyword] patterns cannot match'))
        raise error.Abort(_('no [keyword] patterns configured'))
    listunknown = opts.get('unknown') or opts.get('all')
    return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
                       unknown=listunknown)
375
375
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects clean files and hands them to kwtemplater.overwrite,
    expanding (expand=True) or shrinking (expand=False) keywords.'''
    wctx = repo[None]
    if len(wctx.parents()) > 1:
        raise error.Abort(_('outstanding uncommitted merge'))
    kwt = kwtools['templater']
    with repo.wlock():
        status = _status(ui, repo, wctx, kwt, *pats, **opts)
        dirty = (status.modified or status.added or status.removed
                 or status.deleted)
        if dirty:
            # refuse to overwrite files carrying local changes
            raise error.Abort(_('outstanding uncommitted changes'))
        kwt.overwrite(wctx, status.clean, True, expand)
387
387
@command('kwdemo',
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', '',
           _('read maps from rcfile'), _('FILE'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
         optionalrepo=True)
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps and their
    expansions.

    Extend the current configuration by specifying maps as arguments
    and using -f/--rcfile to source an external hgrc file.

    Use -d/--default to disable current configuration.

    See :hg:`help templates` for information on templates and filters.
    '''
    # print one config section in hgrc syntax
    def demoitems(section, items):
        ui.write('[%s]\n' % section)
        for k, v in sorted(items):
            ui.write('%s = %s\n' % (k, v))

    fn = 'demo.txt'
    # the demo commits into a scratch repository, leaving the user's
    # repository untouched; it is removed again at the end
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repository at %s\n') % tmpdir)
    if repo is None:
        baseui = ui
    else:
        baseui = repo.baseui
    repo = localrepo.localrepository(baseui, tmpdir, True)
    # make the demo file itself subject to keyword expansion
    ui.setconfig('keyword', fn, '', 'keyword')
    svn = ui.configbool('keywordset', 'svn')
    # explicitly set keywordset for demo output
    ui.setconfig('keywordset', 'svn', svn, 'keyword')

    uikwmaps = ui.configitems('keywordmaps')
    if args or opts.get('rcfile'):
        # custom maps: merge rcfile and/or command-line maps on top of
        # whatever is currently configured
        ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
        if uikwmaps:
            ui.status(_('\textending current template maps\n'))
        if opts.get('default') or not uikwmaps:
            if svn:
                ui.status(_('\toverriding default svn keywordset\n'))
            else:
                ui.status(_('\toverriding default cvs keywordset\n'))
        if opts.get('rcfile'):
            ui.readconfig(opts.get('rcfile'))
        if args:
            # simulate hgrc parsing
            rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
            repo.vfs.write('hgrc', rcmaps)
            ui.readconfig(repo.vfs.join('hgrc'))
        kwmaps = dict(ui.configitems('keywordmaps'))
    elif opts.get('default'):
        # -d/--default: ignore current configuration entirely
        if svn:
            ui.status(_('\n\tconfiguration using default svn keywordset\n'))
        else:
            ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
        kwmaps = _defaultkwmaps(ui)
        if uikwmaps:
            ui.status(_('\tdisabling current template maps\n'))
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v, 'keyword')
    else:
        # no arguments: show whatever is configured (or the defaults)
        ui.status(_('\n\tconfiguration using current keyword template maps\n'))
        if uikwmaps:
            kwmaps = dict(uikwmaps)
        else:
            kwmaps = _defaultkwmaps(ui)

    # activate the extension on the scratch repository
    uisetup(ui)
    reposetup(ui, repo)
    ui.write(('[extensions]\nkeyword =\n'))
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordset', ui.configitems('keywordset'))
    demoitems('keywordmaps', kwmaps.iteritems())
    # one unexpanded '$kw$' placeholder per configured keyword
    keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
    repo.wvfs.write(fn, keywords)
    repo[None].add([fn])
    ui.note(_('\nkeywords written to %s:\n') % fn)
    ui.note(keywords)
    with repo.wlock():
        repo.dirstate.setbranch('demobranch')
    # disable commit-related hooks so the demo commit runs unhindered
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '', 'keyword')
    msg = _('hg keyword configuration and expansion example')
    ui.note(("hg ci -m '%s'\n" % msg))
    repo.commit(text=msg)
    ui.status(_('\n\tkeywords expanded\n'))
    # wread triggers expansion via the kwrepo wrapper installed above
    ui.write(repo.wread(fn))
    repo.wvfs.rmtree(repo.root)
482
482
@command('kwexpand',
         commands.walkopts,
         _('hg kwexpand [OPTION]... [FILE]...'),
         inferrepo=True)
def expand(ui, repo, *pats, **opts):
    '''expand keywords in the working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to True
    # (see _kwfwrite: aborts on uncommitted changes, then overwrites
    # clean matched files under the repository wlock)
    _kwfwrite(ui, repo, True, *pats, **opts)
496
496
@command('kwfiles',
         [('A', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'unknown', None, _('only show unknown (not tracked) files')),
         ] + commands.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...'),
         inferrepo=True)
def files(ui, repo, *pats, **opts):
    '''show files configured for keyword expansion

    List which files in the working directory are matched by the
    [keyword] configuration patterns.

    Useful to prevent inadvertent keyword expansion and to speed up
    execution by including only files that are actual candidates for
    expansion.

    See :hg:`help keyword` on how to construct patterns both for
    inclusion and exclusion of files.

    With -A/--all and -v/--verbose the codes used to show the status
    of files are::

      K = keyword expansion candidate
      k = keyword expansion candidate (not tracked)
      I = ignored
      i = ignored (not tracked)
    '''
    kwt = kwtools['templater']
    wctx = repo[None]
    status = _status(ui, repo, wctx, kwt, *pats, **opts)
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''
    files = []
    # tracked candidates unless only unknown files were requested
    if not opts.get('unknown') or opts.get('all'):
        files = sorted(status.modified + status.added + status.clean)
    kwfiles = kwt.iskwfile(files, wctx)
    kwdeleted = kwt.iskwfile(status.deleted, wctx)
    kwunknown = kwt.iskwfile(status.unknown, wctx)
    if not opts.get('ignore') or opts.get('all'):
        showfiles = kwfiles, kwdeleted, kwunknown
    else:
        showfiles = [], [], []
    if opts.get('all') or opts.get('ignore'):
        # append the excluded (ignored) tracked and untracked files
        showfiles += ([f for f in files if f not in kwfiles],
                      [f for f in status.unknown if f not in kwunknown])
    # labels parallel the five file lists in showfiles; flag characters
    # 'K!kIi' map one-to-one as documented in the docstring
    kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
    kwstates = zip(kwlabels, 'K!kIi', showfiles)
    fm = ui.formatter('kwfiles', opts)
    # '%.0s' swallows the status char unless -A/-v asks for it
    fmt = '%.0s%s\n'
    if opts.get('all') or ui.verbose:
        fmt = '%s %s\n'
    for kwstate, char, filenames in kwstates:
        label = 'kwfiles.' + kwstate
        for f in filenames:
            fm.startitem()
            fm.write('kwstatus path', fmt, char,
                     repo.pathto(f, cwd), label=label)
    fm.end()
558
558
@command('kwshrink',
         commands.walkopts,
         _('hg kwshrink [OPTION]... [FILE]...'),
         inferrepo=True)
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in the working directory

    Must be run before changing/disabling active keywords.

    kwshrink refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to False
    # (see _kwfwrite: aborts on uncommitted changes, then overwrites
    # clean matched files under the repository wlock)
    _kwfwrite(ui, repo, False, *pats, **opts)
572
572
573
573
def uisetup(ui):
    '''Monkeypatches dispatch._parse to retrieve user command.'''

    def _recordhgcmd(orig, ui, args):
        '''Wrapper around dispatch._parse noting the running hg command.'''
        parsed = orig(ui, args)
        # parsed is (cmd, func, args, options, cmdoptions); the command
        # name is stashed for restricted-command detection elsewhere
        kwtools['hgcmd'] = parsed[0]
        return parsed

    extensions.wrapfunction(dispatch, '_parse', _recordhgcmd)
584
584
585 def reposetup(ui, repo):
585 def reposetup(ui, repo):
586 '''Sets up repo as kwrepo for keyword substitution.
586 '''Sets up repo as kwrepo for keyword substitution.
587 Overrides file method to return kwfilelog instead of filelog
587 Overrides file method to return kwfilelog instead of filelog
588 if file matches user configuration.
588 if file matches user configuration.
589 Wraps commit to overwrite configured files with updated
589 Wraps commit to overwrite configured files with updated
590 keyword substitutions.
590 keyword substitutions.
591 Monkeypatches patch and webcommands.'''
591 Monkeypatches patch and webcommands.'''
592
592
593 try:
593 try:
594 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
594 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
595 or '.hg' in util.splitpath(repo.root)
595 or '.hg' in util.splitpath(repo.root)
596 or repo._url.startswith('bundle:')):
596 or repo._url.startswith('bundle:')):
597 return
597 return
598 except AttributeError:
598 except AttributeError:
599 pass
599 pass
600
600
601 inc, exc = [], ['.hg*']
601 inc, exc = [], ['.hg*']
602 for pat, opt in ui.configitems('keyword'):
602 for pat, opt in ui.configitems('keyword'):
603 if opt != 'ignore':
603 if opt != 'ignore':
604 inc.append(pat)
604 inc.append(pat)
605 else:
605 else:
606 exc.append(pat)
606 exc.append(pat)
607 if not inc:
607 if not inc:
608 return
608 return
609
609
610 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
610 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
611
611
612 class kwrepo(repo.__class__):
612 class kwrepo(repo.__class__):
613 def file(self, f):
613 def file(self, f):
614 if f[0] == '/':
614 if f[0] == '/':
615 f = f[1:]
615 f = f[1:]
616 return kwfilelog(self.svfs, kwt, f)
616 return kwfilelog(self.svfs, kwt, f)
617
617
618 def wread(self, filename):
618 def wread(self, filename):
619 data = super(kwrepo, self).wread(filename)
619 data = super(kwrepo, self).wread(filename)
620 return kwt.wread(filename, data)
620 return kwt.wread(filename, data)
621
621
622 def commit(self, *args, **opts):
622 def commit(self, *args, **opts):
623 # use custom commitctx for user commands
623 # use custom commitctx for user commands
624 # other extensions can still wrap repo.commitctx directly
624 # other extensions can still wrap repo.commitctx directly
625 self.commitctx = self.kwcommitctx
625 self.commitctx = self.kwcommitctx
626 try:
626 try:
627 return super(kwrepo, self).commit(*args, **opts)
627 return super(kwrepo, self).commit(*args, **opts)
628 finally:
628 finally:
629 del self.commitctx
629 del self.commitctx
630
630
631 def kwcommitctx(self, ctx, error=False):
631 def kwcommitctx(self, ctx, error=False):
632 n = super(kwrepo, self).commitctx(ctx, error)
632 n = super(kwrepo, self).commitctx(ctx, error)
633 # no lock needed, only called from repo.commit() which already locks
633 # no lock needed, only called from repo.commit() which already locks
634 if not kwt.postcommit:
634 if not kwt.postcommit:
635 restrict = kwt.restrict
635 restrict = kwt.restrict
636 kwt.restrict = True
636 kwt.restrict = True
637 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
637 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
638 False, True)
638 False, True)
639 kwt.restrict = restrict
639 kwt.restrict = restrict
640 return n
640 return n
641
641
642 def rollback(self, dryrun=False, force=False):
642 def rollback(self, dryrun=False, force=False):
643 wlock = self.wlock()
643 wlock = self.wlock()
644 origrestrict = kwt.restrict
644 origrestrict = kwt.restrict
645 try:
645 try:
646 if not dryrun:
646 if not dryrun:
647 changed = self['.'].files()
647 changed = self['.'].files()
648 ret = super(kwrepo, self).rollback(dryrun, force)
648 ret = super(kwrepo, self).rollback(dryrun, force)
649 if not dryrun:
649 if not dryrun:
650 ctx = self['.']
650 ctx = self['.']
651 modified, added = _preselect(ctx.status(), changed)
651 modified, added = _preselect(ctx.status(), changed)
652 kwt.restrict = False
652 kwt.restrict = False
653 kwt.overwrite(ctx, modified, True, True)
653 kwt.overwrite(ctx, modified, True, True)
654 kwt.overwrite(ctx, added, True, False)
654 kwt.overwrite(ctx, added, True, False)
655 return ret
655 return ret
656 finally:
656 finally:
657 kwt.restrict = origrestrict
657 kwt.restrict = origrestrict
658 wlock.release()
658 wlock.release()
659
659
660 # monkeypatches
660 # monkeypatches
661 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
661 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
662 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
662 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
663 rejects or conflicts due to expanded keywords in working dir.'''
663 rejects or conflicts due to expanded keywords in working dir.'''
664 orig(self, ui, gp, backend, store, eolmode)
664 orig(self, ui, gp, backend, store, eolmode)
665 # shrink keywords read from working dir
665 # shrink keywords read from working dir
666 self.lines = kwt.shrinklines(self.fname, self.lines)
666 self.lines = kwt.shrinklines(self.fname, self.lines)
667
667
668 def kwdiff(orig, *args, **kwargs):
668 def kwdiff(orig, *args, **kwargs):
669 '''Monkeypatch patch.diff to avoid expansion.'''
669 '''Monkeypatch patch.diff to avoid expansion.'''
670 kwt.restrict = True
670 kwt.restrict = True
671 return orig(*args, **kwargs)
671 return orig(*args, **kwargs)
672
672
673 def kwweb_skip(orig, web, req, tmpl):
673 def kwweb_skip(orig, web, req, tmpl):
674 '''Wraps webcommands.x turning off keyword expansion.'''
674 '''Wraps webcommands.x turning off keyword expansion.'''
675 kwt.match = util.never
675 kwt.match = util.never
676 return orig(web, req, tmpl)
676 return orig(web, req, tmpl)
677
677
678 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
678 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
679 '''Wraps cmdutil.amend expanding keywords after amend.'''
679 '''Wraps cmdutil.amend expanding keywords after amend.'''
680 with repo.wlock():
680 with repo.wlock():
681 kwt.postcommit = True
681 kwt.postcommit = True
682 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
682 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
683 if newid != old.node():
683 if newid != old.node():
684 ctx = repo[newid]
684 ctx = repo[newid]
685 kwt.restrict = True
685 kwt.restrict = True
686 kwt.overwrite(ctx, ctx.files(), False, True)
686 kwt.overwrite(ctx, ctx.files(), False, True)
687 kwt.restrict = False
687 kwt.restrict = False
688 return newid
688 return newid
689
689
690 def kw_copy(orig, ui, repo, pats, opts, rename=False):
690 def kw_copy(orig, ui, repo, pats, opts, rename=False):
691 '''Wraps cmdutil.copy so that copy/rename destinations do not
691 '''Wraps cmdutil.copy so that copy/rename destinations do not
692 contain expanded keywords.
692 contain expanded keywords.
693 Note that the source of a regular file destination may also be a
693 Note that the source of a regular file destination may also be a
694 symlink:
694 symlink:
695 hg cp sym x -> x is symlink
695 hg cp sym x -> x is symlink
696 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
696 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
697 For the latter we have to follow the symlink to find out whether its
697 For the latter we have to follow the symlink to find out whether its
698 target is configured for expansion and we therefore must unexpand the
698 target is configured for expansion and we therefore must unexpand the
699 keywords in the destination.'''
699 keywords in the destination.'''
700 with repo.wlock():
700 with repo.wlock():
701 orig(ui, repo, pats, opts, rename)
701 orig(ui, repo, pats, opts, rename)
702 if opts.get('dry_run'):
702 if opts.get('dry_run'):
703 return
703 return
704 wctx = repo[None]
704 wctx = repo[None]
705 cwd = repo.getcwd()
705 cwd = repo.getcwd()
706
706
707 def haskwsource(dest):
707 def haskwsource(dest):
708 '''Returns true if dest is a regular file and configured for
708 '''Returns true if dest is a regular file and configured for
709 expansion or a symlink which points to a file configured for
709 expansion or a symlink which points to a file configured for
710 expansion. '''
710 expansion. '''
711 source = repo.dirstate.copied(dest)
711 source = repo.dirstate.copied(dest)
712 if 'l' in wctx.flags(source):
712 if 'l' in wctx.flags(source):
713 source = pathutil.canonpath(repo.root, cwd,
713 source = pathutil.canonpath(repo.root, cwd,
714 os.path.realpath(source))
714 os.path.realpath(source))
715 return kwt.match(source)
715 return kwt.match(source)
716
716
717 candidates = [f for f in repo.dirstate.copies() if
717 candidates = [f for f in repo.dirstate.copies() if
718 'l' not in wctx.flags(f) and haskwsource(f)]
718 'l' not in wctx.flags(f) and haskwsource(f)]
719 kwt.overwrite(wctx, candidates, False, False)
719 kwt.overwrite(wctx, candidates, False, False)
720
720
721 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
721 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
722 '''Wraps record.dorecord expanding keywords after recording.'''
722 '''Wraps record.dorecord expanding keywords after recording.'''
723 with repo.wlock():
723 with repo.wlock():
724 # record returns 0 even when nothing has changed
724 # record returns 0 even when nothing has changed
725 # therefore compare nodes before and after
725 # therefore compare nodes before and after
726 kwt.postcommit = True
726 kwt.postcommit = True
727 ctx = repo['.']
727 ctx = repo['.']
728 wstatus = ctx.status()
728 wstatus = ctx.status()
729 ret = orig(ui, repo, commitfunc, *pats, **opts)
729 ret = orig(ui, repo, commitfunc, *pats, **opts)
730 recctx = repo['.']
730 recctx = repo['.']
731 if ctx != recctx:
731 if ctx != recctx:
732 modified, added = _preselect(wstatus, recctx.files())
732 modified, added = _preselect(wstatus, recctx.files())
733 kwt.restrict = False
733 kwt.restrict = False
734 kwt.overwrite(recctx, modified, False, True)
734 kwt.overwrite(recctx, modified, False, True)
735 kwt.overwrite(recctx, added, False, True, True)
735 kwt.overwrite(recctx, added, False, True, True)
736 kwt.restrict = True
736 kwt.restrict = True
737 return ret
737 return ret
738
738
739 def kwfilectx_cmp(orig, self, fctx):
739 def kwfilectx_cmp(orig, self, fctx):
740 if fctx._customcmp:
740 if fctx._customcmp:
741 return fctx.cmp(self)
741 return fctx.cmp(self)
742 # keyword affects data size, comparing wdir and filelog size does
742 # keyword affects data size, comparing wdir and filelog size does
743 # not make sense
743 # not make sense
744 if (fctx._filenode is None and
744 if (fctx._filenode is None and
745 (self._repo._encodefilterpats or
745 (self._repo._encodefilterpats or
746 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
746 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
747 self.size() - 4 == fctx.size()) or
747 self.size() - 4 == fctx.size()) or
748 self.size() == fctx.size()):
748 self.size() == fctx.size()):
749 return self._filelog.cmp(self._filenode, fctx.data())
749 return self._filelog.cmp(self._filenode, fctx.data())
750 return True
750 return True
751
751
752 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
752 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
753 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
753 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
754 extensions.wrapfunction(patch, 'diff', kwdiff)
754 extensions.wrapfunction(patch, 'diff', kwdiff)
755 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
755 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
756 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
756 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
757 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
757 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
758 for c in 'annotate changeset rev filediff diff'.split():
758 for c in 'annotate changeset rev filediff diff'.split():
759 extensions.wrapfunction(webcommands, c, kwweb_skip)
759 extensions.wrapfunction(webcommands, c, kwweb_skip)
760 repo.__class__ = kwrepo
760 repo.__class__ = kwrepo
@@ -1,579 +1,580 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import errno
12 import errno
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import shutil
15 import shutil
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18
18
19 from mercurial import (
19 from mercurial import (
20 cmdutil,
20 cmdutil,
21 commands,
21 commands,
22 context,
22 context,
23 error,
23 error,
24 hg,
24 hg,
25 lock,
25 lock,
26 match as matchmod,
26 match as matchmod,
27 node,
27 node,
28 registrar,
28 scmutil,
29 scmutil,
29 util,
30 util,
30 )
31 )
31
32
32 from ..convert import (
33 from ..convert import (
33 convcmd,
34 convcmd,
34 filemap,
35 filemap,
35 )
36 )
36
37
37 from . import (
38 from . import (
38 lfutil,
39 lfutil,
39 storefactory
40 storefactory
40 )
41 )
41
42
42 release = lock.release
43 release = lock.release
43
44
44 # -- Commands ----------------------------------------------------------
45 # -- Commands ----------------------------------------------------------
45
46
46 cmdtable = {}
47 cmdtable = {}
47 command = cmdutil.command(cmdtable)
48 command = registrar.command(cmdtable)
48
49
49 @command('lfconvert',
50 @command('lfconvert',
50 [('s', 'size', '',
51 [('s', 'size', '',
51 _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
52 _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
52 ('', 'to-normal', False,
53 ('', 'to-normal', False,
53 _('convert from a largefiles repo to a normal repo')),
54 _('convert from a largefiles repo to a normal repo')),
54 ],
55 ],
55 _('hg lfconvert SOURCE DEST [FILE ...]'),
56 _('hg lfconvert SOURCE DEST [FILE ...]'),
56 norepo=True,
57 norepo=True,
57 inferrepo=True)
58 inferrepo=True)
58 def lfconvert(ui, src, dest, *pats, **opts):
59 def lfconvert(ui, src, dest, *pats, **opts):
59 '''convert a normal repository to a largefiles repository
60 '''convert a normal repository to a largefiles repository
60
61
61 Convert repository SOURCE to a new repository DEST, identical to
62 Convert repository SOURCE to a new repository DEST, identical to
62 SOURCE except that certain files will be converted as largefiles:
63 SOURCE except that certain files will be converted as largefiles:
63 specifically, any file that matches any PATTERN *or* whose size is
64 specifically, any file that matches any PATTERN *or* whose size is
64 above the minimum size threshold is converted as a largefile. The
65 above the minimum size threshold is converted as a largefile. The
65 size used to determine whether or not to track a file as a
66 size used to determine whether or not to track a file as a
66 largefile is the size of the first version of the file. The
67 largefile is the size of the first version of the file. The
67 minimum size can be specified either with --size or in
68 minimum size can be specified either with --size or in
68 configuration as ``largefiles.size``.
69 configuration as ``largefiles.size``.
69
70
70 After running this command you will need to make sure that
71 After running this command you will need to make sure that
71 largefiles is enabled anywhere you intend to push the new
72 largefiles is enabled anywhere you intend to push the new
72 repository.
73 repository.
73
74
74 Use --to-normal to convert largefiles back to normal files; after
75 Use --to-normal to convert largefiles back to normal files; after
75 this, the DEST repository can be used without largefiles at all.'''
76 this, the DEST repository can be used without largefiles at all.'''
76
77
77 if opts['to_normal']:
78 if opts['to_normal']:
78 tolfile = False
79 tolfile = False
79 else:
80 else:
80 tolfile = True
81 tolfile = True
81 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
82 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
82
83
83 if not hg.islocal(src):
84 if not hg.islocal(src):
84 raise error.Abort(_('%s is not a local Mercurial repo') % src)
85 raise error.Abort(_('%s is not a local Mercurial repo') % src)
85 if not hg.islocal(dest):
86 if not hg.islocal(dest):
86 raise error.Abort(_('%s is not a local Mercurial repo') % dest)
87 raise error.Abort(_('%s is not a local Mercurial repo') % dest)
87
88
88 rsrc = hg.repository(ui, src)
89 rsrc = hg.repository(ui, src)
89 ui.status(_('initializing destination %s\n') % dest)
90 ui.status(_('initializing destination %s\n') % dest)
90 rdst = hg.repository(ui, dest, create=True)
91 rdst = hg.repository(ui, dest, create=True)
91
92
92 success = False
93 success = False
93 dstwlock = dstlock = None
94 dstwlock = dstlock = None
94 try:
95 try:
95 # Get a list of all changesets in the source. The easy way to do this
96 # Get a list of all changesets in the source. The easy way to do this
96 # is to simply walk the changelog, using changelog.nodesbetween().
97 # is to simply walk the changelog, using changelog.nodesbetween().
97 # Take a look at mercurial/revlog.py:639 for more details.
98 # Take a look at mercurial/revlog.py:639 for more details.
98 # Use a generator instead of a list to decrease memory usage
99 # Use a generator instead of a list to decrease memory usage
99 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
100 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
100 rsrc.heads())[0])
101 rsrc.heads())[0])
101 revmap = {node.nullid: node.nullid}
102 revmap = {node.nullid: node.nullid}
102 if tolfile:
103 if tolfile:
103 # Lock destination to prevent modification while it is converted to.
104 # Lock destination to prevent modification while it is converted to.
104 # Don't need to lock src because we are just reading from its
105 # Don't need to lock src because we are just reading from its
105 # history which can't change.
106 # history which can't change.
106 dstwlock = rdst.wlock()
107 dstwlock = rdst.wlock()
107 dstlock = rdst.lock()
108 dstlock = rdst.lock()
108
109
109 lfiles = set()
110 lfiles = set()
110 normalfiles = set()
111 normalfiles = set()
111 if not pats:
112 if not pats:
112 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
113 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
113 if pats:
114 if pats:
114 matcher = matchmod.match(rsrc.root, '', list(pats))
115 matcher = matchmod.match(rsrc.root, '', list(pats))
115 else:
116 else:
116 matcher = None
117 matcher = None
117
118
118 lfiletohash = {}
119 lfiletohash = {}
119 for ctx in ctxs:
120 for ctx in ctxs:
120 ui.progress(_('converting revisions'), ctx.rev(),
121 ui.progress(_('converting revisions'), ctx.rev(),
121 unit=_('revisions'), total=rsrc['tip'].rev())
122 unit=_('revisions'), total=rsrc['tip'].rev())
122 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
123 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
123 lfiles, normalfiles, matcher, size, lfiletohash)
124 lfiles, normalfiles, matcher, size, lfiletohash)
124 ui.progress(_('converting revisions'), None)
125 ui.progress(_('converting revisions'), None)
125
126
126 if rdst.wvfs.exists(lfutil.shortname):
127 if rdst.wvfs.exists(lfutil.shortname):
127 rdst.wvfs.rmtree(lfutil.shortname)
128 rdst.wvfs.rmtree(lfutil.shortname)
128
129
129 for f in lfiletohash.keys():
130 for f in lfiletohash.keys():
130 if rdst.wvfs.isfile(f):
131 if rdst.wvfs.isfile(f):
131 rdst.wvfs.unlink(f)
132 rdst.wvfs.unlink(f)
132 try:
133 try:
133 rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
134 rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
134 except OSError:
135 except OSError:
135 pass
136 pass
136
137
137 # If there were any files converted to largefiles, add largefiles
138 # If there were any files converted to largefiles, add largefiles
138 # to the destination repository's requirements.
139 # to the destination repository's requirements.
139 if lfiles:
140 if lfiles:
140 rdst.requirements.add('largefiles')
141 rdst.requirements.add('largefiles')
141 rdst._writerequirements()
142 rdst._writerequirements()
142 else:
143 else:
143 class lfsource(filemap.filemap_source):
144 class lfsource(filemap.filemap_source):
144 def __init__(self, ui, source):
145 def __init__(self, ui, source):
145 super(lfsource, self).__init__(ui, source, None)
146 super(lfsource, self).__init__(ui, source, None)
146 self.filemapper.rename[lfutil.shortname] = '.'
147 self.filemapper.rename[lfutil.shortname] = '.'
147
148
148 def getfile(self, name, rev):
149 def getfile(self, name, rev):
149 realname, realrev = rev
150 realname, realrev = rev
150 f = super(lfsource, self).getfile(name, rev)
151 f = super(lfsource, self).getfile(name, rev)
151
152
152 if (not realname.startswith(lfutil.shortnameslash)
153 if (not realname.startswith(lfutil.shortnameslash)
153 or f[0] is None):
154 or f[0] is None):
154 return f
155 return f
155
156
156 # Substitute in the largefile data for the hash
157 # Substitute in the largefile data for the hash
157 hash = f[0].strip()
158 hash = f[0].strip()
158 path = lfutil.findfile(rsrc, hash)
159 path = lfutil.findfile(rsrc, hash)
159
160
160 if path is None:
161 if path is None:
161 raise error.Abort(_("missing largefile for '%s' in %s")
162 raise error.Abort(_("missing largefile for '%s' in %s")
162 % (realname, realrev))
163 % (realname, realrev))
163 return util.readfile(path), f[1]
164 return util.readfile(path), f[1]
164
165
165 class converter(convcmd.converter):
166 class converter(convcmd.converter):
166 def __init__(self, ui, source, dest, revmapfile, opts):
167 def __init__(self, ui, source, dest, revmapfile, opts):
167 src = lfsource(ui, source)
168 src = lfsource(ui, source)
168
169
169 super(converter, self).__init__(ui, src, dest, revmapfile,
170 super(converter, self).__init__(ui, src, dest, revmapfile,
170 opts)
171 opts)
171
172
172 found, missing = downloadlfiles(ui, rsrc)
173 found, missing = downloadlfiles(ui, rsrc)
173 if missing != 0:
174 if missing != 0:
174 raise error.Abort(_("all largefiles must be present locally"))
175 raise error.Abort(_("all largefiles must be present locally"))
175
176
176 orig = convcmd.converter
177 orig = convcmd.converter
177 convcmd.converter = converter
178 convcmd.converter = converter
178
179
179 try:
180 try:
180 convcmd.convert(ui, src, dest)
181 convcmd.convert(ui, src, dest)
181 finally:
182 finally:
182 convcmd.converter = orig
183 convcmd.converter = orig
183 success = True
184 success = True
184 finally:
185 finally:
185 if tolfile:
186 if tolfile:
186 rdst.dirstate.clear()
187 rdst.dirstate.clear()
187 release(dstlock, dstwlock)
188 release(dstlock, dstwlock)
188 if not success:
189 if not success:
189 # we failed, remove the new directory
190 # we failed, remove the new directory
190 shutil.rmtree(rdst.root)
191 shutil.rmtree(rdst.root)
191
192
192 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
193 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
193 matcher, size, lfiletohash):
194 matcher, size, lfiletohash):
194 # Convert src parents to dst parents
195 # Convert src parents to dst parents
195 parents = _convertparents(ctx, revmap)
196 parents = _convertparents(ctx, revmap)
196
197
197 # Generate list of changed files
198 # Generate list of changed files
198 files = _getchangedfiles(ctx, parents)
199 files = _getchangedfiles(ctx, parents)
199
200
200 dstfiles = []
201 dstfiles = []
201 for f in files:
202 for f in files:
202 if f not in lfiles and f not in normalfiles:
203 if f not in lfiles and f not in normalfiles:
203 islfile = _islfile(f, ctx, matcher, size)
204 islfile = _islfile(f, ctx, matcher, size)
204 # If this file was renamed or copied then copy
205 # If this file was renamed or copied then copy
205 # the largefile-ness of its predecessor
206 # the largefile-ness of its predecessor
206 if f in ctx.manifest():
207 if f in ctx.manifest():
207 fctx = ctx.filectx(f)
208 fctx = ctx.filectx(f)
208 renamed = fctx.renamed()
209 renamed = fctx.renamed()
209 renamedlfile = renamed and renamed[0] in lfiles
210 renamedlfile = renamed and renamed[0] in lfiles
210 islfile |= renamedlfile
211 islfile |= renamedlfile
211 if 'l' in fctx.flags():
212 if 'l' in fctx.flags():
212 if renamedlfile:
213 if renamedlfile:
213 raise error.Abort(
214 raise error.Abort(
214 _('renamed/copied largefile %s becomes symlink')
215 _('renamed/copied largefile %s becomes symlink')
215 % f)
216 % f)
216 islfile = False
217 islfile = False
217 if islfile:
218 if islfile:
218 lfiles.add(f)
219 lfiles.add(f)
219 else:
220 else:
220 normalfiles.add(f)
221 normalfiles.add(f)
221
222
222 if f in lfiles:
223 if f in lfiles:
223 fstandin = lfutil.standin(f)
224 fstandin = lfutil.standin(f)
224 dstfiles.append(fstandin)
225 dstfiles.append(fstandin)
225 # largefile in manifest if it has not been removed/renamed
226 # largefile in manifest if it has not been removed/renamed
226 if f in ctx.manifest():
227 if f in ctx.manifest():
227 fctx = ctx.filectx(f)
228 fctx = ctx.filectx(f)
228 if 'l' in fctx.flags():
229 if 'l' in fctx.flags():
229 renamed = fctx.renamed()
230 renamed = fctx.renamed()
230 if renamed and renamed[0] in lfiles:
231 if renamed and renamed[0] in lfiles:
231 raise error.Abort(_('largefile %s becomes symlink') % f)
232 raise error.Abort(_('largefile %s becomes symlink') % f)
232
233
233 # largefile was modified, update standins
234 # largefile was modified, update standins
234 m = hashlib.sha1('')
235 m = hashlib.sha1('')
235 m.update(ctx[f].data())
236 m.update(ctx[f].data())
236 hash = m.hexdigest()
237 hash = m.hexdigest()
237 if f not in lfiletohash or lfiletohash[f] != hash:
238 if f not in lfiletohash or lfiletohash[f] != hash:
238 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
239 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
239 executable = 'x' in ctx[f].flags()
240 executable = 'x' in ctx[f].flags()
240 lfutil.writestandin(rdst, fstandin, hash,
241 lfutil.writestandin(rdst, fstandin, hash,
241 executable)
242 executable)
242 lfiletohash[f] = hash
243 lfiletohash[f] = hash
243 else:
244 else:
244 # normal file
245 # normal file
245 dstfiles.append(f)
246 dstfiles.append(f)
246
247
247 def getfilectx(repo, memctx, f):
248 def getfilectx(repo, memctx, f):
248 srcfname = lfutil.splitstandin(f)
249 srcfname = lfutil.splitstandin(f)
249 if srcfname is not None:
250 if srcfname is not None:
250 # if the file isn't in the manifest then it was removed
251 # if the file isn't in the manifest then it was removed
251 # or renamed, return None to indicate this
252 # or renamed, return None to indicate this
252 try:
253 try:
253 fctx = ctx.filectx(srcfname)
254 fctx = ctx.filectx(srcfname)
254 except error.LookupError:
255 except error.LookupError:
255 return None
256 return None
256 renamed = fctx.renamed()
257 renamed = fctx.renamed()
257 if renamed:
258 if renamed:
258 # standin is always a largefile because largefile-ness
259 # standin is always a largefile because largefile-ness
259 # doesn't change after rename or copy
260 # doesn't change after rename or copy
260 renamed = lfutil.standin(renamed[0])
261 renamed = lfutil.standin(renamed[0])
261
262
262 return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
263 return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
263 'l' in fctx.flags(), 'x' in fctx.flags(),
264 'l' in fctx.flags(), 'x' in fctx.flags(),
264 renamed)
265 renamed)
265 else:
266 else:
266 return _getnormalcontext(repo, ctx, f, revmap)
267 return _getnormalcontext(repo, ctx, f, revmap)
267
268
268 # Commit
269 # Commit
269 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
270 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
270
271
271 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
272 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
272 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
273 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
273 getfilectx, ctx.user(), ctx.date(), ctx.extra())
274 getfilectx, ctx.user(), ctx.date(), ctx.extra())
274 ret = rdst.commitctx(mctx)
275 ret = rdst.commitctx(mctx)
275 lfutil.copyalltostore(rdst, ret)
276 lfutil.copyalltostore(rdst, ret)
276 rdst.setparents(ret)
277 rdst.setparents(ret)
277 revmap[ctx.node()] = rdst.changelog.tip()
278 revmap[ctx.node()] = rdst.changelog.tip()
278
279
279 # Generate list of changed files
280 # Generate list of changed files
280 def _getchangedfiles(ctx, parents):
281 def _getchangedfiles(ctx, parents):
281 files = set(ctx.files())
282 files = set(ctx.files())
282 if node.nullid not in parents:
283 if node.nullid not in parents:
283 mc = ctx.manifest()
284 mc = ctx.manifest()
284 mp1 = ctx.parents()[0].manifest()
285 mp1 = ctx.parents()[0].manifest()
285 mp2 = ctx.parents()[1].manifest()
286 mp2 = ctx.parents()[1].manifest()
286 files |= (set(mp1) | set(mp2)) - set(mc)
287 files |= (set(mp1) | set(mp2)) - set(mc)
287 for f in mc:
288 for f in mc:
288 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
289 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
289 files.add(f)
290 files.add(f)
290 return files
291 return files
291
292
292 # Convert src parents to dst parents
293 # Convert src parents to dst parents
293 def _convertparents(ctx, revmap):
294 def _convertparents(ctx, revmap):
294 parents = []
295 parents = []
295 for p in ctx.parents():
296 for p in ctx.parents():
296 parents.append(revmap[p.node()])
297 parents.append(revmap[p.node()])
297 while len(parents) < 2:
298 while len(parents) < 2:
298 parents.append(node.nullid)
299 parents.append(node.nullid)
299 return parents
300 return parents
300
301
301 # Get memfilectx for a normal file
302 # Get memfilectx for a normal file
302 def _getnormalcontext(repo, ctx, f, revmap):
303 def _getnormalcontext(repo, ctx, f, revmap):
303 try:
304 try:
304 fctx = ctx.filectx(f)
305 fctx = ctx.filectx(f)
305 except error.LookupError:
306 except error.LookupError:
306 return None
307 return None
307 renamed = fctx.renamed()
308 renamed = fctx.renamed()
308 if renamed:
309 if renamed:
309 renamed = renamed[0]
310 renamed = renamed[0]
310
311
311 data = fctx.data()
312 data = fctx.data()
312 if f == '.hgtags':
313 if f == '.hgtags':
313 data = _converttags (repo.ui, revmap, data)
314 data = _converttags (repo.ui, revmap, data)
314 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
315 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
315 'x' in fctx.flags(), renamed)
316 'x' in fctx.flags(), renamed)
316
317
317 # Remap tag data using a revision map
318 # Remap tag data using a revision map
318 def _converttags(ui, revmap, data):
319 def _converttags(ui, revmap, data):
319 newdata = []
320 newdata = []
320 for line in data.splitlines():
321 for line in data.splitlines():
321 try:
322 try:
322 id, name = line.split(' ', 1)
323 id, name = line.split(' ', 1)
323 except ValueError:
324 except ValueError:
324 ui.warn(_('skipping incorrectly formatted tag %s\n')
325 ui.warn(_('skipping incorrectly formatted tag %s\n')
325 % line)
326 % line)
326 continue
327 continue
327 try:
328 try:
328 newid = node.bin(id)
329 newid = node.bin(id)
329 except TypeError:
330 except TypeError:
330 ui.warn(_('skipping incorrectly formatted id %s\n')
331 ui.warn(_('skipping incorrectly formatted id %s\n')
331 % id)
332 % id)
332 continue
333 continue
333 try:
334 try:
334 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
335 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
335 name))
336 name))
336 except KeyError:
337 except KeyError:
337 ui.warn(_('no mapping for id %s\n') % id)
338 ui.warn(_('no mapping for id %s\n') % id)
338 continue
339 continue
339 return ''.join(newdata)
340 return ''.join(newdata)
340
341
341 def _islfile(file, ctx, matcher, size):
342 def _islfile(file, ctx, matcher, size):
342 '''Return true if file should be considered a largefile, i.e.
343 '''Return true if file should be considered a largefile, i.e.
343 matcher matches it or it is larger than size.'''
344 matcher matches it or it is larger than size.'''
344 # never store special .hg* files as largefiles
345 # never store special .hg* files as largefiles
345 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
346 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
346 return False
347 return False
347 if matcher and matcher(file):
348 if matcher and matcher(file):
348 return True
349 return True
349 try:
350 try:
350 return ctx.filectx(file).size() >= size * 1024 * 1024
351 return ctx.filectx(file).size() >= size * 1024 * 1024
351 except error.LookupError:
352 except error.LookupError:
352 return False
353 return False
353
354
354 def uploadlfiles(ui, rsrc, rdst, files):
355 def uploadlfiles(ui, rsrc, rdst, files):
355 '''upload largefiles to the central store'''
356 '''upload largefiles to the central store'''
356
357
357 if not files:
358 if not files:
358 return
359 return
359
360
360 store = storefactory.openstore(rsrc, rdst, put=True)
361 store = storefactory.openstore(rsrc, rdst, put=True)
361
362
362 at = 0
363 at = 0
363 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
364 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
364 retval = store.exists(files)
365 retval = store.exists(files)
365 files = filter(lambda h: not retval[h], files)
366 files = filter(lambda h: not retval[h], files)
366 ui.debug("%d largefiles need to be uploaded\n" % len(files))
367 ui.debug("%d largefiles need to be uploaded\n" % len(files))
367
368
368 for hash in files:
369 for hash in files:
369 ui.progress(_('uploading largefiles'), at, unit=_('files'),
370 ui.progress(_('uploading largefiles'), at, unit=_('files'),
370 total=len(files))
371 total=len(files))
371 source = lfutil.findfile(rsrc, hash)
372 source = lfutil.findfile(rsrc, hash)
372 if not source:
373 if not source:
373 raise error.Abort(_('largefile %s missing from store'
374 raise error.Abort(_('largefile %s missing from store'
374 ' (needs to be uploaded)') % hash)
375 ' (needs to be uploaded)') % hash)
375 # XXX check for errors here
376 # XXX check for errors here
376 store.put(source, hash)
377 store.put(source, hash)
377 at += 1
378 at += 1
378 ui.progress(_('uploading largefiles'), None)
379 ui.progress(_('uploading largefiles'), None)
379
380
380 def verifylfiles(ui, repo, all=False, contents=False):
381 def verifylfiles(ui, repo, all=False, contents=False):
381 '''Verify that every largefile revision in the current changeset
382 '''Verify that every largefile revision in the current changeset
382 exists in the central store. With --contents, also verify that
383 exists in the central store. With --contents, also verify that
383 the contents of each local largefile file revision are correct (SHA-1 hash
384 the contents of each local largefile file revision are correct (SHA-1 hash
384 matches the revision ID). With --all, check every changeset in
385 matches the revision ID). With --all, check every changeset in
385 this repository.'''
386 this repository.'''
386 if all:
387 if all:
387 revs = repo.revs('all()')
388 revs = repo.revs('all()')
388 else:
389 else:
389 revs = ['.']
390 revs = ['.']
390
391
391 store = storefactory.openstore(repo)
392 store = storefactory.openstore(repo)
392 return store.verify(revs, contents=contents)
393 return store.verify(revs, contents=contents)
393
394
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        # restrict the work to the caller-requested subset
        lfiles = set(lfiles) & set(filelist)

    ctx = repo[node]
    toget = []
    for lfile in lfiles:
        try:
            expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            continue # node must be None and standin wasn't found in wctx
        # only fetch revisions that are not already available locally
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if not toget:
        return ([], [])
    store = storefactory.openstore(repo)
    return store.get(toget)
423
424
def downloadlfiles(ui, repo, rev=None):
    """Cache the largefiles referenced by the given revisions.

    Returns a (number cached, number missing) pair and reports both
    counts on the ui.
    """
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})

    def prepare(ctx, fns):
        pass

    totalsuccess = 0
    totalmissing = 0
    # walkchangerevs on an empty list would return all revs, so guard it
    if rev != []:
        walker = cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                        prepare)
        for ctx in walker:
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
441
442
def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        # consider every largefile the repo or the lfdirstate knows about
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        update = {}          # lfile -> expected standin hash
        updated, removed = 0, 0
        wvfs = repo.wvfs
        wctx = repo[None]

        # first pass: reconcile working-dir largefiles with their standins
        for lfile in lfiles:
            rellfile = lfile
            rellfileorig = os.path.relpath(
                scmutil.origpath(ui, repo, wvfs.join(rellfile)),
                start=repo.root)
            relstandin = lfutil.standin(lfile)
            relstandinorig = os.path.relpath(
                scmutil.origpath(ui, repo, wvfs.join(relstandin)),
                start=repo.root)
            if wvfs.exists(relstandin):
                # preserve a backup (.orig) of the largefile alongside the
                # standin's backup before the standin backup is removed
                if (wvfs.exists(relstandinorig) and
                    wvfs.exists(rellfile)):
                    shutil.copyfile(wvfs.join(rellfile),
                                    wvfs.join(rellfileorig))
                    wvfs.unlinkpath(relstandinorig)
                expecthash = lfutil.readasstandin(wctx[relstandin])
                if expecthash != '':
                    if lfile not in wctx: # not switched to normal file
                        wvfs.unlinkpath(rellfile, ignoremissing=True)
                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (wvfs.exists(rellfile) and
                    repo.dirstate.normalize(lfile) not in wctx):
                    wvfs.unlinkpath(rellfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            statuswriter(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        # second pass: materialize contents and fix up exec bits
        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.normal(lfile)
                update1 = 1

            # copy the exec mode of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            rellfile = lfile
            relstandin = lfutil.standin(lfile)
            if wvfs.exists(relstandin):
                # exec is decided by the users permissions using mask 0o100
                standinexec = wvfs.stat(relstandin).st_mode & 0o100
                st = wvfs.stat(rellfile)
                mode = st.st_mode
                if standinexec != mode & 0o100:
                    # first remove all X bits, then shift all R bits to X
                    mode &= ~0o111
                    if standinexec:
                        mode |= (mode >> 2) & 0o111 & ~util.umask
                    wvfs.chmod(rellfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                                                                     removed))
541
542
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # remember where to pull from for the store lookup below
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise error.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
@@ -1,3614 +1,3614 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help COMMAND` for more details)::
17 Common tasks (use :hg:`help COMMAND` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import
65 from __future__ import absolute_import
66
66
67 import errno
67 import errno
68 import os
68 import os
69 import re
69 import re
70 import shutil
70 import shutil
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import (
72 from mercurial.node import (
73 bin,
73 bin,
74 hex,
74 hex,
75 nullid,
75 nullid,
76 nullrev,
76 nullrev,
77 short,
77 short,
78 )
78 )
79 from mercurial import (
79 from mercurial import (
80 cmdutil,
80 cmdutil,
81 commands,
81 commands,
82 dirstateguard,
82 dirstateguard,
83 error,
83 error,
84 extensions,
84 extensions,
85 hg,
85 hg,
86 localrepo,
86 localrepo,
87 lock as lockmod,
87 lock as lockmod,
88 patch as patchmod,
88 patch as patchmod,
89 phases,
89 phases,
90 pycompat,
90 pycompat,
91 registrar,
91 registrar,
92 revsetlang,
92 revsetlang,
93 scmutil,
93 scmutil,
94 smartset,
94 smartset,
95 subrepo,
95 subrepo,
96 util,
96 util,
97 vfs as vfsmod,
97 vfs as vfsmod,
98 )
98 )
99
99
release = lockmod.release
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

cmdtable = {}
# registrar.command replaces the deprecated cmdutil.command for registering
# commands into cmdtable (API change in this changeset).
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

# force load strip extension formerly included in mq and import some utility
try:
    stripext = extensions.find('strip')
except KeyError:
    # note: load is lazy so we could avoid the try-except,
    # but I (marmoute) prefer this explicit code.
    class dummyui(object):
        def debug(self, msg):
            pass
    stripext = extensions.load(dummyui(), 'strip', '')

strip = stripext.strip
checksubstate = stripext.checksubstate
checklocalchanges = stripext.checklocalchanges


# Patch names look like unix-file names.
# They must be joinable with queue directory and result in the patch path.
normname = util.normpath
130
130
class statusentry(object):
    """One entry of the mq status file: an applied patch's node and name."""
    def __init__(self, node, name):
        self.node = node
        self.name = name

    def __repr__(self):
        return hex(self.node) + ':' + self.name
136
136
# The order of the headers in 'hg export' HG patches:
HGHEADERS = [
    # '# HG changeset patch',
    '# User ',
    '# Date ',
    '# ',
    '# Branch ',
    '# Node ID ',
    '# Parent ', # can occur twice for merges - but that is not relevant for mq
    ]
# The order of headers in plain 'mail style' patches:
PLAINHEADERS = {
    'from': 0,
    'date': 1,
    'subject': 2,
    }

def inserthgheader(lines, header, value):
    """Assuming lines contains a HG patch header, add a header line with value.

    Raises ValueError when lines lacks the '# HG changeset patch' marker.

    >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z']
    >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
    ['# HG changeset patch', '# Date z', '']
    >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
    ['# HG changeset patch', '# User y', '# Date z']
    >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
    ...                '# User ', 'z')
    ['# HG changeset patch', '# Date x', '# User z']
    >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z']
    >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z', '', '# Date y']
    >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z', '# Parent y']
    """
    # scan begins right after the changeset-patch marker line
    start = lines.index('# HG changeset patch') + 1
    newindex = HGHEADERS.index(header)
    insertat = len(lines)
    for pos in range(start, len(lines)):
        line = lines[pos]
        if not line.startswith('# '):
            # end of the header block - insert no later than here
            insertat = min(insertat, pos)
            break
        for rank, known in enumerate(HGHEADERS):
            if line.startswith(known):
                if rank == newindex:
                    # same header kind already present - overwrite in place
                    lines[pos] = header + value
                    return lines
                if rank > newindex:
                    # we passed the canonical slot for the new header
                    insertat = min(insertat, pos)
                break # next line
    lines.insert(insertat, header + value)
    return lines

def insertplainheader(lines, header, value):
    """For lines containing a plain patch header, add a header line with value.
    >>> insertplainheader([], 'Date', 'z')
    ['Date: z']
    >>> insertplainheader([''], 'Date', 'z')
    ['Date: z', '']
    >>> insertplainheader(['x'], 'Date', 'z')
    ['Date: z', '', 'x']
    >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
    ['From: y', 'Date: z', '', 'x']
    >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
    [' date : x', 'From: z', '']
    >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
    ['Date: z', '', 'Date: y']
    >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
    ['From: y', 'foo: bar', 'DATE: z', '', 'x']
    """
    targetprio = PLAINHEADERS[header.lower()]
    insertat = len(lines)
    for idx, line in enumerate(lines):
        if ':' in line:
            key = line.split(':', 1)[0].strip().lower()
            prio = PLAINHEADERS.get(key, targetprio + 1)
            if prio == targetprio:
                # header of the same kind already present - overwrite it
                lines[idx] = '%s: %s' % (header, value)
                return lines
            if prio > targetprio and idx < insertat:
                insertat = idx
        else:
            # first non-header line ends the header block
            if line:
                # keep a separating blank line between headers and body
                lines.insert(idx, '')
            if idx < insertat:
                insertat = idx
            break
    lines.insert(insertat, '%s: %s' % (header, value))
    return lines
230
230
231 class patchheader(object):
231 class patchheader(object):
232 def __init__(self, pf, plainmode=False):
232 def __init__(self, pf, plainmode=False):
233 def eatdiff(lines):
233 def eatdiff(lines):
234 while lines:
234 while lines:
235 l = lines[-1]
235 l = lines[-1]
236 if (l.startswith("diff -") or
236 if (l.startswith("diff -") or
237 l.startswith("Index:") or
237 l.startswith("Index:") or
238 l.startswith("===========")):
238 l.startswith("===========")):
239 del lines[-1]
239 del lines[-1]
240 else:
240 else:
241 break
241 break
242 def eatempty(lines):
242 def eatempty(lines):
243 while lines:
243 while lines:
244 if not lines[-1].strip():
244 if not lines[-1].strip():
245 del lines[-1]
245 del lines[-1]
246 else:
246 else:
247 break
247 break
248
248
249 message = []
249 message = []
250 comments = []
250 comments = []
251 user = None
251 user = None
252 date = None
252 date = None
253 parent = None
253 parent = None
254 format = None
254 format = None
255 subject = None
255 subject = None
256 branch = None
256 branch = None
257 nodeid = None
257 nodeid = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if (line.startswith('diff --git')
262 if (line.startswith('diff --git')
263 or (diffstart and line.startswith('+++ '))):
263 or (diffstart and line.startswith('+++ '))):
264 diffstart = 2
264 diffstart = 2
265 break
265 break
266 diffstart = 0 # reset
266 diffstart = 0 # reset
267 if line.startswith("--- "):
267 if line.startswith("--- "):
268 diffstart = 1
268 diffstart = 1
269 continue
269 continue
270 elif format == "hgpatch":
270 elif format == "hgpatch":
271 # parse values when importing the result of an hg export
271 # parse values when importing the result of an hg export
272 if line.startswith("# User "):
272 if line.startswith("# User "):
273 user = line[7:]
273 user = line[7:]
274 elif line.startswith("# Date "):
274 elif line.startswith("# Date "):
275 date = line[7:]
275 date = line[7:]
276 elif line.startswith("# Parent "):
276 elif line.startswith("# Parent "):
277 parent = line[9:].lstrip() # handle double trailing space
277 parent = line[9:].lstrip() # handle double trailing space
278 elif line.startswith("# Branch "):
278 elif line.startswith("# Branch "):
279 branch = line[9:]
279 branch = line[9:]
280 elif line.startswith("# Node ID "):
280 elif line.startswith("# Node ID "):
281 nodeid = line[10:]
281 nodeid = line[10:]
282 elif not line.startswith("# ") and line:
282 elif not line.startswith("# ") and line:
283 message.append(line)
283 message.append(line)
284 format = None
284 format = None
285 elif line == '# HG changeset patch':
285 elif line == '# HG changeset patch':
286 message = []
286 message = []
287 format = "hgpatch"
287 format = "hgpatch"
288 elif (format != "tagdone" and (line.startswith("Subject: ") or
288 elif (format != "tagdone" and (line.startswith("Subject: ") or
289 line.startswith("subject: "))):
289 line.startswith("subject: "))):
290 subject = line[9:]
290 subject = line[9:]
291 format = "tag"
291 format = "tag"
292 elif (format != "tagdone" and (line.startswith("From: ") or
292 elif (format != "tagdone" and (line.startswith("From: ") or
293 line.startswith("from: "))):
293 line.startswith("from: "))):
294 user = line[6:]
294 user = line[6:]
295 format = "tag"
295 format = "tag"
296 elif (format != "tagdone" and (line.startswith("Date: ") or
296 elif (format != "tagdone" and (line.startswith("Date: ") or
297 line.startswith("date: "))):
297 line.startswith("date: "))):
298 date = line[6:]
298 date = line[6:]
299 format = "tag"
299 format = "tag"
300 elif format == "tag" and line == "":
300 elif format == "tag" and line == "":
301 # when looking for tags (subject: from: etc) they
301 # when looking for tags (subject: from: etc) they
302 # end once you find a blank line in the source
302 # end once you find a blank line in the source
303 format = "tagdone"
303 format = "tagdone"
304 elif message or line:
304 elif message or line:
305 message.append(line)
305 message.append(line)
306 comments.append(line)
306 comments.append(line)
307
307
308 eatdiff(message)
308 eatdiff(message)
309 eatdiff(comments)
309 eatdiff(comments)
310 # Remember the exact starting line of the patch diffs before consuming
310 # Remember the exact starting line of the patch diffs before consuming
311 # empty lines, for external use by TortoiseHg and others
311 # empty lines, for external use by TortoiseHg and others
312 self.diffstartline = len(comments)
312 self.diffstartline = len(comments)
313 eatempty(message)
313 eatempty(message)
314 eatempty(comments)
314 eatempty(comments)
315
315
316 # make sure message isn't empty
316 # make sure message isn't empty
317 if format and format.startswith("tag") and subject:
317 if format and format.startswith("tag") and subject:
318 message.insert(0, subject)
318 message.insert(0, subject)
319
319
320 self.message = message
320 self.message = message
321 self.comments = comments
321 self.comments = comments
322 self.user = user
322 self.user = user
323 self.date = date
323 self.date = date
324 self.parent = parent
324 self.parent = parent
325 # nodeid and branch are for external use by TortoiseHg and others
325 # nodeid and branch are for external use by TortoiseHg and others
326 self.nodeid = nodeid
326 self.nodeid = nodeid
327 self.branch = branch
327 self.branch = branch
328 self.haspatch = diffstart > 1
328 self.haspatch = diffstart > 1
329 self.plainmode = (plainmode or
329 self.plainmode = (plainmode or
330 '# HG changeset patch' not in self.comments and
330 '# HG changeset patch' not in self.comments and
331 any(c.startswith('Date: ') or
331 any(c.startswith('Date: ') or
332 c.startswith('From: ')
332 c.startswith('From: ')
333 for c in self.comments))
333 for c in self.comments))
334
334
335 def setuser(self, user):
335 def setuser(self, user):
336 try:
336 try:
337 inserthgheader(self.comments, '# User ', user)
337 inserthgheader(self.comments, '# User ', user)
338 except ValueError:
338 except ValueError:
339 if self.plainmode:
339 if self.plainmode:
340 insertplainheader(self.comments, 'From', user)
340 insertplainheader(self.comments, 'From', user)
341 else:
341 else:
342 tmp = ['# HG changeset patch', '# User ' + user]
342 tmp = ['# HG changeset patch', '# User ' + user]
343 self.comments = tmp + self.comments
343 self.comments = tmp + self.comments
344 self.user = user
344 self.user = user
345
345
346 def setdate(self, date):
346 def setdate(self, date):
347 try:
347 try:
348 inserthgheader(self.comments, '# Date ', date)
348 inserthgheader(self.comments, '# Date ', date)
349 except ValueError:
349 except ValueError:
350 if self.plainmode:
350 if self.plainmode:
351 insertplainheader(self.comments, 'Date', date)
351 insertplainheader(self.comments, 'Date', date)
352 else:
352 else:
353 tmp = ['# HG changeset patch', '# Date ' + date]
353 tmp = ['# HG changeset patch', '# Date ' + date]
354 self.comments = tmp + self.comments
354 self.comments = tmp + self.comments
355 self.date = date
355 self.date = date
356
356
357 def setparent(self, parent):
357 def setparent(self, parent):
358 try:
358 try:
359 inserthgheader(self.comments, '# Parent ', parent)
359 inserthgheader(self.comments, '# Parent ', parent)
360 except ValueError:
360 except ValueError:
361 if not self.plainmode:
361 if not self.plainmode:
362 tmp = ['# HG changeset patch', '# Parent ' + parent]
362 tmp = ['# HG changeset patch', '# Parent ' + parent]
363 self.comments = tmp + self.comments
363 self.comments = tmp + self.comments
364 self.parent = parent
364 self.parent = parent
365
365
366 def setmessage(self, message):
366 def setmessage(self, message):
367 if self.comments:
367 if self.comments:
368 self._delmsg()
368 self._delmsg()
369 self.message = [message]
369 self.message = [message]
370 if message:
370 if message:
371 if self.plainmode and self.comments and self.comments[-1]:
371 if self.plainmode and self.comments and self.comments[-1]:
372 self.comments.append('')
372 self.comments.append('')
373 self.comments.append(message)
373 self.comments.append(message)
374
374
375 def __str__(self):
375 def __str__(self):
376 s = '\n'.join(self.comments).rstrip()
376 s = '\n'.join(self.comments).rstrip()
377 if not s:
377 if not s:
378 return ''
378 return ''
379 return s + '\n\n'
379 return s + '\n\n'
380
380
381 def _delmsg(self):
381 def _delmsg(self):
382 '''Remove existing message, keeping the rest of the comments fields.
382 '''Remove existing message, keeping the rest of the comments fields.
383 If comments contains 'subject: ', message will prepend
383 If comments contains 'subject: ', message will prepend
384 the field and a blank line.'''
384 the field and a blank line.'''
385 if self.message:
385 if self.message:
386 subj = 'subject: ' + self.message[0].lower()
386 subj = 'subject: ' + self.message[0].lower()
387 for i in xrange(len(self.comments)):
387 for i in xrange(len(self.comments)):
388 if subj == self.comments[i].lower():
388 if subj == self.comments[i].lower():
389 del self.comments[i]
389 del self.comments[i]
390 self.message = self.message[2:]
390 self.message = self.message[2:]
391 break
391 break
392 ci = 0
392 ci = 0
393 for mi in self.message:
393 for mi in self.message:
394 while mi != self.comments[ci]:
394 while mi != self.comments[ci]:
395 ci += 1
395 ci += 1
396 del self.comments[ci]
396 del self.comments[ci]
397
397
def newcommit(repo, phase, *args, **kwargs):
    """helper dedicated to ensure a commit respect mq.secret setting

    It should be used instead of repo.commit inside the mq source for operation
    creating new changeset.
    """
    repo = repo.unfiltered()
    if phase is None and repo.ui.configbool('mq', 'secret', False):
        phase = phases.secret
    overrides = {('ui', 'allowemptycommit'): True}
    if phase is not None:
        overrides[('phases', 'new-commit')] = phase
    with repo.ui.configoverride(overrides, 'mq'):
        repo.ui.setconfig('ui', 'allowemptycommit', True)
        return repo.commit(*args, **kwargs)
414
414
class AbortNoCleanup(error.Abort):
    """Abort an mq operation without rolling back what was already done."""
417
417
418 class queue(object):
418 class queue(object):
419 def __init__(self, ui, baseui, path, patchdir=None):
419 def __init__(self, ui, baseui, path, patchdir=None):
420 self.basepath = path
420 self.basepath = path
421 try:
421 try:
422 fh = open(os.path.join(path, 'patches.queue'))
422 fh = open(os.path.join(path, 'patches.queue'))
423 cur = fh.read().rstrip()
423 cur = fh.read().rstrip()
424 fh.close()
424 fh.close()
425 if not cur:
425 if not cur:
426 curpath = os.path.join(path, 'patches')
426 curpath = os.path.join(path, 'patches')
427 else:
427 else:
428 curpath = os.path.join(path, 'patches-' + cur)
428 curpath = os.path.join(path, 'patches-' + cur)
429 except IOError:
429 except IOError:
430 curpath = os.path.join(path, 'patches')
430 curpath = os.path.join(path, 'patches')
431 self.path = patchdir or curpath
431 self.path = patchdir or curpath
432 self.opener = vfsmod.vfs(self.path)
432 self.opener = vfsmod.vfs(self.path)
433 self.ui = ui
433 self.ui = ui
434 self.baseui = baseui
434 self.baseui = baseui
435 self.applieddirty = False
435 self.applieddirty = False
436 self.seriesdirty = False
436 self.seriesdirty = False
437 self.added = []
437 self.added = []
438 self.seriespath = "series"
438 self.seriespath = "series"
439 self.statuspath = "status"
439 self.statuspath = "status"
440 self.guardspath = "guards"
440 self.guardspath = "guards"
441 self.activeguards = None
441 self.activeguards = None
442 self.guardsdirty = False
442 self.guardsdirty = False
443 # Handle mq.git as a bool with extended values
443 # Handle mq.git as a bool with extended values
444 try:
444 try:
445 gitmode = ui.configbool('mq', 'git', None)
445 gitmode = ui.configbool('mq', 'git', None)
446 if gitmode is None:
446 if gitmode is None:
447 raise error.ConfigError
447 raise error.ConfigError
448 if gitmode:
448 if gitmode:
449 self.gitmode = 'yes'
449 self.gitmode = 'yes'
450 else:
450 else:
451 self.gitmode = 'no'
451 self.gitmode = 'no'
452 except error.ConfigError:
452 except error.ConfigError:
453 # let's have check-config ignore the type mismatch
453 # let's have check-config ignore the type mismatch
454 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
454 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
455 # deprecated config: mq.plain
455 # deprecated config: mq.plain
456 self.plainmode = ui.configbool('mq', 'plain', False)
456 self.plainmode = ui.configbool('mq', 'plain', False)
457 self.checkapplied = True
457 self.checkapplied = True
458
458
459 @util.propertycache
459 @util.propertycache
460 def applied(self):
460 def applied(self):
461 def parselines(lines):
461 def parselines(lines):
462 for l in lines:
462 for l in lines:
463 entry = l.split(':', 1)
463 entry = l.split(':', 1)
464 if len(entry) > 1:
464 if len(entry) > 1:
465 n, name = entry
465 n, name = entry
466 yield statusentry(bin(n), name)
466 yield statusentry(bin(n), name)
467 elif l.strip():
467 elif l.strip():
468 self.ui.warn(_('malformated mq status line: %s\n') % entry)
468 self.ui.warn(_('malformated mq status line: %s\n') % entry)
469 # else we ignore empty lines
469 # else we ignore empty lines
470 try:
470 try:
471 lines = self.opener.read(self.statuspath).splitlines()
471 lines = self.opener.read(self.statuspath).splitlines()
472 return list(parselines(lines))
472 return list(parselines(lines))
473 except IOError as e:
473 except IOError as e:
474 if e.errno == errno.ENOENT:
474 if e.errno == errno.ENOENT:
475 return []
475 return []
476 raise
476 raise
477
477
478 @util.propertycache
478 @util.propertycache
479 def fullseries(self):
479 def fullseries(self):
480 try:
480 try:
481 return self.opener.read(self.seriespath).splitlines()
481 return self.opener.read(self.seriespath).splitlines()
482 except IOError as e:
482 except IOError as e:
483 if e.errno == errno.ENOENT:
483 if e.errno == errno.ENOENT:
484 return []
484 return []
485 raise
485 raise
486
486
    @util.propertycache
    def series(self):
        # parseseries() stores the parsed list into this propertycache
        # slot, so the attribute read below returns the fresh value
        self.parseseries()
        return self.series
491
491
    @util.propertycache
    def seriesguards(self):
        # parseseries() stores the parsed guard lists into this
        # propertycache slot, so the attribute read below is fresh
        self.parseseries()
        return self.seriesguards
496
496
497 def invalidate(self):
497 def invalidate(self):
498 for a in 'applied fullseries series seriesguards'.split():
498 for a in 'applied fullseries series seriesguards'.split():
499 if a in self.__dict__:
499 if a in self.__dict__:
500 delattr(self, a)
500 delattr(self, a)
501 self.applieddirty = False
501 self.applieddirty = False
502 self.seriesdirty = False
502 self.seriesdirty = False
503 self.guardsdirty = False
503 self.guardsdirty = False
504 self.activeguards = None
504 self.activeguards = None
505
505
506 def diffopts(self, opts=None, patchfn=None):
506 def diffopts(self, opts=None, patchfn=None):
507 diffopts = patchmod.diffopts(self.ui, opts)
507 diffopts = patchmod.diffopts(self.ui, opts)
508 if self.gitmode == 'auto':
508 if self.gitmode == 'auto':
509 diffopts.upgrade = True
509 diffopts.upgrade = True
510 elif self.gitmode == 'keep':
510 elif self.gitmode == 'keep':
511 pass
511 pass
512 elif self.gitmode in ('yes', 'no'):
512 elif self.gitmode in ('yes', 'no'):
513 diffopts.git = self.gitmode == 'yes'
513 diffopts.git = self.gitmode == 'yes'
514 else:
514 else:
515 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
515 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
516 ' got %s') % self.gitmode)
516 ' got %s') % self.gitmode)
517 if patchfn:
517 if patchfn:
518 diffopts = self.patchopts(diffopts, patchfn)
518 diffopts = self.patchopts(diffopts, patchfn)
519 return diffopts
519 return diffopts
520
520
521 def patchopts(self, diffopts, *patches):
521 def patchopts(self, diffopts, *patches):
522 """Return a copy of input diff options with git set to true if
522 """Return a copy of input diff options with git set to true if
523 referenced patch is a git patch and should be preserved as such.
523 referenced patch is a git patch and should be preserved as such.
524 """
524 """
525 diffopts = diffopts.copy()
525 diffopts = diffopts.copy()
526 if not diffopts.git and self.gitmode == 'keep':
526 if not diffopts.git and self.gitmode == 'keep':
527 for patchfn in patches:
527 for patchfn in patches:
528 patchf = self.opener(patchfn, 'r')
528 patchf = self.opener(patchfn, 'r')
529 # if the patch was a git patch, refresh it as a git patch
529 # if the patch was a git patch, refresh it as a git patch
530 for line in patchf:
530 for line in patchf:
531 if line.startswith('diff --git'):
531 if line.startswith('diff --git'):
532 diffopts.git = True
532 diffopts.git = True
533 break
533 break
534 patchf.close()
534 patchf.close()
535 return diffopts
535 return diffopts
536
536
    def join(self, *p):
        """Join path components *p* under the queue's patch directory."""
        return os.path.join(self.path, *p)
539
539
540 def findseries(self, patch):
540 def findseries(self, patch):
541 def matchpatch(l):
541 def matchpatch(l):
542 l = l.split('#', 1)[0]
542 l = l.split('#', 1)[0]
543 return l.strip() == patch
543 return l.strip() == patch
544 for index, l in enumerate(self.fullseries):
544 for index, l in enumerate(self.fullseries):
545 if matchpatch(l):
545 if matchpatch(l):
546 return index
546 return index
547 return None
547 return None
548
548
    # matches an optional space, '#', then a '+'/'-' prefixed guard name
    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
550
550
551 def parseseries(self):
551 def parseseries(self):
552 self.series = []
552 self.series = []
553 self.seriesguards = []
553 self.seriesguards = []
554 for l in self.fullseries:
554 for l in self.fullseries:
555 h = l.find('#')
555 h = l.find('#')
556 if h == -1:
556 if h == -1:
557 patch = l
557 patch = l
558 comment = ''
558 comment = ''
559 elif h == 0:
559 elif h == 0:
560 continue
560 continue
561 else:
561 else:
562 patch = l[:h]
562 patch = l[:h]
563 comment = l[h:]
563 comment = l[h:]
564 patch = patch.strip()
564 patch = patch.strip()
565 if patch:
565 if patch:
566 if patch in self.series:
566 if patch in self.series:
567 raise error.Abort(_('%s appears more than once in %s') %
567 raise error.Abort(_('%s appears more than once in %s') %
568 (patch, self.join(self.seriespath)))
568 (patch, self.join(self.seriespath)))
569 self.series.append(patch)
569 self.series.append(patch)
570 self.seriesguards.append(self.guard_re.findall(comment))
570 self.seriesguards.append(self.guard_re.findall(comment))
571
571
572 def checkguard(self, guard):
572 def checkguard(self, guard):
573 if not guard:
573 if not guard:
574 return _('guard cannot be an empty string')
574 return _('guard cannot be an empty string')
575 bad_chars = '# \t\r\n\f'
575 bad_chars = '# \t\r\n\f'
576 first = guard[0]
576 first = guard[0]
577 if first in '-+':
577 if first in '-+':
578 return (_('guard %r starts with invalid character: %r') %
578 return (_('guard %r starts with invalid character: %r') %
579 (guard, first))
579 (guard, first))
580 for c in bad_chars:
580 for c in bad_chars:
581 if c in guard:
581 if c in guard:
582 return _('invalid character in guard %r: %r') % (guard, c)
582 return _('invalid character in guard %r: %r') % (guard, c)
583
583
584 def setactive(self, guards):
584 def setactive(self, guards):
585 for guard in guards:
585 for guard in guards:
586 bad = self.checkguard(guard)
586 bad = self.checkguard(guard)
587 if bad:
587 if bad:
588 raise error.Abort(bad)
588 raise error.Abort(bad)
589 guards = sorted(set(guards))
589 guards = sorted(set(guards))
590 self.ui.debug('active guards: %s\n' % ' '.join(guards))
590 self.ui.debug('active guards: %s\n' % ' '.join(guards))
591 self.activeguards = guards
591 self.activeguards = guards
592 self.guardsdirty = True
592 self.guardsdirty = True
593
593
594 def active(self):
594 def active(self):
595 if self.activeguards is None:
595 if self.activeguards is None:
596 self.activeguards = []
596 self.activeguards = []
597 try:
597 try:
598 guards = self.opener.read(self.guardspath).split()
598 guards = self.opener.read(self.guardspath).split()
599 except IOError as err:
599 except IOError as err:
600 if err.errno != errno.ENOENT:
600 if err.errno != errno.ENOENT:
601 raise
601 raise
602 guards = []
602 guards = []
603 for i, guard in enumerate(guards):
603 for i, guard in enumerate(guards):
604 bad = self.checkguard(guard)
604 bad = self.checkguard(guard)
605 if bad:
605 if bad:
606 self.ui.warn('%s:%d: %s\n' %
606 self.ui.warn('%s:%d: %s\n' %
607 (self.join(self.guardspath), i + 1, bad))
607 (self.join(self.guardspath), i + 1, bad))
608 else:
608 else:
609 self.activeguards.append(guard)
609 self.activeguards.append(guard)
610 return self.activeguards
610 return self.activeguards
611
611
612 def setguards(self, idx, guards):
612 def setguards(self, idx, guards):
613 for g in guards:
613 for g in guards:
614 if len(g) < 2:
614 if len(g) < 2:
615 raise error.Abort(_('guard %r too short') % g)
615 raise error.Abort(_('guard %r too short') % g)
616 if g[0] not in '-+':
616 if g[0] not in '-+':
617 raise error.Abort(_('guard %r starts with invalid char') % g)
617 raise error.Abort(_('guard %r starts with invalid char') % g)
618 bad = self.checkguard(g[1:])
618 bad = self.checkguard(g[1:])
619 if bad:
619 if bad:
620 raise error.Abort(bad)
620 raise error.Abort(bad)
621 drop = self.guard_re.sub('', self.fullseries[idx])
621 drop = self.guard_re.sub('', self.fullseries[idx])
622 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
622 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
623 self.parseseries()
623 self.parseseries()
624 self.seriesdirty = True
624 self.seriesdirty = True
625
625
626 def pushable(self, idx):
626 def pushable(self, idx):
627 if isinstance(idx, str):
627 if isinstance(idx, str):
628 idx = self.series.index(idx)
628 idx = self.series.index(idx)
629 patchguards = self.seriesguards[idx]
629 patchguards = self.seriesguards[idx]
630 if not patchguards:
630 if not patchguards:
631 return True, None
631 return True, None
632 guards = self.active()
632 guards = self.active()
633 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
633 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
634 if exactneg:
634 if exactneg:
635 return False, repr(exactneg[0])
635 return False, repr(exactneg[0])
636 pos = [g for g in patchguards if g[0] == '+']
636 pos = [g for g in patchguards if g[0] == '+']
637 exactpos = [g for g in pos if g[1:] in guards]
637 exactpos = [g for g in pos if g[1:] in guards]
638 if pos:
638 if pos:
639 if exactpos:
639 if exactpos:
640 return True, repr(exactpos[0])
640 return True, repr(exactpos[0])
641 return False, ' '.join(map(repr, pos))
641 return False, ' '.join(map(repr, pos))
642 return True, ''
642 return True, ''
643
643
644 def explainpushable(self, idx, all_patches=False):
644 def explainpushable(self, idx, all_patches=False):
645 if all_patches:
645 if all_patches:
646 write = self.ui.write
646 write = self.ui.write
647 else:
647 else:
648 write = self.ui.warn
648 write = self.ui.warn
649
649
650 if all_patches or self.ui.verbose:
650 if all_patches or self.ui.verbose:
651 if isinstance(idx, str):
651 if isinstance(idx, str):
652 idx = self.series.index(idx)
652 idx = self.series.index(idx)
653 pushable, why = self.pushable(idx)
653 pushable, why = self.pushable(idx)
654 if all_patches and pushable:
654 if all_patches and pushable:
655 if why is None:
655 if why is None:
656 write(_('allowing %s - no guards in effect\n') %
656 write(_('allowing %s - no guards in effect\n') %
657 self.series[idx])
657 self.series[idx])
658 else:
658 else:
659 if not why:
659 if not why:
660 write(_('allowing %s - no matching negative guards\n') %
660 write(_('allowing %s - no matching negative guards\n') %
661 self.series[idx])
661 self.series[idx])
662 else:
662 else:
663 write(_('allowing %s - guarded by %s\n') %
663 write(_('allowing %s - guarded by %s\n') %
664 (self.series[idx], why))
664 (self.series[idx], why))
665 if not pushable:
665 if not pushable:
666 if why:
666 if why:
667 write(_('skipping %s - guarded by %s\n') %
667 write(_('skipping %s - guarded by %s\n') %
668 (self.series[idx], why))
668 (self.series[idx], why))
669 else:
669 else:
670 write(_('skipping %s - no matching guards\n') %
670 write(_('skipping %s - no matching guards\n') %
671 self.series[idx])
671 self.series[idx])
672
672
673 def savedirty(self):
673 def savedirty(self):
674 def writelist(items, path):
674 def writelist(items, path):
675 fp = self.opener(path, 'w')
675 fp = self.opener(path, 'w')
676 for i in items:
676 for i in items:
677 fp.write("%s\n" % i)
677 fp.write("%s\n" % i)
678 fp.close()
678 fp.close()
679 if self.applieddirty:
679 if self.applieddirty:
680 writelist(map(str, self.applied), self.statuspath)
680 writelist(map(str, self.applied), self.statuspath)
681 self.applieddirty = False
681 self.applieddirty = False
682 if self.seriesdirty:
682 if self.seriesdirty:
683 writelist(self.fullseries, self.seriespath)
683 writelist(self.fullseries, self.seriespath)
684 self.seriesdirty = False
684 self.seriesdirty = False
685 if self.guardsdirty:
685 if self.guardsdirty:
686 writelist(self.activeguards, self.guardspath)
686 writelist(self.activeguards, self.guardspath)
687 self.guardsdirty = False
687 self.guardsdirty = False
688 if self.added:
688 if self.added:
689 qrepo = self.qrepo()
689 qrepo = self.qrepo()
690 if qrepo:
690 if qrepo:
691 qrepo[None].add(f for f in self.added if f not in qrepo[None])
691 qrepo[None].add(f for f in self.added if f not in qrepo[None])
692 self.added = []
692 self.added = []
693
693
    def removeundo(self, repo):
        """Remove the repository's undo file, warning (not failing) on error."""
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError as inst:
            self.ui.warn(_('error removing undo: %s\n') % str(inst))
702
702
703 def backup(self, repo, files, copy=False):
703 def backup(self, repo, files, copy=False):
704 # backup local changes in --force case
704 # backup local changes in --force case
705 for f in sorted(files):
705 for f in sorted(files):
706 absf = repo.wjoin(f)
706 absf = repo.wjoin(f)
707 if os.path.lexists(absf):
707 if os.path.lexists(absf):
708 self.ui.note(_('saving current version of %s as %s\n') %
708 self.ui.note(_('saving current version of %s as %s\n') %
709 (f, scmutil.origpath(self.ui, repo, f)))
709 (f, scmutil.origpath(self.ui, repo, f)))
710
710
711 absorig = scmutil.origpath(self.ui, repo, absf)
711 absorig = scmutil.origpath(self.ui, repo, absf)
712 if copy:
712 if copy:
713 util.copyfile(absf, absorig)
713 util.copyfile(absf, absorig)
714 else:
714 else:
715 util.rename(absf, absorig)
715 util.rename(absf, absorig)
716
716
    def printdiff(self, repo, diffopts, node1, node2=None, files=None,
                  fp=None, changes=None, opts=None):
        """Write the diff (or diffstat, per opts) between node1 and node2."""
        if opts is None:
            opts = {}
        stat = opts.get('stat')
        m = scmutil.match(repo[node1], files, opts)
        cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
                               changes, stat, fp)
725
725
    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        """Apply *patch* on *head*; on failure, merge with *rev* instead.

        Returns (err, node); err is 0 on success. The patch file is
        rewritten from the merged result when merging was needed.
        """
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise error.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        strip(self.ui, repo, [n], update=False, backup=False)

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise error.Abort(_("update returned %d") % ret)
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise error.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise error.Abort(_("unable to read %s") % patch)

        # regenerate the patch file from the merged result
        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = str(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
764
764
    def qparents(self, repo, rev=None):
        """return the mq handled parent or p1

        In some cases where mq finds itself being the parent of a merge, the
        appropriate parent may be p2.
        (e.g. an in-progress merge started with mq disabled)

        If no parent is managed by mq, p1 is returned.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                return p1
            if not self.applied:
                return None
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            return p2
        return p1
785
785
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Merge the patches named in *series* from *mergeq*.

        Returns (err, head); err is non-zero on the first failure.
        """
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                # guarded patches are skipped, not errors
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
824
824
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file

        Returns (success, touched files, fuzz).'''
        files = set()
        try:
            fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
                                  files=files, eolmode=None)
            return (True, list(files), fuzz)
        except Exception as inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            # full traceback only shows up in verbose/traceback mode
            self.ui.traceback()
            return (False, list(files), False)
839
839
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        """Apply *series* under wlock/lock/transaction; see _apply for the
        meaning of the arguments and the returned error codes."""
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # deliberately keep the transaction and dirty state committed
                tr.close()
                self.savedirty()
                raise
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
868
868
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.

        Applies each pushable patch in *series* in order, committing one
        changeset per patch; stops at the first failure.  The returned
        hash is the node of the last commit created (or None).
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # back up only the to-be-touched files we were asked
                    # to protect, then shrink the remaining backup set
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("conflicting local changes found"),
                            hint=_("did you forget to qrefresh?"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                # self.patch() returns True on success; invert so that a
                # truthy patcherr means failure below
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                repo.dirstate.beginparentchange()
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)
                repo.dirstate.endparentchange()

            if all_files and '.hgsubstate' in all_files:
                # the patch touched subrepo state: merge .hgsubstate and
                # include the merged subrepos in the commit
                wctx = repo[None]
                pctx = repo['.']
                overwrite = False
                mergedsubstate = subrepo.submerge(repo, pctx, wctx, wctx,
                    overwrite)
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise error.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise error.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working "
                               "directory\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
975
975
    def _cleanup(self, patches, numrevs, keep=False):
        """Drop *patches* from the queue's series/status bookkeeping.

        Unless *keep* is set, also forget the patch files from the queue
        repo (if any) and delete them from disk.  The first *numrevs*
        entries of self.applied are removed as finished.  Aborts if any
        name cannot be located in the series file.  Returns the nodes of
        the finished applied entries.
        """
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError as inst:
                    # an already-missing patch file is fine
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        # delete from the end so earlier indexes stay valid
        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise error.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
1016
1016
1017 def _revpatches(self, repo, revs):
1017 def _revpatches(self, repo, revs):
1018 firstrev = repo[self.applied[0].node].rev()
1018 firstrev = repo[self.applied[0].node].rev()
1019 patches = []
1019 patches = []
1020 for i, rev in enumerate(revs):
1020 for i, rev in enumerate(revs):
1021
1021
1022 if rev < firstrev:
1022 if rev < firstrev:
1023 raise error.Abort(_('revision %d is not managed') % rev)
1023 raise error.Abort(_('revision %d is not managed') % rev)
1024
1024
1025 ctx = repo[rev]
1025 ctx = repo[rev]
1026 base = self.applied[i].node
1026 base = self.applied[i].node
1027 if ctx.node() != base:
1027 if ctx.node() != base:
1028 msg = _('cannot delete revision %d above applied patches')
1028 msg = _('cannot delete revision %d above applied patches')
1029 raise error.Abort(msg % rev)
1029 raise error.Abort(msg % rev)
1030
1030
1031 patch = self.applied[i].name
1031 patch = self.applied[i].name
1032 for fmt in ('[mq]: %s', 'imported patch %s'):
1032 for fmt in ('[mq]: %s', 'imported patch %s'):
1033 if ctx.description() == fmt % patch:
1033 if ctx.description() == fmt % patch:
1034 msg = _('patch %s finalized without changeset message\n')
1034 msg = _('patch %s finalized without changeset message\n')
1035 repo.ui.status(msg % patch)
1035 repo.ui.status(msg % patch)
1036 break
1036 break
1037
1037
1038 patches.append(patch)
1038 patches.append(patch)
1039 return patches
1039 return patches
1040
1040
    def finish(self, repo, revs):
        """Move the applied changesets in *revs* out of mq control.

        Deletes the corresponding patch files and, when the mq 'secret'
        option is set, advances the finished changesets' phase from
        secret back to the configured new-commit phase.
        """
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool('mq', 'secret', False):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = repo.ui.config('phases', 'new-commit', phases.draft)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                with repo.transaction('qfinish') as tr:
                    phases.advanceboundary(repo, tr, tphase, qfinished)
1054
1054
    def delete(self, repo, patches, opts):
        """qdelete: remove patches, by name and/or by revision (--rev).

        Applied patches may only be removed via opts['rev'];
        opts['keep'] preserves the patch files on disk.
        """
        if not patches and not opts.get('rev'):
            raise error.Abort(_('qdelete requires at least one revision or '
                                'patch name'))

        realpatches = []
        for patch in patches:
            # strict lookup: exact names/offsets only, no fuzzy matching
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise error.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise error.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise error.Abort(_('no patches applied'))
            revs = scmutil.revrange(repo, opts.get('rev'))
            revs.sort()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))
1082
1082
1083 def checktoppatch(self, repo):
1083 def checktoppatch(self, repo):
1084 '''check that working directory is at qtip'''
1084 '''check that working directory is at qtip'''
1085 if self.applied:
1085 if self.applied:
1086 top = self.applied[-1].node
1086 top = self.applied[-1].node
1087 patch = self.applied[-1].name
1087 patch = self.applied[-1].name
1088 if repo.dirstate.p1() != top:
1088 if repo.dirstate.p1() != top:
1089 raise error.Abort(_("working directory revision is not qtip"))
1089 raise error.Abort(_("working directory revision is not qtip"))
1090 return top, patch
1090 return top, patch
1091 return None, None
1091 return None, None
1092
1092
1093 def putsubstate2changes(self, substatestate, changes):
1093 def putsubstate2changes(self, substatestate, changes):
1094 for files in changes[:3]:
1094 for files in changes[:3]:
1095 if '.hgsubstate' in files:
1095 if '.hgsubstate' in files:
1096 return # already listed up
1096 return # already listed up
1097 # not yet listed up
1097 # not yet listed up
1098 if substatestate in 'a?':
1098 if substatestate in 'a?':
1099 changes[1].append('.hgsubstate')
1099 changes[1].append('.hgsubstate')
1100 elif substatestate in 'r':
1100 elif substatestate in 'r':
1101 changes[2].append('.hgsubstate')
1101 changes[2].append('.hgsubstate')
1102 else: # modified
1102 else: # modified
1103 changes[0].append('.hgsubstate')
1103 changes[0].append('.hgsubstate')
1104
1104
1105 def checklocalchanges(self, repo, force=False, refresh=True):
1105 def checklocalchanges(self, repo, force=False, refresh=True):
1106 excsuffix = ''
1106 excsuffix = ''
1107 if refresh:
1107 if refresh:
1108 excsuffix = ', qrefresh first'
1108 excsuffix = ', qrefresh first'
1109 # plain versions for i18n tool to detect them
1109 # plain versions for i18n tool to detect them
1110 _("local changes found, qrefresh first")
1110 _("local changes found, qrefresh first")
1111 _("local changed subrepos found, qrefresh first")
1111 _("local changed subrepos found, qrefresh first")
1112 return checklocalchanges(repo, force, excsuffix)
1112 return checklocalchanges(repo, force, excsuffix)
1113
1113
    # patch names that would collide with mq's own control files or
    # with directory entries
    _reserved = ('series', 'status', 'guards', '.', '..')
    def checkreservedname(self, name):
        # Reject patch names that cannot round-trip through the series
        # file or that would collide with queue control files.
        if name in self._reserved:
            raise error.Abort(_('"%s" cannot be used as the name of a patch')
                              % name)
        if name != name.strip():
            # whitespace is stripped by parseseries()
            raise error.Abort(_('patch name cannot begin or end with '
                                'whitespace'))
        for prefix in ('.hg', '.mq'):
            if name.startswith(prefix):
                raise error.Abort(_('patch name cannot begin with "%s"')
                                  % prefix)
        # '#' starts a comment and ':' a guard in the series file;
        # CR/LF would break the one-name-per-line format
        for c in ('#', ':', '\r', '\n'):
            if c in name:
                raise error.Abort(_('%r cannot be used in the name of a patch')
                                  % c)
1131
1131
1132 def checkpatchname(self, name, force=False):
1132 def checkpatchname(self, name, force=False):
1133 self.checkreservedname(name)
1133 self.checkreservedname(name)
1134 if not force and os.path.exists(self.join(name)):
1134 if not force and os.path.exists(self.join(name)):
1135 if os.path.isdir(self.join(name)):
1135 if os.path.isdir(self.join(name)):
1136 raise error.Abort(_('"%s" already exists as a directory')
1136 raise error.Abort(_('"%s" already exists as a directory')
1137 % name)
1137 % name)
1138 else:
1138 else:
1139 raise error.Abort(_('patch "%s" already exists') % name)
1139 raise error.Abort(_('patch "%s" already exists') % name)
1140
1140
1141 def makepatchname(self, title, fallbackname):
1141 def makepatchname(self, title, fallbackname):
1142 """Return a suitable filename for title, adding a suffix to make
1142 """Return a suitable filename for title, adding a suffix to make
1143 it unique in the existing list"""
1143 it unique in the existing list"""
1144 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
1144 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
1145 namebase = namebase[:75] # avoid too long name (issue5117)
1145 namebase = namebase[:75] # avoid too long name (issue5117)
1146 if namebase:
1146 if namebase:
1147 try:
1147 try:
1148 self.checkreservedname(namebase)
1148 self.checkreservedname(namebase)
1149 except error.Abort:
1149 except error.Abort:
1150 namebase = fallbackname
1150 namebase = fallbackname
1151 else:
1151 else:
1152 namebase = fallbackname
1152 namebase = fallbackname
1153 name = namebase
1153 name = namebase
1154 i = 0
1154 i = 0
1155 while True:
1155 while True:
1156 if name not in self.fullseries:
1156 if name not in self.fullseries:
1157 try:
1157 try:
1158 self.checkpatchname(name)
1158 self.checkpatchname(name)
1159 break
1159 break
1160 except error.Abort:
1160 except error.Abort:
1161 pass
1161 pass
1162 i += 1
1162 i += 1
1163 name = '%s__%s' % (namebase, i)
1163 name = '%s__%s' % (namebase, i)
1164 return name
1164 return name
1165
1165
1166 def checkkeepchanges(self, keepchanges, force):
1166 def checkkeepchanges(self, keepchanges, force):
1167 if force and keepchanges:
1167 if force and keepchanges:
1168 raise error.Abort(_('cannot use both --force and --keep-changes'))
1168 raise error.Abort(_('cannot use both --force and --keep-changes'))
1169
1169
    def new(self, repo, patchfn, *pats, **opts):
        """Create a new patch *patchfn* from current local changes.

        options:
        msg: a string or a no-argument function returning a string

        Commits the selected changes (or all local changes), records the
        commit as the new top applied patch, and writes the patch file.
        On any failure the commit is rolled back and the partially
        written patch file removed.
        """
        msg = opts.get('msg')
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qnew')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = checksubstate(repo)
        if inclsubs:
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise error.Abort('%s: %s' % (f, msg))
            match = scmutil.match(repo[None], pats, opts, badfn=badfn)
            changes = repo.status(match=match)
        else:
            changes = self.checklocalchanges(repo, force=True)
        # commit dirty subrepo state plus modified/added/removed files
        commitfiles = list(inclsubs)
        for files in changes[:3]:
            commitfiles.extend(files)
        match = scmutil.matchfiles(repo, commitfiles)
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot manage merge changesets'))
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        with repo.wlock():
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError as e:
                raise error.Abort(_('cannot write patch "%s": %s')
                                  % (patchfn, e.strerror))
            try:
                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        # fall back to the default message when the
                        # user leaves the editor empty
                        if desc.rstrip():
                            return desc
                        else:
                            return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    commitmsg = msg
                else:
                    commitmsg = msg or defaultmsg

                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True, editor=editor)
                if n is None:
                    raise error.Abort(_("repo commit failed"))
                try:
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    nctx = repo[n]
                    ph = patchheader(self.join(patchfn), self.plainmode)
                    if user:
                        ph.setuser(user)
                    if date:
                        ph.setdate('%s %s' % date)
                    ph.setparent(hex(nctx.p1().node()))
                    # the default message is implicit; do not store it in
                    # the patch header
                    msg = nctx.description().strip()
                    if msg == defaultmsg.strip():
                        msg = ''
                    ph.setmessage(msg)
                    p.write(str(ph))
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        if inclsubs:
                            self.putsubstate2changes(substatestate, changes)
                        chunks = patchmod.diff(repo, node1=parent, node2=n,
                                               changes=changes, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except: # re-raises
                    # undo the commit made above before propagating
                    repo.rollback()
                    raise
            except Exception:
                # remove the partially written patch file
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except OSError:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
        self.removeundo(repo)
1274
1274
1275 def isapplied(self, patch):
1275 def isapplied(self, patch):
1276 """returns (index, rev, patch)"""
1276 """returns (index, rev, patch)"""
1277 for i, a in enumerate(self.applied):
1277 for i, a in enumerate(self.applied):
1278 if a.name == patch:
1278 if a.name == patch:
1279 return (i, a.node, a.name)
1279 return (i, a.node, a.name)
1280 return None
1280 return None
1281
1281
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number (as string) to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve *patch* to a name from the series file.

        Raises error.Abort when nothing matches.  May return None when a
        substring match is ambiguous (after warning with candidates).
        """
        def partialname(s):
            # exact name, then unique substring, then the qtip/qbase
            # aliases (only meaningful while patches are applied)
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.seriesend(True) - 1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch in self.series:
            return patch

        # numeric lookup (#1), skipped when a file with that literal
        # name exists in the queue
        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                # negative numbers index from the end, like list indexing
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

        if not strict:
            res = partialname(patch)
            if res:
                return res
            # name-<n>: n patches before the named one (#3)
            minus = patch.rfind('-')
            if minus >= 0:
                res = partialname(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # name+<n>: n patches after the named one (#3)
            plus = patch.rfind('+')
            if plus >= 0:
                res = partialname(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
        raise error.Abort(_("patch %s not in series") % patch)
1348
1348
def push(self, repo, patch=None, force=False, list=False, mergeq=None,
         all=False, move=False, exact=False, nobackup=False,
         keepchanges=False):
    """Apply the next unapplied patch(es) from the series (qpush).

    Parameters:
      patch       -- name/identifier of the patch to push up to; resolved
                     via self.lookup(); None pushes just the next patch
      force       -- skip the local-changes check before applying
      list        -- passed through to self.apply(); semantics not visible
                     here (presumably controls listing of touched files —
                     TODO confirm against self.apply)
      mergeq      -- when set, patches are applied via self.mergepatch()
                     with this queue instead of self.apply()
      all         -- push every remaining patch in the series
      move        -- reorder the series so *patch* becomes the next entry
                     before pushing
      exact       -- update the working directory to the patch's recorded
                     parent first; incompatible with applied patches,
                     --move and --keep-changes
      nobackup    -- suppress backing up locally modified files on --force
      keepchanges -- tolerate local changes, backing them up so they can
                     be restored

    Returns 0 on success or benign no-ops, 1 on refusals (guarded patch,
    series fully applied), otherwise the first element of the tuple
    returned by apply()/mergepatch() (nonzero appears to mean apply
    errors — TODO confirm against self.apply). Raises error.Abort on
    invalid combinations or targets. Everything runs under repo.wlock().
    """
    self.checkkeepchanges(keepchanges, force)
    diffopts = self.diffopts()
    with repo.wlock():
        # warn when the working dir parent is not a branch head (the new
        # patch commit would create yet another head); --exact is exempt
        # since it relocates the working dir anyway
        heads = []
        for hs in repo.branchmap().itervalues():
            heads.extend(hs)
        if not heads:
            heads = [nullid]
        if repo.dirstate.p1() not in heads and not exact:
            self.ui.status(_("(working directory not at a head)\n"))

        if not self.series:
            self.ui.warn(_('no patches in series\n'))
            return 0

        # Suppose our series file is: A B C and the current 'top'
        # patch is B. qpush C should be performed (moving forward)
        # qpush B is a NOP (no change) qpush A is an error (can't
        # go backwards with qpush)
        if patch:
            patch = self.lookup(patch)
            info = self.isapplied(patch)
            if info and info[0] >= len(self.applied) - 1:
                self.ui.warn(
                    _('qpush: %s is already at the top\n') % patch)
                return 0

            pushable, reason = self.pushable(patch)
            if pushable:
                if self.series.index(patch) < self.seriesend():
                    raise error.Abort(
                        _("cannot push to a previous patch: %s") % patch)
            else:
                if reason:
                    reason = _('guarded by %s') % reason
                else:
                    reason = _('no matching guards')
                self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                return 1
        elif all:
            # --all: push through the last patch of the series
            patch = self.series[-1]
            if self.isapplied(patch):
                self.ui.warn(_('all patches are currently applied\n'))
                return 0

        # Following the above example, starting at 'top' of B:
        # qpush should be performed (pushes C), but a subsequent
        # qpush without an argument is an error (nothing to
        # apply). This allows a loop of "...while hg qpush..." to
        # work as it detects an error when done
        start = self.seriesend()
        if start == len(self.series):
            self.ui.warn(_('patch series already fully applied\n'))
            return 1
        if not force and not keepchanges:
            self.checklocalchanges(repo, refresh=self.applied)

        if exact:
            if keepchanges:
                raise error.Abort(
                    _("cannot use --exact and --keep-changes together"))
            if move:
                raise error.Abort(_('cannot use --exact and --move '
                                    'together'))
            if self.applied:
                raise error.Abort(_('cannot push --exact with applied '
                                    'patches'))
            # move the working directory onto the parent recorded in the
            # first patch to be pushed
            root = self.series[start]
            target = patchheader(self.join(root), self.plainmode).parent
            if not target:
                raise error.Abort(
                    _("%s does not have a parent recorded") % root)
            if not repo[target] == repo['.']:
                hg.update(repo, target)

        if move:
            if not patch:
                raise error.Abort(_("please specify the patch to move"))
            # find the full-series index of the next patch to be pushed...
            for fullstart, rpn in enumerate(self.fullseries):
                # strip markers for patch guards
                if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                    break
            # ...and of the patch being moved (searched from there on)
            for i, rpn in enumerate(self.fullseries[fullstart:]):
                # strip markers for patch guards
                if self.guard_re.split(rpn, 1)[0] == patch:
                    break
            index = fullstart + i
            assert index < len(self.fullseries)
            # relocate the moved patch (with its guard markers) to the
            # front of the unapplied part of the series
            fullpatch = self.fullseries[index]
            del self.fullseries[index]
            self.fullseries.insert(fullstart, fullpatch)
            self.parseseries()
            self.seriesdirty = True

        self.applieddirty = True
        if start > 0:
            self.checktoppatch(repo)
        if not patch:
            patch = self.series[start]
            end = start + 1
        else:
            end = self.series.index(patch, start) + 1

        # collect files whose local modifications must be backed up
        # before apply may overwrite them
        tobackup = set()
        if (not nobackup and force) or keepchanges:
            status = self.checklocalchanges(repo, force=True)
            if keepchanges:
                tobackup.update(status.modified + status.added +
                                status.removed + status.deleted)
            else:
                tobackup.update(status.modified + status.added)

        s = self.series[start:end]
        all_files = set()
        try:
            if mergeq:
                ret = self.mergepatch(repo, mergeq, s, diffopts)
            else:
                ret = self.apply(repo, s, list, all_files=all_files,
                                 tobackup=tobackup, keepchanges=keepchanges)
        except AbortNoCleanup:
            # caller asked apply() to leave the working dir alone on abort
            raise
        except: # re-raises
            # any other failure: restore the working directory to its
            # pre-push state before propagating the exception
            self.ui.warn(_('cleaning up working directory...\n'))
            cmdutil.revert(self.ui, repo, repo['.'],
                           repo.dirstate.parents(), no_backup=True)
            # only remove unknown files that we know we touched or
            # created while patching
            for f in all_files:
                if f not in repo.dirstate:
                    repo.wvfs.unlinkpath(f, ignoremissing=True)
            self.ui.warn(_('done\n'))
            raise

        if not self.applied:
            return ret[0]
        top = self.applied[-1].name
        if ret[0] and ret[0] > 1:
            msg = _("errors during apply, please fix and qrefresh %s\n")
            self.ui.write(msg % top)
        else:
            self.ui.write(_("now at: %s\n") % top)
        return ret[0]
1495
1495
def pop(self, repo, patch=None, force=False, update=True, all=False,
        nobackup=False, keepchanges=False):
    """Unapply applied patch(es) down to (and excluding) *patch* (qpop).

    Parameters:
      patch       -- pop until this patch is the new top; None pops one
      force       -- tolerate local changes (backed up unless nobackup)
      update      -- update the working directory to the new qparent;
                     may be forced on or off depending on whether the
                     dirstate parents sit inside the popped range
      all         -- pop every applied patch
      nobackup    -- with force, do not back up touched files
      keepchanges -- tolerate local changes only if they don't overlap
                     the files restored by the pop

    Returns: `not all` when nothing is applied (so `qpop -a` stays
    successful when already empty), None otherwise. Raises error.Abort
    when the patch is unknown/unapplied, when popping would strip
    revisions not managed by this queue, or when it would remove a
    public (immutable) revision. Runs entirely under repo.wlock().
    """
    self.checkkeepchanges(keepchanges, force)
    with repo.wlock():
        if patch:
            # index, rev, patch
            info = self.isapplied(patch)
            if not info:
                patch = self.lookup(patch)
                info = self.isapplied(patch)
                if not info:
                    raise error.Abort(_("patch %s is not applied") % patch)

        if not self.applied:
            # Allow qpop -a to work repeatedly,
            # but not qpop without an argument
            self.ui.warn(_("no patches applied\n"))
            return not all

        # index of the first applied entry to remove
        if all:
            start = 0
        elif patch:
            start = info[0] + 1
        else:
            start = len(self.applied) - 1

        if start >= len(self.applied):
            self.ui.warn(_("qpop: %s is already at the top\n") % patch)
            return

        if not update:
            # even with --no-update, the dirstate must be updated if one
            # of its parents is about to be stripped
            parents = repo.dirstate.parents()
            rr = [x.node for x in self.applied]
            for p in parents:
                if p in rr:
                    self.ui.warn(_("qpop: forcing dirstate update\n"))
                    update = True
        else:
            # skip the working-dir update when no dirstate parent lies in
            # the range being popped
            parents = [p.node() for p in repo[None].parents()]
            needupdate = False
            for entry in self.applied[start:]:
                if entry.node in parents:
                    needupdate = True
                    break
            update = needupdate

        # files whose local modifications will be backed up before the
        # working dir is rewritten
        tobackup = set()
        if update:
            s = self.checklocalchanges(repo, force=force or keepchanges)
            if force:
                if not nobackup:
                    tobackup.update(s.modified + s.added)
            elif keepchanges:
                tobackup.update(s.modified + s.added +
                                s.removed + s.deleted)

        self.applieddirty = True
        end = len(self.applied)
        rev = self.applied[start].node

        try:
            heads = repo.changelog.heads(rev)
        except error.LookupError:
            node = short(rev)
            raise error.Abort(_('trying to pop unknown node %s') % node)

        # refuse to strip anything that grew children outside the queue
        if heads != [self.applied[-1].node]:
            raise error.Abort(_("popping would remove a revision not "
                                "managed by this patch queue"))
        if not repo[self.applied[-1].node].mutable():
            raise error.Abort(
                _("popping would remove a public revision"),
                hint=_("see 'hg help phases' for details"))

        # we know there are no local changes, so we can make a simplified
        # form of hg.update.
        if update:
            qp = self.qparents(repo, rev)
            ctx = repo[qp]
            m, a, r, d = repo.status(qp, '.')[:4]
            if d:
                raise error.Abort(_("deletions found between repo revs"))

            # only back up files the update will actually touch
            tobackup = set(a + m + r) & tobackup
            if keepchanges and tobackup:
                raise error.Abort(_("local changes found, qrefresh first"))
            self.backup(repo, tobackup)
            repo.dirstate.beginparentchange()
            for f in a:
                # files added by the popped patches disappear
                repo.wvfs.unlinkpath(f, ignoremissing=True)
                repo.dirstate.drop(f)
            for f in m + r:
                # restore modified/removed files to their qparent content
                fctx = ctx[f]
                repo.wwrite(f, fctx.data(), fctx.flags())
                repo.dirstate.normal(f)
            repo.setparents(qp, nullid)
            repo.dirstate.endparentchange()
        for patch in reversed(self.applied[start:end]):
            self.ui.status(_("popping %s\n") % patch.name)
        del self.applied[start:end]
        strip(self.ui, repo, [rev], update=False, backup=False)
        # bring subrepos back in sync with the new working dir parent
        for s, state in repo['.'].substate.items():
            repo['.'].sub(s).get(state)
        if self.applied:
            self.ui.write(_("now at: %s\n") % self.applied[-1].name)
        else:
            self.ui.write(_("patch queue now empty\n"))
1603
1603
def diff(self, repo, pats, opts):
    """Print the diff of the topmost applied patch (qdiff).

    With opts['reverse'] the diff runs from the working directory back
    to the patch parent; otherwise from the patch parent forward. Emits
    a notice and returns early when no patch is applied.
    """
    toprev, patchname = self.checktoppatch(repo)
    if not toprev:
        self.ui.write(_("no patches applied\n"))
        return
    qparent = self.qparents(repo, toprev)
    reverse = bool(opts.get('reverse'))
    node1, node2 = (None, qparent) if reverse else (qparent, None)
    diffopts = self.diffopts(opts, patchname)
    self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1616
1616
def refresh(self, repo, pats=None, **opts):
    """Fold the current working-directory changes into the top patch
    (qrefresh): pop the qtip commit off via strip, rewrite the dirstate
    in place, re-commit, and rewrite the on-disk patch file atomically.

    Recognized opts (all optional): msg, edit, editform, user, date,
    git, short, plus matcher include/exclude options consumed through
    scmutil.match. Returns 1 when no patches are applied. Raises
    error.Abort for a qtip with children or a public qtip. On an
    interrupt after the strip, rebuilds the dirstate and warns that the
    patch was left popped. Holds repo.wlock() throughout.
    """
    if not self.applied:
        self.ui.write(_("no patches applied\n"))
        return 1
    msg = opts.get('msg', '').rstrip()
    edit = opts.get('edit')
    editform = opts.get('editform', 'mq.qrefresh')
    newuser = opts.get('user')
    newdate = opts.get('date')
    if newdate:
        newdate = '%d %d' % util.parsedate(newdate)
    wlock = repo.wlock()

    try:
        self.checktoppatch(repo)
        (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
        # qtip is about to be stripped and re-created: it must be a
        # childless, mutable head
        if repo.changelog.heads(top) != [top]:
            raise error.Abort(_("cannot qrefresh a revision with children"))
        if not repo[top].mutable():
            raise error.Abort(_("cannot qrefresh public revision"),
                              hint=_("see 'hg help phases' for details"))

        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)

        inclsubs = checksubstate(repo, hex(patchparent))
        if inclsubs:
            substatestate = repo.dirstate['.hgsubstate']

        ph = patchheader(self.join(patchfn), self.plainmode)
        diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
        if newuser:
            ph.setuser(newuser)
        if newdate:
            ph.setdate(newdate)
        ph.setparent(hex(patchparent))

        # only commit new patch when write is complete
        patchf = self.opener(patchfn, 'w', atomictemp=True)

        # update the dirstate in place, strip off the qtip commit
        # and then commit.
        #
        # this should really read:
        #   mm, dd, aa = repo.status(top, patchparent)[:3]
        # but we do it backwards to take advantage of manifest/changelog
        # caching against the next repo.status call
        mm, aa, dd = repo.status(patchparent, top)[:3]
        changes = repo.changelog.read(top)
        man = repo.manifestlog[changes[0]].read()
        aaa = aa[:]
        matchfn = scmutil.match(repo[None], pats, opts)
        # in short mode, we only diff the files included in the
        # patch already plus specified files
        if opts.get('short'):
            # if amending a patch, we start with existing
            # files plus specified files - unfiltered
            match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
            # filter with include/exclude options
            matchfn = scmutil.match(repo[None], opts=opts)
        else:
            match = scmutil.matchall(repo)
        m, a, r, d = repo.status(match=match)[:4]
        mm = set(mm)
        aa = set(aa)
        dd = set(dd)

        # we might end up with files that were added between
        # qtip and the dirstate parent, but then changed in the
        # local dirstate. in this case, we want them to only
        # show up in the added section
        for x in m:
            if x not in aa:
                mm.add(x)
        # we might end up with files added by the local dirstate that
        # were deleted by the patch. In this case, they should only
        # show up in the changed section.
        for x in a:
            if x in dd:
                dd.remove(x)
                mm.add(x)
            else:
                aa.add(x)
        # make sure any files deleted in the local dirstate
        # are not in the add or change column of the patch
        forget = []
        for x in d + r:
            if x in aa:
                aa.remove(x)
                forget.append(x)
                continue
            else:
                mm.discard(x)
            dd.add(x)

        m = list(mm)
        r = list(dd)
        a = list(aa)

        # create 'match' that includes the files to be recommitted.
        # apply matchfn via repo.status to ensure correct case handling.
        cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
        allmatches = set(cm + ca + cr + cd)
        refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]

        files = set(inclsubs)
        for x in refreshchanges:
            files.update(x)
        match = scmutil.matchfiles(repo, files)

        # bookmarks on qtip must be moved to the refreshed commit later
        bmlist = repo[top].bookmarks()

        dsguard = None
        try:
            # guard so the in-place dirstate rewrite rolls back if
            # anything below fails before dsguard.close()
            dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
            if diffopts.git or diffopts.upgrade:
                # git-style patches record copies: rebuild the dirstate
                # copy records so the regenerated patch keeps them
                copies = {}
                for dst in a:
                    src = repo.dirstate.copied(dst)
                    # during qfold, the source file for copies may
                    # be removed. Treat this as a simple add.
                    if src is not None and src in repo.dirstate:
                        copies.setdefault(src, []).append(dst)
                    repo.dirstate.add(dst)
                # remember the copies between patchparent and qtip
                for dst in aaa:
                    f = repo.file(dst)
                    src = f.renamed(man[dst])
                    if src:
                        copies.setdefault(src[0], []).extend(
                            copies.get(dst, []))
                        if dst in a:
                            copies[src[0]].append(dst)
                    # we can't copy a file created by the patch itself
                    if dst in copies:
                        del copies[dst]
                for src, dsts in copies.iteritems():
                    for dst in dsts:
                        repo.dirstate.copy(src, dst)
            else:
                for dst in a:
                    repo.dirstate.add(dst)
                # Drop useless copy information
                for f in list(repo.dirstate.copies()):
                    repo.dirstate.copy(None, f)
            for f in r:
                repo.dirstate.remove(f)
            # if the patch excludes a modified file, mark that
            # file with mtime=0 so status can see it.
            mm = []
            for i in xrange(len(m) - 1, -1, -1):
                if not matchfn(m[i]):
                    mm.append(m[i])
                    del m[i]
            for f in m:
                repo.dirstate.normal(f)
            for f in mm:
                repo.dirstate.normallookup(f)
            for f in forget:
                repo.dirstate.drop(f)

            user = ph.user or changes[1]

            oldphase = repo[top].phase()

            # assumes strip can roll itself back if interrupted
            repo.setparents(*cparents)
            self.applied.pop()
            self.applieddirty = True
            strip(self.ui, repo, [top], update=False, backup=False)
            dsguard.close()
        finally:
            release(dsguard)

        try:
            # might be nice to attempt to roll back strip after this

            defaultmsg = "[mq]: %s" % patchfn
            editor = cmdutil.getcommiteditor(editform=editform)
            if edit:
                def finishdesc(desc):
                    # persist a non-empty edited description into the
                    # patch header; fall back to the default message
                    if desc.rstrip():
                        ph.setmessage(desc)
                        return desc
                    return defaultmsg
                # i18n: this message is shown in editor with "HG: " prefix
                extramsg = _('Leave message empty to use default message.')
                editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
                message = msg or "\n".join(ph.message)
            elif not msg:
                if not ph.message:
                    message = defaultmsg
                else:
                    message = "\n".join(ph.message)
            else:
                message = msg
                ph.setmessage(msg)

            # Ensure we create a new changeset in the same phase than
            # the old one.
            lock = tr = None
            try:
                lock = repo.lock()
                tr = repo.transaction('mq')
                n = newcommit(repo, oldphase, message, user, ph.date,
                              match=match, force=True, editor=editor)
                # only write patch after a successful commit
                c = [list(x) for x in refreshchanges]
                if inclsubs:
                    self.putsubstate2changes(substatestate, c)
                chunks = patchmod.diff(repo, patchparent,
                                       changes=c, opts=diffopts)
                comments = str(ph)
                if comments:
                    patchf.write(comments)
                for chunk in chunks:
                    patchf.write(chunk)
                patchf.close()

                # relocate bookmarks from the old qtip to the new commit
                marks = repo._bookmarks
                for bm in bmlist:
                    marks[bm] = n
                marks.recordchange(tr)
                tr.close()

                self.applied.append(statusentry(n, patchfn))
            finally:
                lockmod.release(tr, lock)
        except: # re-raises
            # commit failed after the strip: put the dirstate back onto
            # the patch parent and tell the user how to recover
            ctx = repo[cparents[0]]
            repo.dirstate.rebuild(ctx.node(), ctx.manifest())
            self.savedirty()
            self.ui.warn(_('qrefresh interrupted while patch was popped! '
                           '(revert --all, qpush to recover)\n'))
            raise
    finally:
        wlock.release()
        self.removeundo(repo)
1857
1857
1858 def init(self, repo, create=False):
1858 def init(self, repo, create=False):
1859 if not create and os.path.isdir(self.path):
1859 if not create and os.path.isdir(self.path):
1860 raise error.Abort(_("patch queue directory already exists"))
1860 raise error.Abort(_("patch queue directory already exists"))
1861 try:
1861 try:
1862 os.mkdir(self.path)
1862 os.mkdir(self.path)
1863 except OSError as inst:
1863 except OSError as inst:
1864 if inst.errno != errno.EEXIST or not create:
1864 if inst.errno != errno.EEXIST or not create:
1865 raise
1865 raise
1866 if create:
1866 if create:
1867 return self.qrepo(create=True)
1867 return self.qrepo(create=True)
1868
1868
def unapplied(self, repo, patch=None):
    """Return [(series_index, patch_name)] for pushable, not-yet-applied
    patches.

    With *patch*, listing starts just after that patch in the series;
    otherwise it starts after the last applied patch. Raises error.Abort
    for a patch name missing from the series file. Guard explanations
    are emitted for every candidate, pushable or not.
    """
    if patch and patch not in self.series:
        raise error.Abort(_("patch %s is not in series file") % patch)
    start = self.series.index(patch) + 1 if patch else self.seriesend()
    result = []
    for idx in xrange(start, len(self.series)):
        ok, _reason = self.pushable(idx)
        if ok:
            result.append((idx, self.series[idx]))
        self.explainpushable(idx)
    return result
1883
1883
def qseries(self, repo, missing=None, start=0, length=None, status=None,
            summary=False):
    """Print the series of patches, one per line.

    missing: if set, list files found in the patch directory that are
        *not* tracked in the series file, instead of the series itself.
    start/length: slice of the series to display (length defaults to
        the rest of the series).
    status: in non-verbose mode, restrict output to entries whose state
        character ('A'pplied, 'U'npplied/pushable, 'G'uarded) matches.
    summary: also print the first line of each patch's description.
    """
    def displayname(pfx, patchname, state):
        # Emit one output line: optional prefix, the patch name labeled
        # by state (for color), and optionally the truncated summary.
        if pfx:
            self.ui.write(pfx)
        if summary:
            ph = patchheader(self.join(patchname), self.plainmode)
            if ph.message:
                msg = ph.message[0]
            else:
                msg = ''

            if self.ui.formatted():
                # truncate the message so the line fits the terminal
                width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                if width > 0:
                    msg = util.ellipsis(msg, width)
                else:
                    msg = ''
            self.ui.write(patchname, label='qseries.' + state)
            self.ui.write(': ')
            self.ui.write(msg, label='qseries.message.' + state)
        else:
            self.ui.write(patchname, label='qseries.' + state)
        self.ui.write('\n')

    applied = set([p.name for p in self.applied])
    if length is None:
        length = len(self.series) - start
    if not missing:
        if self.ui.verbose:
            # width needed to right-align the largest printed index
            idxwidth = len(str(start + length - 1))
        for i in xrange(start, start + length):
            patch = self.series[i]
            if patch in applied:
                char, state = 'A', 'applied'
            elif self.pushable(i)[0]:
                char, state = 'U', 'unapplied'
            else:
                char, state = 'G', 'guarded'
            pfx = ''
            if self.ui.verbose:
                pfx = '%*d %s ' % (idxwidth, i, char)
            elif status and status != char:
                # non-verbose with a status filter: skip non-matching
                continue
            displayname(pfx, patch, state)
    else:
        # walk the patch directory for files unknown to the series file,
        # skipping mq's own bookkeeping files and dotfiles
        msng_list = []
        for root, dirs, files in os.walk(self.path):
            d = root[len(self.path) + 1:]
            for f in files:
                fl = os.path.join(d, f)
                if (fl not in self.series and
                    fl not in (self.statuspath, self.seriespath,
                               self.guardspath)
                    and not fl.startswith('.')):
                    msng_list.append(fl)
        for x in sorted(msng_list):
            pfx = self.ui.verbose and ('D ') or ''
            displayname(pfx, x, 'missing')
1943
1943
def issaveline(self, l):
    """Report whether status entry *l* is the special save marker.

    Returns True for the marker, None (falsy) otherwise, matching the
    original implicit-return behavior.
    """
    return True if l.name == '.hg.patches.save.line' else None
1947
1947
def qrepo(self, create=False):
    """Return the versioned queue repository, or None if there is none.

    A repository object is returned when one is being created, or when
    a '.hg' directory already exists inside the patch directory.
    """
    ui = self.baseui.copy()
    if not (create or os.path.isdir(self.join(".hg"))):
        return None
    return hg.repository(ui, path=self.path, create=create)
1952
1952
def restore(self, repo, rev, delete=None, qupdate=None):
    """Restore mq state saved by :meth:`save` from changeset *rev*.

    Parses the saved queue status back out of the changeset description
    (series, applied patches, and queue-repo dirstate parents), then
    optionally strips the save changeset (*delete*) and updates the
    versioned queue repository (*qupdate*).  Returns 1 on failure.
    """
    desc = repo[rev].description().strip()
    lines = desc.splitlines()
    i = 0
    datastart = None
    series = []
    applied = []
    qpp = None  # saved queue-repo dirstate parents, if recorded
    for i, line in enumerate(lines):
        if line == 'Patch Data:':
            datastart = i + 1
        elif line.startswith('Dirstate:'):
            # 'Dirstate: <hex> <hex>' -- slice off the 10-char prefix
            l = line.rstrip()
            l = l[10:].split(' ')
            qpp = [bin(x) for x in l]
        elif datastart is not None:
            # applied entries are '<hexnode>:<name>'; plain series
            # entries have an empty node part (leading ':')
            l = line.rstrip()
            n, name = l.split(':', 1)
            if n:
                applied.append(statusentry(bin(n), name))
            else:
                series.append(l)
    if datastart is None:
        self.ui.warn(_("no saved patch data found\n"))
        return 1
    self.ui.warn(_("restoring status: %s\n") % lines[0])
    self.fullseries = series
    self.applied = applied
    self.parseseries()
    self.seriesdirty = True
    self.applieddirty = True
    heads = repo.changelog.heads()
    if delete:
        # only strip the save changeset if nothing was committed on top
        if rev not in heads:
            self.ui.warn(_("save entry has children, leaving it alone\n"))
        else:
            self.ui.warn(_("removing save entry %s\n") % short(rev))
            pp = repo.dirstate.parents()
            if rev in pp:
                update = True
            else:
                update = False
            strip(self.ui, repo, [rev], update=update, backup=False)
    if qpp:
        self.ui.warn(_("saved queue repository parents: %s %s\n") %
                     (short(qpp[0]), short(qpp[1])))
    if qupdate:
        self.ui.status(_("updating queue directory\n"))
        r = self.qrepo()
        if not r:
            self.ui.warn(_("unable to load queue repository\n"))
            return 1
        # NOTE(review): assumes a 'Dirstate:' line was present (qpp set)
        # whenever qupdate is requested -- confirm against callers
        hg.clean(r, qpp[0])
2006
2006
def save(self, repo, msg=None):
    """Commit the current mq status into the repository as a save entry.

    Encodes the applied list, full series, and (if a queue repo exists)
    its dirstate parents into a commit message that :meth:`restore` can
    parse back, then appends a '.hg.patches.save.line' marker entry to
    the applied list.  Returns 1 on failure.
    """
    if not self.applied:
        self.ui.warn(_("save: no patches applied, exiting\n"))
        return 1
    if self.issaveline(self.applied[-1]):
        self.ui.warn(_("status is already saved\n"))
        return 1

    if not msg:
        msg = _("hg patches saved state")
    else:
        msg = "hg patches: " + msg.rstrip('\r\n')
    r = self.qrepo()
    if r:
        # record the queue repository's dirstate parents for restore
        pp = r.dirstate.parents()
        msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
    # the exact layout below is parsed by restore(): applied entries as
    # '<node>:<name>', series entries prefixed with a bare ':'
    msg += "\n\nPatch Data:\n"
    msg += ''.join('%s\n' % x for x in self.applied)
    msg += ''.join(':%s\n' % x for x in self.fullseries)
    n = repo.commit(msg, force=True)
    if not n:
        self.ui.warn(_("repo commit failed\n"))
        return 1
    self.applied.append(statusentry(n, '.hg.patches.save.line'))
    self.applieddirty = True
    self.removeundo(repo)
2033
2033
def fullseriesend(self):
    """Return the fullseries index just past the last applied patch.

    0 when nothing is applied; len(self.fullseries) when the last
    applied patch cannot be located in the series file.
    """
    if not self.applied:
        return 0
    pos = self.findseries(self.applied[-1].name)
    return len(self.fullseries) if pos is None else pos + 1
2042
2042
def seriesend(self, all_patches=False):
    """If all_patches is False, return the index of the next pushable patch
    in the series, or the series length. If all_patches is True, return the
    index of the first patch past the last applied one.
    """
    def _advance(start):
        # skip over unpushable (guarded) patches unless all_patches is set
        if all_patches or start >= len(self.series):
            return start
        for idx in xrange(start, len(self.series)):
            ok, reason = self.pushable(idx)
            if ok:
                return idx
            self.explainpushable(idx)
        return len(self.series)

    if not self.applied:
        return _advance(0)
    try:
        pos = self.series.index(self.applied[-1].name)
    except ValueError:
        # last applied patch vanished from the series file
        return 0
    return _advance(pos + 1)
2066
2066
def appliedname(self, index):
    """Return the display name of applied patch *index*.

    In verbose mode the name is prefixed with its series index.
    """
    pname = self.applied[index].name
    if self.ui.verbose:
        return "%d %s" % (self.series.index(pname), pname)
    return pname
2074
2074
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patch files and/or existing revisions under mq control.

    files: patch file names (or '-' for stdin) to copy into the queue.
    patchname: name to give the (single) imported patch.
    rev: revisions to convert into patches at the front of the series.
    existing: register files already present in the patch directory.
    force: overwrite existing patches / skip series duplication checks.
    git: use git extended diff format when exporting revisions.

    Returns the list of imported patch names; aborts on any conflict.
    """
    def checkseries(patchname):
        if patchname in self.series:
            raise error.Abort(_('patch %s is already in the series file')
                              % patchname)

    if rev:
        if files:
            raise error.Abort(_('option "-r" not valid when importing '
                                'files'))
        rev = scmutil.revrange(repo, rev)
        # process newest-first so each revision is prepended in order
        rev.sort(reverse=True)
    elif not files:
        raise error.Abort(_('no files or revisions specified'))
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise error.Abort(_('option "-n" not valid when importing multiple '
                            'patches'))
    imported = []
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev.first()))
        if len(heads) > 1:
            raise error.Abort(_('revision %d is the root of more than one '
                                'branch') % rev.last())
        if self.applied:
            base = repo.changelog.node(rev.first())
            if base in [n.node for n in self.applied]:
                raise error.Abort(_('revision %d is already managed')
                                  % rev.first())
            if heads != [self.applied[-1].node]:
                raise error.Abort(_('revision %d is not the parent of '
                                    'the queue') % rev.first())
            base = repo.changelog.rev(self.applied[0].node)
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev.first())]:
                raise error.Abort(_('revision %d has unmanaged children')
                                  % rev.first())
            lastparent = None

        diffopts = self.diffopts({'git': git})
        with repo.transaction('qimport') as tr:
            for r in rev:
                if not repo[r].mutable():
                    raise error.Abort(_('revision %d is not mutable') % r,
                                      hint=_("see 'hg help phases' "
                                             'for details'))
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != nullrev:
                    # merge revisions cannot be represented as patches
                    raise error.Abort(_('cannot import merge revision %d')
                                      % r)
                if lastparent and lastparent != r:
                    raise error.Abort(_('revision %d is not the parent of '
                                        '%d')
                                      % (r, lastparent))
                lastparent = p1

                if not patchname:
                    # derive a name from the first description line
                    patchname = self.makepatchname(
                        repo[r].description().split('\n', 1)[0],
                        '%d.diff' % r)
                checkseries(patchname)
                self.checkpatchname(patchname, force)
                self.fullseries.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                patchf.close()

                se = statusentry(n, patchname)
                self.applied.insert(0, se)

                self.added.append(patchname)
                imported.append(patchname)
                patchname = None
            if rev and repo.ui.configbool('mq', 'secret', False):
                # if we added anything with --rev, move the secret root
                phases.retractboundary(repo, tr, phases.secret, [n])
        self.parseseries()
        self.applieddirty = True
        self.seriesdirty = True

    for i, filename in enumerate(files):
        if existing:
            if filename == '-':
                raise error.Abort(_('-e is incompatible with import from -')
                                  )
            filename = normname(filename)
            self.checkreservedname(filename)
            if util.url(filename).islocal():
                originpath = self.join(filename)
                if not os.path.isfile(originpath):
                    raise error.Abort(
                        _("patch %s does not exist") % filename)

            if patchname:
                self.checkpatchname(patchname, force)

                self.ui.write(_('renaming %s to %s\n')
                              % (filename, patchname))
                util.rename(originpath, self.join(patchname))
            else:
                patchname = filename

        else:
            if filename == '-' and not patchname:
                raise error.Abort(_('need --name to import a patch from -'))
            elif not patchname:
                patchname = normname(os.path.basename(filename.rstrip('/')))
            self.checkpatchname(patchname, force)
            try:
                if filename == '-':
                    text = self.ui.fin.read()
                else:
                    fp = hg.openpath(self.ui, filename)
                    text = fp.read()
                    fp.close()
            except (OSError, IOError):
                raise error.Abort(_("unable to read file %s") % filename)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
            patchf.close()
        if not force:
            checkseries(patchname)
        if patchname not in self.series:
            # insert after the last applied patch, offset by position
            index = self.fullseriesend() + i
            self.fullseries[index:index] = [patchname]
            self.parseseries()
            self.seriesdirty = True
        self.ui.warn(_("adding %s to series file\n") % patchname)
        self.added.append(patchname)
        imported.append(patchname)
        patchname = None

    self.removeundo(repo)
    return imported
2215
2215
def fixkeepchangesopts(ui, opts):
    """Inject --keep-changes into *opts* when mq.keepchanges is set.

    The option dict is returned unchanged when the config knob is off
    or when --force/--exact was given; otherwise a copy with
    'keep_changes' set to True is returned.
    """
    keep = (ui.configbool('mq', 'keepchanges')
            and not opts.get('force') and not opts.get('exact'))
    if not keep:
        return opts
    newopts = dict(opts)
    newopts['keep_changes'] = True
    return newopts
2223
2223
@command("qdelete|qremove|qrm",
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [],
           _('stop managing a revision (DEPRECATED)'), _('REV'))],
         _('hg qdelete [-k] [PATCH]...'))
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # delegate to the queue object, then persist series/status changes
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.savedirty()
    return 0
2242
2242
@command("qapplied",
         [('1', 'last', None, _('show only the preceding applied patch'))
          ] + seriesopts,
         _('hg qapplied [-1] [-s] [PATCH]'))
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    q = repo.mq

    if patch:
        if patch not in q.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        end = q.series.index(patch) + 1
    else:
        end = q.seriesend(True)

    last = opts.get('last')
    if last and not end:
        ui.write(_("no patches applied\n"))
        return 1
    if last and end == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if last:
        # show only the patch preceding the last applied one
        start = end - 2
        end = 1
    else:
        start = 0

    q.qseries(repo, length=end, start=start, status='A',
              summary=opts.get('summary'))
2275
2275
2276
2276
@command("qunapplied",
         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
         _('hg qunapplied [-1] [-s] [PATCH]'))
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    q = repo.mq
    if patch:
        if patch not in q.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.seriesend(True)

    if start == len(q.series) and opts.get('first'):
        ui.write(_("all patches applied\n"))
        return 1

    length = 1 if opts.get('first') else None
    q.qseries(repo, start=start, length=length, status='U',
              summary=opts.get('summary'))
2303
2303
@command("qimport",
         [('e', 'existing', None, _('import file in patch directory')),
          ('n', 'name', '',
           _('name of patch file'), _('NAME')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [],
           _('place existing revisions under mq control'), _('REV')),
          ('g', 'git', None, _('use git extended diff format')),
          ('P', 'push', None, _('qpush after importing'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
def qimport(ui, repo, *filename, **opts):
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev . -n patch will place the current revision
    under mq control). With -g/--git, patches imported with --rev will
    use the git diff format. See the diffs help topic for information
    on why this is important for preserving rename/copy information
    and permission changes. Use :hg:`qfinish` to remove changesets
    from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    with repo.lock(): # cause this may move phase
        q = repo.mq
        try:
            imported = q.qimport(
                repo, filename, patchname=opts.get('name'),
                existing=opts.get('existing'), force=opts.get('force'),
                rev=opts.get('rev'), git=opts.get('git'))
        finally:
            # persist series/status even if the import aborted midway
            q.savedirty()

        # -P/--push applies the last imported patch (file imports only;
        # --rev imports are already applied)
        if imported and opts.get('push') and not opts.get('rev'):
            return q.push(repo, imported[-1])
    return 0
2361
2361
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    qrepo = q.init(repo, create)
    q.savedirty()
    if qrepo:
        if not os.path.exists(qrepo.wjoin('.hgignore')):
            fp = qrepo.wvfs('.hgignore', 'w')
            fp.write('^\\.hg\n'
                     '^\\.mq\n'
                     'syntax: glob\n'
                     'status\n'
                     'guards\n')
            fp.close()
        if not os.path.exists(qrepo.wjoin('series')):
            qrepo.wvfs('series', 'w').close()
        qrepo[None].add(['.hgignore', 'series'])
        commands.add(ui, qrepo)
    return 0
2387
2387
@command("^qinit",
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]'))
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # thin wrapper: forward the --create-repo flag to the shared helper
    createrepo = opts.get('create_repo')
    return qinit(ui, repo, create=createrepo)
2403
2403
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
         ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'),
         norepo=True)
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.peer(ui, opts, ui.expandpath(source))

    # patches repo (source only); probe it early so we can abort with a
    # clear message before any cloning has started
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    try:
        hg.peer(ui, opts, patchespath)
    except error.RepoError:
        raise error.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))
    # qbase: first applied patch in the source, if discoverable;
    # destrev: revisions to clone when patches must be excluded remotely
    qbase, destrev = None, None
    if sr.local():
        repo = sr.local()
        # NOTE(review): qbase is still None here, so repo[qbase] is the
        # working-directory context — confirm this phase test is intended
        if repo.mq.applied and repo[qbase].phase() != phases.secret:
            qbase = repo.mq.applied[0].node
            if not hg.islocal(dest):
                # remote destination: limit the clone to heads that do not
                # descend from qbase, plus qbase's parent, so applied
                # patches never reach the destination
                heads = set(repo.heads())
                destrev = list(heads.difference(repo.heads(qbase)))
                destrev.append(repo.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # remote source: ask the peer for its qbase bookmark, best-effort
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        repo = dr.local()
        if qbase:
            # local destination may have received applied patches; strip
            # them so the destination starts with an empty applied stack
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            strip(ui, repo, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(repo, repo.changelog.tip())
2490
2490
@command("qcommit|qci",
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'),
         inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    # locate the nested queue repository; abort if it was never created
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise error.Abort('no queue repository')
    # run the regular commit command against the queue repo instead
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
2504
2504
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    # straight delegation to the queue object's series printer
    missing = opts.get('missing')
    summary = opts.get('summary')
    repo.mq.qseries(repo, missing=missing, summary=summary)
    return 0
2516
2516
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    # position just past the last applied patch, or 0 if none applied
    pos = q.seriesend(True) if q.applied else 0
    if not pos:
        ui.write(_("no patches applied\n"))
        return 1
    q.qseries(repo, start=pos - 1, length=1, status='A',
              summary=opts.get('summary'))
2534
2534
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    pos = q.seriesend()
    # seriesend() == len(series) means nothing is left to push
    if pos == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
2546
2546
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    napplied = len(q.applied)
    # need at least two applied patches for there to be a "previous" one
    if not napplied:
        ui.write(_("no patches applied\n"))
        return 1
    if napplied == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    pos = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=pos, length=1, status='A',
              summary=opts.get('summary'))
2563
2563
def setupheaderopts(ui, opts):
    """Fill in the 'user'/'date' options from --currentuser/--currentdate.

    Mutates *opts* in place; explicit -u/-d values always win over the
    "current" flags.
    """
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % util.makedate()
2569
2569
@command("^qnew",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
         ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
         inferrepo=True)
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    q = repo.mq
    # resolve -m/-l into a single message and stash it where q.new looks
    opts['msg'] = cmdutil.logmessage(ui, opts)
    # resolve -U/-D into concrete user/date values
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2615
2615
@command("^qrefresh",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
         ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
         inferrepo=True)
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    queue = repo.mq
    # resolve -m/-l and -U/-D options before touching the repository
    msg = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    with repo.wlock():
        status = queue.refresh(repo, pats, msg=msg, **opts)
        queue.savedirty()
        return status
2660
2660
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'),
         inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    # route potentially long output through the pager, then delegate
    ui.pager('qdiff')
    queue = repo.mq
    queue.diff(repo, pats, opts)
    return 0
2683
2683
@command('qfold',
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    if not files:
        raise error.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise error.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    # explicit -m/-l message; empty string means "build one from headers"
    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup('qtip')
    patches = []
    messages = []
    # resolve and validate every requested patch name up front
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # NOTE(review): duplicate is warned about but still appended
            # below (no continue) — confirm this is intended
            ui.warn(_('skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise error.Abort(_('qfold cannot fold already applied patch %s')
                              % p)
        patches.append(p)

    for p in patches:
        if not message:
            # no explicit message: collect each folded patch's header text
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        # beware: this tuple unpack rebinds the 'files' parameter
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise error.Abort(_('error folding patch %s') % p)

    if not message:
        # concatenate the current patch header with each folded header,
        # separated by '* * *' lines; 'message' is a list of lines here
        # until the final join turns it into a string
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append('* * *')
                message.extend(msg)
        message = '\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    with repo.wlock():
        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
                  editform='mq.qfold')
        q.delete(repo, patches, opts)
        q.savedirty()
2750
2750
@command("qgoto",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixkeepchangesopts(ui, opts)
    q = repo.mq
    patch = q.lookup(patch)
    # popping reaches an already-applied patch; pushing reaches the rest
    move = q.pop if q.isapplied(patch) else q.push
    ret = move(repo, patch, force=opts.get('force'),
               nobackup=opts.get('no_backup'),
               keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
2774
2774
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::

       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # print one series entry with its guards, colorized by state;
        # closes over 'q' and 'applied' assigned below
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                # single space between guards, none after the last
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        # -l/--list mode: print every patch and return; it cannot be
        # combined with guard arguments or -n
        if args or opts.get('none'):
            raise error.Abort(_('cannot mix -l/--list with options or '
                                'arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # no patch name given (or first arg is a +/- guard): default to the
    # topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise error.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        # first positional argument names the patch; the rest are guards
        patch = args.pop(0)
    if patch is None:
        raise error.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        # set mode: replace the patch's guards (empty list with -n)
        idx = q.findseries(patch)
        if idx is None:
            raise error.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        # query mode: print the guards of the named patch
        status(q.series.index(q.lookup(patch)))
2849
2849
@command("qheader", [], _('hg qheader [PATCH]'))
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    # Resolve which patch to show: the explicit argument if given,
    # otherwise the topmost applied patch ('qtip').
    q = repo.mq
    if not patch:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    else:
        patch = q.lookup(patch)

    # Parse the patch file and emit its message lines.
    ph = patchheader(q.join(patch), q.plainmode)
    ui.write('\n'.join(ph.message) + '\n')
2867
2867
def lastsavename(path):
    """Find the most recent save file for *path*.

    Save files live next to *path* and are named ``<base>.<N>`` for a
    decimal index N.  Returns a ``(filename, index)`` tuple for the
    highest-numbered match, or ``(None, None)`` when none exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # Escape the base name so regex metacharacters in it cannot match
    # arbitrary characters (the old pattern used a bare '.', so e.g.
    # 'patchesX7' matched 'patches'), and anchor the pattern so trailing
    # junk after the index ('patches.7.bak') is rejected.
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2884
2884
def savename(path):
    """Return the file name the next save of *path* should use."""
    # Bump the index of the newest existing save file; start from 1
    # when there is no previous save.
    previous, index = lastsavename(path)
    if previous is None:
        index = 0
    return path + ".%d" % (index + 1)
2891
2891
@command("^qpush",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    # Normalize the --keep-changes flag against --force; presumably the
    # two conflict and fixkeepchangesopts reconciles them -- confirm
    # against its definition elsewhere in this file.
    opts = fixkeepchangesopts(ui, opts)
    if opts.get('merge'):
        # Deprecated merge mode: push while merging with a saved queue,
        # either the one named via -n/--name or the most recent save.
        if opts.get('name'):
            newpath = repo.vfs.join(opts.get('name'))
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.baseui, repo.path, newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    # Delegate the actual work to queue.push; its return value becomes
    # the command's exit status.
    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                 keepchanges=opts.get('keep_changes'))
    return ret
2936
2936
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    # Normalize the --keep-changes/--force combination before use.
    opts = fixkeepchangesopts(ui, opts)
    localupdate = True
    if opts.get('name'):
        # Deprecated: pop from an explicitly named queue directory.  In
        # that case the working directory is not updated (localupdate
        # stays False below).
        q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get('name')))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
    # queue.pop does the real work; persist queue state afterwards.
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
2973
2973
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    # One-argument form: the sole argument is the new name and the
    # source is the current (topmost applied) patch.
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    # Renaming into an existing directory moves the patch file inside
    # it, keeping its original base name.
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    # Rewrite the series entry, preserving any '#guard' annotations
    # attached to the old entry.
    i = q.findseries(patch)
    guards = q.guard_re.findall(q.fullseries[i])
    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    # If the patch is applied, update the status entry to the new name.
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    # If the patch queue itself is a versioned repository, mirror the
    # rename in its dirstate: a freshly-added file is just re-added
    # under the new name, otherwise record it as a copy and forget the
    # old name.
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        with r.wlock():
            if r.dirstate[patch] == 'a':
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                wctx.copy(patch, name)
                wctx.forget([patch])

    q.savedirty()
3028
3028
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'))
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    # Resolve the revision first so a bad REV aborts before the queue
    # is touched, then replay the saved state and persist it.
    rev = repo.lookup(rev)
    mq = repo.mq
    mq.restore(repo, rev,
               delete=opts.get('delete'),
               qupdate=opts.get('update'))
    mq.savedirty()
    return 0
3043
3043
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    ret = q.save(repo, msg=message)
    if ret:
        # queue.save failed; propagate its status.
        return ret
    q.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        # Copy the whole patch directory, either to an explicitly named
        # destination (-n) or to an auto-numbered save name.
        path = q.path
        if opts.get('name'):
            newpath = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise error.Abort(_('destination %s exists and is not '
                                        'a directory') % newpath)
                if not opts.get('force'):
                    raise error.Abort(_('destination %s exists, '
                                        'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts.get('empty'):
        # -e/--empty: clear the applied-patches status file.
        del q.applied[:]
        q.applieddirty = True
        q.savedirty()
    return 0
3081
3081
3082
3082
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'))
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable    (negative guard)
        qguard bar.patch    +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    guards = q.active()
    # True when the i-th applied patch is pushable under the currently
    # active guards.
    pushable = lambda i: q.pushable(q.applied[i].name)[0]
    if args or opts.get('none'):
        # Setting (or clearing) the active guards.  Snapshot the
        # unapplied/guarded counts first so we can report how the
        # selection changed them.
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: tally how many series entries carry each guard.
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        # Sort by guard name ignoring the leading +/- sign.
        guards = guards.items()
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # No arguments: just print the currently active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # --reapply remembers the current top patch so we can push back to
    # it after popping guarded patches.
    reapply = opts.get('reapply') and q.applied and q.applied[-1].name
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # Pop back to just before the first applied patch that is now
        # guarded.
        for i in xrange(len(q.applied)):
            if not pushable(i):
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, q.applied[i - 1].name)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # Persist queue state even if the re-push fails midway.
            q.savedirty()
3191
3191
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'))
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    if not opts.get('applied') and not revrange:
        raise error.Abort(_('no revisions specified'))
    elif opts.get('applied'):
        # -a/--applied prepends the full applied stack to any revisions
        # given explicitly.
        revrange = ('qbase::qtip',) + revrange

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may change phases but leaves it to the caller to lock
    # the repo, avoiding a deadlock with wlock.  This command code is
    # responsible for that locking.
    with repo.lock():
        q.finish(repo, revs)
        q.savedirty()
    return 0
3233
3233
3234 @command("qqueue",
3234 @command("qqueue",
3235 [('l', 'list', False, _('list all available queues')),
3235 [('l', 'list', False, _('list all available queues')),
3236 ('', 'active', False, _('print name of active queue')),
3236 ('', 'active', False, _('print name of active queue')),
3237 ('c', 'create', False, _('create new queue')),
3237 ('c', 'create', False, _('create new queue')),
3238 ('', 'rename', False, _('rename active queue')),
3238 ('', 'rename', False, _('rename active queue')),
3239 ('', 'delete', False, _('delete reference to queue')),
3239 ('', 'delete', False, _('delete reference to queue')),
3240 ('', 'purge', False, _('delete queue, and remove patch dir')),
3240 ('', 'purge', False, _('delete queue, and remove patch dir')),
3241 ],
3241 ],
3242 _('[OPTION] [QUEUE]'))
3242 _('[OPTION] [QUEUE]'))
3243 def qqueue(ui, repo, name=None, **opts):
3243 def qqueue(ui, repo, name=None, **opts):
3244 '''manage multiple patch queues
3244 '''manage multiple patch queues
3245
3245
3246 Supports switching between different patch queues, as well as creating
3246 Supports switching between different patch queues, as well as creating
3247 new patch queues and deleting existing ones.
3247 new patch queues and deleting existing ones.
3248
3248
3249 Omitting a queue name or specifying -l/--list will show you the registered
3249 Omitting a queue name or specifying -l/--list will show you the registered
3250 queues - by default the "normal" patches queue is registered. The currently
3250 queues - by default the "normal" patches queue is registered. The currently
3251 active queue will be marked with "(active)". Specifying --active will print
3251 active queue will be marked with "(active)". Specifying --active will print
3252 only the name of the active queue.
3252 only the name of the active queue.
3253
3253
3254 To create a new queue, use -c/--create. The queue is automatically made
3254 To create a new queue, use -c/--create. The queue is automatically made
3255 active, except in the case where there are applied patches from the
3255 active, except in the case where there are applied patches from the
3256 currently active queue in the repository. Then the queue will only be
3256 currently active queue in the repository. Then the queue will only be
3257 created and switching will fail.
3257 created and switching will fail.
3258
3258
3259 To delete an existing queue, use --delete. You cannot delete the currently
3259 To delete an existing queue, use --delete. You cannot delete the currently
3260 active queue.
3260 active queue.
3261
3261
3262 Returns 0 on success.
3262 Returns 0 on success.
3263 '''
3263 '''
3264 q = repo.mq
3264 q = repo.mq
3265 _defaultqueue = 'patches'
3265 _defaultqueue = 'patches'
3266 _allqueues = 'patches.queues'
3266 _allqueues = 'patches.queues'
3267 _activequeue = 'patches.queue'
3267 _activequeue = 'patches.queue'
3268
3268
3269 def _getcurrent():
3269 def _getcurrent():
3270 cur = os.path.basename(q.path)
3270 cur = os.path.basename(q.path)
3271 if cur.startswith('patches-'):
3271 if cur.startswith('patches-'):
3272 cur = cur[8:]
3272 cur = cur[8:]
3273 return cur
3273 return cur
3274
3274
3275 def _noqueues():
3275 def _noqueues():
3276 try:
3276 try:
3277 fh = repo.vfs(_allqueues, 'r')
3277 fh = repo.vfs(_allqueues, 'r')
3278 fh.close()
3278 fh.close()
3279 except IOError:
3279 except IOError:
3280 return True
3280 return True
3281
3281
3282 return False
3282 return False
3283
3283
3284 def _getqueues():
3284 def _getqueues():
3285 current = _getcurrent()
3285 current = _getcurrent()
3286
3286
3287 try:
3287 try:
3288 fh = repo.vfs(_allqueues, 'r')
3288 fh = repo.vfs(_allqueues, 'r')
3289 queues = [queue.strip() for queue in fh if queue.strip()]
3289 queues = [queue.strip() for queue in fh if queue.strip()]
3290 fh.close()
3290 fh.close()
3291 if current not in queues:
3291 if current not in queues:
3292 queues.append(current)
3292 queues.append(current)
3293 except IOError:
3293 except IOError:
3294 queues = [_defaultqueue]
3294 queues = [_defaultqueue]
3295
3295
3296 return sorted(queues)
3296 return sorted(queues)
3297
3297
3298 def _setactive(name):
3298 def _setactive(name):
3299 if q.applied:
3299 if q.applied:
3300 raise error.Abort(_('new queue created, but cannot make active '
3300 raise error.Abort(_('new queue created, but cannot make active '
3301 'as patches are applied'))
3301 'as patches are applied'))
3302 _setactivenocheck(name)
3302 _setactivenocheck(name)
3303
3303
3304 def _setactivenocheck(name):
3304 def _setactivenocheck(name):
3305 fh = repo.vfs(_activequeue, 'w')
3305 fh = repo.vfs(_activequeue, 'w')
3306 if name != 'patches':
3306 if name != 'patches':
3307 fh.write(name)
3307 fh.write(name)
3308 fh.close()
3308 fh.close()
3309
3309
3310 def _addqueue(name):
3310 def _addqueue(name):
3311 fh = repo.vfs(_allqueues, 'a')
3311 fh = repo.vfs(_allqueues, 'a')
3312 fh.write('%s\n' % (name,))
3312 fh.write('%s\n' % (name,))
3313 fh.close()
3313 fh.close()
3314
3314
3315 def _queuedir(name):
3315 def _queuedir(name):
3316 if name == 'patches':
3316 if name == 'patches':
3317 return repo.vfs.join('patches')
3317 return repo.vfs.join('patches')
3318 else:
3318 else:
3319 return repo.vfs.join('patches-' + name)
3319 return repo.vfs.join('patches-' + name)
3320
3320
3321 def _validname(name):
3321 def _validname(name):
3322 for n in name:
3322 for n in name:
3323 if n in ':\\/.':
3323 if n in ':\\/.':
3324 return False
3324 return False
3325 return True
3325 return True
3326
3326
3327 def _delete(name):
3327 def _delete(name):
3328 if name not in existing:
3328 if name not in existing:
3329 raise error.Abort(_('cannot delete queue that does not exist'))
3329 raise error.Abort(_('cannot delete queue that does not exist'))
3330
3330
3331 current = _getcurrent()
3331 current = _getcurrent()
3332
3332
3333 if name == current:
3333 if name == current:
3334 raise error.Abort(_('cannot delete currently active queue'))
3334 raise error.Abort(_('cannot delete currently active queue'))
3335
3335
3336 fh = repo.vfs('patches.queues.new', 'w')
3336 fh = repo.vfs('patches.queues.new', 'w')
3337 for queue in existing:
3337 for queue in existing:
3338 if queue == name:
3338 if queue == name:
3339 continue
3339 continue
3340 fh.write('%s\n' % (queue,))
3340 fh.write('%s\n' % (queue,))
3341 fh.close()
3341 fh.close()
3342 repo.vfs.rename('patches.queues.new', _allqueues)
3342 repo.vfs.rename('patches.queues.new', _allqueues)
3343
3343
3344 if not name or opts.get('list') or opts.get('active'):
3344 if not name or opts.get('list') or opts.get('active'):
3345 current = _getcurrent()
3345 current = _getcurrent()
3346 if opts.get('active'):
3346 if opts.get('active'):
3347 ui.write('%s\n' % (current,))
3347 ui.write('%s\n' % (current,))
3348 return
3348 return
3349 for queue in _getqueues():
3349 for queue in _getqueues():
3350 ui.write('%s' % (queue,))
3350 ui.write('%s' % (queue,))
3351 if queue == current and not ui.quiet:
3351 if queue == current and not ui.quiet:
3352 ui.write(_(' (active)\n'))
3352 ui.write(_(' (active)\n'))
3353 else:
3353 else:
3354 ui.write('\n')
3354 ui.write('\n')
3355 return
3355 return
3356
3356
3357 if not _validname(name):
3357 if not _validname(name):
3358 raise error.Abort(
3358 raise error.Abort(
3359 _('invalid queue name, may not contain the characters ":\\/."'))
3359 _('invalid queue name, may not contain the characters ":\\/."'))
3360
3360
3361 with repo.wlock():
3361 with repo.wlock():
3362 existing = _getqueues()
3362 existing = _getqueues()
3363
3363
3364 if opts.get('create'):
3364 if opts.get('create'):
3365 if name in existing:
3365 if name in existing:
3366 raise error.Abort(_('queue "%s" already exists') % name)
3366 raise error.Abort(_('queue "%s" already exists') % name)
3367 if _noqueues():
3367 if _noqueues():
3368 _addqueue(_defaultqueue)
3368 _addqueue(_defaultqueue)
3369 _addqueue(name)
3369 _addqueue(name)
3370 _setactive(name)
3370 _setactive(name)
3371 elif opts.get('rename'):
3371 elif opts.get('rename'):
3372 current = _getcurrent()
3372 current = _getcurrent()
3373 if name == current:
3373 if name == current:
3374 raise error.Abort(_('can\'t rename "%s" to its current name')
3374 raise error.Abort(_('can\'t rename "%s" to its current name')
3375 % name)
3375 % name)
3376 if name in existing:
3376 if name in existing:
3377 raise error.Abort(_('queue "%s" already exists') % name)
3377 raise error.Abort(_('queue "%s" already exists') % name)
3378
3378
3379 olddir = _queuedir(current)
3379 olddir = _queuedir(current)
3380 newdir = _queuedir(name)
3380 newdir = _queuedir(name)
3381
3381
3382 if os.path.exists(newdir):
3382 if os.path.exists(newdir):
3383 raise error.Abort(_('non-queue directory "%s" already exists') %
3383 raise error.Abort(_('non-queue directory "%s" already exists') %
3384 newdir)
3384 newdir)
3385
3385
3386 fh = repo.vfs('patches.queues.new', 'w')
3386 fh = repo.vfs('patches.queues.new', 'w')
3387 for queue in existing:
3387 for queue in existing:
3388 if queue == current:
3388 if queue == current:
3389 fh.write('%s\n' % (name,))
3389 fh.write('%s\n' % (name,))
3390 if os.path.exists(olddir):
3390 if os.path.exists(olddir):
3391 util.rename(olddir, newdir)
3391 util.rename(olddir, newdir)
3392 else:
3392 else:
3393 fh.write('%s\n' % (queue,))
3393 fh.write('%s\n' % (queue,))
3394 fh.close()
3394 fh.close()
3395 repo.vfs.rename('patches.queues.new', _allqueues)
3395 repo.vfs.rename('patches.queues.new', _allqueues)
3396 _setactivenocheck(name)
3396 _setactivenocheck(name)
3397 elif opts.get('delete'):
3397 elif opts.get('delete'):
3398 _delete(name)
3398 _delete(name)
3399 elif opts.get('purge'):
3399 elif opts.get('purge'):
3400 if name in existing:
3400 if name in existing:
3401 _delete(name)
3401 _delete(name)
3402 qdir = _queuedir(name)
3402 qdir = _queuedir(name)
3403 if os.path.exists(qdir):
3403 if os.path.exists(qdir):
3404 shutil.rmtree(qdir)
3404 shutil.rmtree(qdir)
3405 else:
3405 else:
3406 if name not in existing:
3406 if name not in existing:
3407 raise error.Abort(_('use --create to create a new queue'))
3407 raise error.Abort(_('use --create to create a new queue'))
3408 _setactive(name)
3408 _setactive(name)
3409
3409
3410 def mqphasedefaults(repo, roots):
3410 def mqphasedefaults(repo, roots):
3411 """callback used to set mq changeset as secret when no phase data exists"""
3411 """callback used to set mq changeset as secret when no phase data exists"""
3412 if repo.mq.applied:
3412 if repo.mq.applied:
3413 if repo.ui.configbool('mq', 'secret', False):
3413 if repo.ui.configbool('mq', 'secret', False):
3414 mqphase = phases.secret
3414 mqphase = phases.secret
3415 else:
3415 else:
3416 mqphase = phases.draft
3416 mqphase = phases.draft
3417 qbase = repo[repo.mq.applied[0].node]
3417 qbase = repo[repo.mq.applied[0].node]
3418 roots[mqphase].add(qbase.node())
3418 roots[mqphase].add(qbase.node())
3419 return roots
3419 return roots
3420
3420
3421 def reposetup(ui, repo):
3421 def reposetup(ui, repo):
3422 class mqrepo(repo.__class__):
3422 class mqrepo(repo.__class__):
3423 @localrepo.unfilteredpropertycache
3423 @localrepo.unfilteredpropertycache
3424 def mq(self):
3424 def mq(self):
3425 return queue(self.ui, self.baseui, self.path)
3425 return queue(self.ui, self.baseui, self.path)
3426
3426
3427 def invalidateall(self):
3427 def invalidateall(self):
3428 super(mqrepo, self).invalidateall()
3428 super(mqrepo, self).invalidateall()
3429 if localrepo.hasunfilteredcache(self, 'mq'):
3429 if localrepo.hasunfilteredcache(self, 'mq'):
3430 # recreate mq in case queue path was changed
3430 # recreate mq in case queue path was changed
3431 delattr(self.unfiltered(), 'mq')
3431 delattr(self.unfiltered(), 'mq')
3432
3432
3433 def abortifwdirpatched(self, errmsg, force=False):
3433 def abortifwdirpatched(self, errmsg, force=False):
3434 if self.mq.applied and self.mq.checkapplied and not force:
3434 if self.mq.applied and self.mq.checkapplied and not force:
3435 parents = self.dirstate.parents()
3435 parents = self.dirstate.parents()
3436 patches = [s.node for s in self.mq.applied]
3436 patches = [s.node for s in self.mq.applied]
3437 if parents[0] in patches or parents[1] in patches:
3437 if parents[0] in patches or parents[1] in patches:
3438 raise error.Abort(errmsg)
3438 raise error.Abort(errmsg)
3439
3439
3440 def commit(self, text="", user=None, date=None, match=None,
3440 def commit(self, text="", user=None, date=None, match=None,
3441 force=False, editor=False, extra=None):
3441 force=False, editor=False, extra=None):
3442 if extra is None:
3442 if extra is None:
3443 extra = {}
3443 extra = {}
3444 self.abortifwdirpatched(
3444 self.abortifwdirpatched(
3445 _('cannot commit over an applied mq patch'),
3445 _('cannot commit over an applied mq patch'),
3446 force)
3446 force)
3447
3447
3448 return super(mqrepo, self).commit(text, user, date, match, force,
3448 return super(mqrepo, self).commit(text, user, date, match, force,
3449 editor, extra)
3449 editor, extra)
3450
3450
3451 def checkpush(self, pushop):
3451 def checkpush(self, pushop):
3452 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3452 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3453 outapplied = [e.node for e in self.mq.applied]
3453 outapplied = [e.node for e in self.mq.applied]
3454 if pushop.revs:
3454 if pushop.revs:
3455 # Assume applied patches have no non-patch descendants and
3455 # Assume applied patches have no non-patch descendants and
3456 # are not on remote already. Filtering any changeset not
3456 # are not on remote already. Filtering any changeset not
3457 # pushed.
3457 # pushed.
3458 heads = set(pushop.revs)
3458 heads = set(pushop.revs)
3459 for node in reversed(outapplied):
3459 for node in reversed(outapplied):
3460 if node in heads:
3460 if node in heads:
3461 break
3461 break
3462 else:
3462 else:
3463 outapplied.pop()
3463 outapplied.pop()
3464 # looking for pushed and shared changeset
3464 # looking for pushed and shared changeset
3465 for node in outapplied:
3465 for node in outapplied:
3466 if self[node].phase() < phases.secret:
3466 if self[node].phase() < phases.secret:
3467 raise error.Abort(_('source has mq patches applied'))
3467 raise error.Abort(_('source has mq patches applied'))
3468 # no non-secret patches pushed
3468 # no non-secret patches pushed
3469 super(mqrepo, self).checkpush(pushop)
3469 super(mqrepo, self).checkpush(pushop)
3470
3470
3471 def _findtags(self):
3471 def _findtags(self):
3472 '''augment tags from base class with patch tags'''
3472 '''augment tags from base class with patch tags'''
3473 result = super(mqrepo, self)._findtags()
3473 result = super(mqrepo, self)._findtags()
3474
3474
3475 q = self.mq
3475 q = self.mq
3476 if not q.applied:
3476 if not q.applied:
3477 return result
3477 return result
3478
3478
3479 mqtags = [(patch.node, patch.name) for patch in q.applied]
3479 mqtags = [(patch.node, patch.name) for patch in q.applied]
3480
3480
3481 try:
3481 try:
3482 # for now ignore filtering business
3482 # for now ignore filtering business
3483 self.unfiltered().changelog.rev(mqtags[-1][0])
3483 self.unfiltered().changelog.rev(mqtags[-1][0])
3484 except error.LookupError:
3484 except error.LookupError:
3485 self.ui.warn(_('mq status file refers to unknown node %s\n')
3485 self.ui.warn(_('mq status file refers to unknown node %s\n')
3486 % short(mqtags[-1][0]))
3486 % short(mqtags[-1][0]))
3487 return result
3487 return result
3488
3488
3489 # do not add fake tags for filtered revisions
3489 # do not add fake tags for filtered revisions
3490 included = self.changelog.hasnode
3490 included = self.changelog.hasnode
3491 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3491 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3492 if not mqtags:
3492 if not mqtags:
3493 return result
3493 return result
3494
3494
3495 mqtags.append((mqtags[-1][0], 'qtip'))
3495 mqtags.append((mqtags[-1][0], 'qtip'))
3496 mqtags.append((mqtags[0][0], 'qbase'))
3496 mqtags.append((mqtags[0][0], 'qbase'))
3497 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3497 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3498 tags = result[0]
3498 tags = result[0]
3499 for patch in mqtags:
3499 for patch in mqtags:
3500 if patch[1] in tags:
3500 if patch[1] in tags:
3501 self.ui.warn(_('tag %s overrides mq patch of the same '
3501 self.ui.warn(_('tag %s overrides mq patch of the same '
3502 'name\n') % patch[1])
3502 'name\n') % patch[1])
3503 else:
3503 else:
3504 tags[patch[1]] = patch[0]
3504 tags[patch[1]] = patch[0]
3505
3505
3506 return result
3506 return result
3507
3507
3508 if repo.local():
3508 if repo.local():
3509 repo.__class__ = mqrepo
3509 repo.__class__ = mqrepo
3510
3510
3511 repo._phasedefaults.append(mqphasedefaults)
3511 repo._phasedefaults.append(mqphasedefaults)
3512
3512
3513 def mqimport(orig, ui, repo, *args, **kwargs):
3513 def mqimport(orig, ui, repo, *args, **kwargs):
3514 if (util.safehasattr(repo, 'abortifwdirpatched')
3514 if (util.safehasattr(repo, 'abortifwdirpatched')
3515 and not kwargs.get('no_commit', False)):
3515 and not kwargs.get('no_commit', False)):
3516 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3516 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3517 kwargs.get('force'))
3517 kwargs.get('force'))
3518 return orig(ui, repo, *args, **kwargs)
3518 return orig(ui, repo, *args, **kwargs)
3519
3519
3520 def mqinit(orig, ui, *args, **kwargs):
3520 def mqinit(orig, ui, *args, **kwargs):
3521 mq = kwargs.pop('mq', None)
3521 mq = kwargs.pop('mq', None)
3522
3522
3523 if not mq:
3523 if not mq:
3524 return orig(ui, *args, **kwargs)
3524 return orig(ui, *args, **kwargs)
3525
3525
3526 if args:
3526 if args:
3527 repopath = args[0]
3527 repopath = args[0]
3528 if not hg.islocal(repopath):
3528 if not hg.islocal(repopath):
3529 raise error.Abort(_('only a local queue repository '
3529 raise error.Abort(_('only a local queue repository '
3530 'may be initialized'))
3530 'may be initialized'))
3531 else:
3531 else:
3532 repopath = cmdutil.findrepo(pycompat.getcwd())
3532 repopath = cmdutil.findrepo(pycompat.getcwd())
3533 if not repopath:
3533 if not repopath:
3534 raise error.Abort(_('there is no Mercurial repository here '
3534 raise error.Abort(_('there is no Mercurial repository here '
3535 '(.hg not found)'))
3535 '(.hg not found)'))
3536 repo = hg.repository(ui, repopath)
3536 repo = hg.repository(ui, repopath)
3537 return qinit(ui, repo, True)
3537 return qinit(ui, repo, True)
3538
3538
3539 def mqcommand(orig, ui, repo, *args, **kwargs):
3539 def mqcommand(orig, ui, repo, *args, **kwargs):
3540 """Add --mq option to operate on patch repository instead of main"""
3540 """Add --mq option to operate on patch repository instead of main"""
3541
3541
3542 # some commands do not like getting unknown options
3542 # some commands do not like getting unknown options
3543 mq = kwargs.pop(r'mq', None)
3543 mq = kwargs.pop(r'mq', None)
3544
3544
3545 if not mq:
3545 if not mq:
3546 return orig(ui, repo, *args, **kwargs)
3546 return orig(ui, repo, *args, **kwargs)
3547
3547
3548 q = repo.mq
3548 q = repo.mq
3549 r = q.qrepo()
3549 r = q.qrepo()
3550 if not r:
3550 if not r:
3551 raise error.Abort(_('no queue repository'))
3551 raise error.Abort(_('no queue repository'))
3552 return orig(r.ui, r, *args, **kwargs)
3552 return orig(r.ui, r, *args, **kwargs)
3553
3553
3554 def summaryhook(ui, repo):
3554 def summaryhook(ui, repo):
3555 q = repo.mq
3555 q = repo.mq
3556 m = []
3556 m = []
3557 a, u = len(q.applied), len(q.unapplied(repo))
3557 a, u = len(q.applied), len(q.unapplied(repo))
3558 if a:
3558 if a:
3559 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3559 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3560 if u:
3560 if u:
3561 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3561 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3562 if m:
3562 if m:
3563 # i18n: column positioning for "hg summary"
3563 # i18n: column positioning for "hg summary"
3564 ui.write(_("mq: %s\n") % ', '.join(m))
3564 ui.write(_("mq: %s\n") % ', '.join(m))
3565 else:
3565 else:
3566 # i18n: column positioning for "hg summary"
3566 # i18n: column positioning for "hg summary"
3567 ui.note(_("mq: (empty queue)\n"))
3567 ui.note(_("mq: (empty queue)\n"))
3568
3568
3569 revsetpredicate = registrar.revsetpredicate()
3569 revsetpredicate = registrar.revsetpredicate()
3570
3570
3571 @revsetpredicate('mq()')
3571 @revsetpredicate('mq()')
3572 def revsetmq(repo, subset, x):
3572 def revsetmq(repo, subset, x):
3573 """Changesets managed by MQ.
3573 """Changesets managed by MQ.
3574 """
3574 """
3575 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3575 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3576 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3576 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3577 return smartset.baseset([r for r in subset if r in applied])
3577 return smartset.baseset([r for r in subset if r in applied])
3578
3578
3579 # tell hggettext to extract docstrings from these functions:
3579 # tell hggettext to extract docstrings from these functions:
3580 i18nfunctions = [revsetmq]
3580 i18nfunctions = [revsetmq]
3581
3581
3582 def extsetup(ui):
3582 def extsetup(ui):
3583 # Ensure mq wrappers are called first, regardless of extension load order by
3583 # Ensure mq wrappers are called first, regardless of extension load order by
3584 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3584 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3585 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3585 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3586
3586
3587 extensions.wrapcommand(commands.table, 'import', mqimport)
3587 extensions.wrapcommand(commands.table, 'import', mqimport)
3588 cmdutil.summaryhooks.add('mq', summaryhook)
3588 cmdutil.summaryhooks.add('mq', summaryhook)
3589
3589
3590 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3590 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3591 entry[1].extend(mqopt)
3591 entry[1].extend(mqopt)
3592
3592
3593 def dotable(cmdtable):
3593 def dotable(cmdtable):
3594 for cmd, entry in cmdtable.iteritems():
3594 for cmd, entry in cmdtable.iteritems():
3595 cmd = cmdutil.parsealiases(cmd)[0]
3595 cmd = cmdutil.parsealiases(cmd)[0]
3596 func = entry[0]
3596 func = entry[0]
3597 if func.norepo:
3597 if func.norepo:
3598 continue
3598 continue
3599 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3599 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3600 entry[1].extend(mqopt)
3600 entry[1].extend(mqopt)
3601
3601
3602 dotable(commands.table)
3602 dotable(commands.table)
3603
3603
3604 for extname, extmodule in extensions.extensions():
3604 for extname, extmodule in extensions.extensions():
3605 if extmodule.__file__ != __file__:
3605 if extmodule.__file__ != __file__:
3606 dotable(getattr(extmodule, 'cmdtable', {}))
3606 dotable(getattr(extmodule, 'cmdtable', {}))
3607
3607
3608 colortable = {'qguard.negative': 'red',
3608 colortable = {'qguard.negative': 'red',
3609 'qguard.positive': 'yellow',
3609 'qguard.positive': 'yellow',
3610 'qguard.unguarded': 'green',
3610 'qguard.unguarded': 'green',
3611 'qseries.applied': 'blue bold underline',
3611 'qseries.applied': 'blue bold underline',
3612 'qseries.guarded': 'black bold',
3612 'qseries.guarded': 'black bold',
3613 'qseries.missing': 'red bold',
3613 'qseries.missing': 'red bold',
3614 'qseries.unapplied': 'black bold'}
3614 'qseries.unapplied': 'black bold'}
@@ -1,741 +1,742 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 you do not supply one via configuration or the command line. You can
48 you do not supply one via configuration or the command line. You can
49 override this to never prompt by configuring an empty value::
49 override this to never prompt by configuring an empty value::
50
50
51 [email]
51 [email]
52 cc =
52 cc =
53
53
54 You can control the default inclusion of an introduction message with the
54 You can control the default inclusion of an introduction message with the
55 ``patchbomb.intro`` configuration option. The configuration is always
55 ``patchbomb.intro`` configuration option. The configuration is always
56 overwritten by command line flags like --intro and --desc::
56 overwritten by command line flags like --intro and --desc::
57
57
58 [patchbomb]
58 [patchbomb]
59 intro=auto # include introduction message if more than 1 patch (default)
59 intro=auto # include introduction message if more than 1 patch (default)
60 intro=never # never include an introduction message
60 intro=never # never include an introduction message
61 intro=always # always include an introduction message
61 intro=always # always include an introduction message
62
62
63 You can specify a template for flags to be added in subject prefixes. Flags
63 You can specify a template for flags to be added in subject prefixes. Flags
64 specified by --flag option are exported as ``{flags}`` keyword::
64 specified by --flag option are exported as ``{flags}`` keyword::
65
65
66 [patchbomb]
66 [patchbomb]
67 flagtemplate = "{separate(' ',
67 flagtemplate = "{separate(' ',
68 ifeq(branch, 'default', '', branch|upper),
68 ifeq(branch, 'default', '', branch|upper),
69 flags)}"
69 flags)}"
70
70
71 You can set patchbomb to always ask for confirmation by setting
71 You can set patchbomb to always ask for confirmation by setting
72 ``patchbomb.confirm`` to true.
72 ``patchbomb.confirm`` to true.
73 '''
73 '''
74 from __future__ import absolute_import
74 from __future__ import absolute_import
75
75
76 import email as emailmod
76 import email as emailmod
77 import errno
77 import errno
78 import os
78 import os
79 import socket
79 import socket
80 import tempfile
80 import tempfile
81
81
82 from mercurial.i18n import _
82 from mercurial.i18n import _
83 from mercurial import (
83 from mercurial import (
84 cmdutil,
84 cmdutil,
85 commands,
85 commands,
86 error,
86 error,
87 formatter,
87 formatter,
88 hg,
88 hg,
89 mail,
89 mail,
90 node as nodemod,
90 node as nodemod,
91 patch,
91 patch,
92 registrar,
92 scmutil,
93 scmutil,
93 templater,
94 templater,
94 util,
95 util,
95 )
96 )
96 stringio = util.stringio
97 stringio = util.stringio
97
98
98 cmdtable = {}
99 cmdtable = {}
99 command = cmdutil.command(cmdtable)
100 command = registrar.command(cmdtable)
100 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
101 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
101 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
102 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
102 # be specifying the version(s) of Mercurial they are tested with, or
103 # be specifying the version(s) of Mercurial they are tested with, or
103 # leave the attribute unspecified.
104 # leave the attribute unspecified.
104 testedwith = 'ships-with-hg-core'
105 testedwith = 'ships-with-hg-core'
105
106
106 def _addpullheader(seq, ctx):
107 def _addpullheader(seq, ctx):
107 """Add a header pointing to a public URL where the changeset is available
108 """Add a header pointing to a public URL where the changeset is available
108 """
109 """
109 repo = ctx.repo()
110 repo = ctx.repo()
110 # experimental config: patchbomb.publicurl
111 # experimental config: patchbomb.publicurl
111 # waiting for some logic that check that the changeset are available on the
112 # waiting for some logic that check that the changeset are available on the
112 # destination before patchbombing anything.
113 # destination before patchbombing anything.
113 pullurl = repo.ui.config('patchbomb', 'publicurl')
114 pullurl = repo.ui.config('patchbomb', 'publicurl')
114 if pullurl is not None:
115 if pullurl is not None:
115 return ('Available At %s\n'
116 return ('Available At %s\n'
116 '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
117 '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
117 return None
118 return None
118
119
119 def uisetup(ui):
120 def uisetup(ui):
120 cmdutil.extraexport.append('pullurl')
121 cmdutil.extraexport.append('pullurl')
121 cmdutil.extraexportmap['pullurl'] = _addpullheader
122 cmdutil.extraexportmap['pullurl'] = _addpullheader
122
123
123
124
124 def prompt(ui, prompt, default=None, rest=':'):
125 def prompt(ui, prompt, default=None, rest=':'):
125 if default:
126 if default:
126 prompt += ' [%s]' % default
127 prompt += ' [%s]' % default
127 return ui.prompt(prompt + rest, default)
128 return ui.prompt(prompt + rest, default)
128
129
129 def introwanted(ui, opts, number):
130 def introwanted(ui, opts, number):
130 '''is an introductory message apparently wanted?'''
131 '''is an introductory message apparently wanted?'''
131 introconfig = ui.config('patchbomb', 'intro', 'auto')
132 introconfig = ui.config('patchbomb', 'intro', 'auto')
132 if opts.get('intro') or opts.get('desc'):
133 if opts.get('intro') or opts.get('desc'):
133 intro = True
134 intro = True
134 elif introconfig == 'always':
135 elif introconfig == 'always':
135 intro = True
136 intro = True
136 elif introconfig == 'never':
137 elif introconfig == 'never':
137 intro = False
138 intro = False
138 elif introconfig == 'auto':
139 elif introconfig == 'auto':
139 intro = 1 < number
140 intro = 1 < number
140 else:
141 else:
141 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
142 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
142 % introconfig)
143 % introconfig)
143 ui.write_err(_('(should be one of always, never, auto)\n'))
144 ui.write_err(_('(should be one of always, never, auto)\n'))
144 intro = 1 < number
145 intro = 1 < number
145 return intro
146 return intro
146
147
147 def _formatflags(ui, repo, rev, flags):
148 def _formatflags(ui, repo, rev, flags):
148 """build flag string optionally by template"""
149 """build flag string optionally by template"""
149 tmpl = ui.config('patchbomb', 'flagtemplate')
150 tmpl = ui.config('patchbomb', 'flagtemplate')
150 if not tmpl:
151 if not tmpl:
151 return ' '.join(flags)
152 return ' '.join(flags)
152 out = util.stringio()
153 out = util.stringio()
153 opts = {'template': templater.unquotestring(tmpl)}
154 opts = {'template': templater.unquotestring(tmpl)}
154 with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
155 with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
155 fm.startitem()
156 fm.startitem()
156 fm.context(ctx=repo[rev])
157 fm.context(ctx=repo[rev])
157 fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
158 fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
158 return out.getvalue()
159 return out.getvalue()
159
160
160 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
161 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
161 """build prefix to patch subject"""
162 """build prefix to patch subject"""
162 flag = _formatflags(ui, repo, rev, flags)
163 flag = _formatflags(ui, repo, rev, flags)
163 if flag:
164 if flag:
164 flag = ' ' + flag
165 flag = ' ' + flag
165
166
166 if not numbered:
167 if not numbered:
167 return '[PATCH%s]' % flag
168 return '[PATCH%s]' % flag
168 else:
169 else:
169 tlen = len(str(total))
170 tlen = len(str(total))
170 return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
171 return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
171
172
def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
              patchname=None):
    """Build one email message for a single patch.

    ``patchlines`` is the exported patch as a list of lines; it may be
    mutated in place when --plain strips the header.  ``idx``/``total``
    position the patch within the series for the subject prefix.

    Returns a ``(message, subject, diffstat)`` tuple.  Raises ValueError
    when neither a patch name nor a node id can be determined.
    """

    desc = []
    node = None
    body = ''

    # split the patch into the description (everything before the first
    # diff line) and pick up the node id from the '# Node ID' header
    for line in patchlines:
        if line.startswith('#'):
            if line.startswith('# Node ID'):
                node = line.split()[-1]
            continue
        if line.startswith('diff -r') or line.startswith('diff --git'):
            break
        desc.append(line)

    if not patchname and not node:
        raise ValueError

    if opts.get('attach') and not opts.get('body'):
        # with --attach, the email body carries the description only
        body = ('\n'.join(desc[1:]).strip() or
                'Patch subject is complete summary.')
        body += '\n\n\n'

    if opts.get('plain'):
        # strip the '# ...' hg patch header (and the blank lines after it)
        while patchlines and patchlines[0].startswith('# '):
            patchlines.pop(0)
        if patchlines:
            patchlines.pop(0)
        while patchlines and not patchlines[0].strip():
            patchlines.pop(0)

    ds = patch.diffstat(patchlines)
    if opts.get('diffstat'):
        body += ds + '\n\n'

    addattachment = opts.get('attach') or opts.get('inline')
    # --body forces the patch text into the body even when also attached
    if not addattachment or opts.get('body'):
        body += '\n'.join(patchlines)

    if addattachment:
        msg = emailmod.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
                               opts.get('test'))
        binnode = nodemod.bin(node)
        # if node is mq patch, it will have the patch file's name as a tag
        if not patchname:
            patchtags = [t for t in repo.nodetags(binnode)
                         if t.endswith('.patch') or t.endswith('.diff')]
            if patchtags:
                patchname = patchtags[0]
            elif total > 1:
                patchname = cmdutil.makefilename(repo, '%b-%n.patch',
                                                 binnode, seqno=idx,
                                                 total=total)
            else:
                patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
        disposition = 'inline'
        if opts.get('attach'):
            disposition = 'attachment'
        p['Content-Disposition'] = disposition + '; filename=' + patchname
        msg.attach(p)
    else:
        msg = mail.mimetextpatch(body, display=opts.get('test'))

    # subject: '[PATCH ...] <first description line, trimmed>'
    prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
                           numbered)
    subj = desc[0].strip().rstrip('. ')
    if not numbered:
        subj = ' '.join([prefix, opts.get('subject') or subj])
    else:
        subj = ' '.join([prefix, subj])
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    msg['X-Mercurial-Node'] = node
    msg['X-Mercurial-Series-Index'] = '%i' % idx
    msg['X-Mercurial-Series-Total'] = '%i' % total
    return msg, subj, ds
251
252
def _getpatches(repo, revs, **opts):
    """Generate the exported patch for each revision in ``revs``.

    Each generated patch is a list of lines.
    """
    ui = repo.ui
    wdirparent = repo['.'].rev()
    for rev in revs:
        # mailing the working directory's parent while it is dirty is
        # probably a mistake; warn but carry on
        dirty = repo[None].files() or repo[None].deleted()
        if rev == wdirparent and dirty:
            ui.warn(_('warning: working directory has '
                      'uncommitted changes\n'))
        buf = stringio()
        cmdutil.export(repo, [rev], fp=buf,
                       opts=patch.difffeatureopts(ui, opts, git=True))
        yield buf.getvalue().split('\n')
def _getbundle(repo, dest, **opts):
    """Return a bundle containing changesets missing in "dest".

    The ``opts`` keyword-arguments are the same ones accepted by the
    ``bundle`` command.  The bundle is returned as a single in-memory
    binary blob.
    """
    ui = repo.ui
    stagingdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
    bundlepath = os.path.join(stagingdir, 'bundle')
    configuredtype = ui.config('patchbomb', 'bundletype')
    if configuredtype:
        opts['type'] = configuredtype
    try:
        commands.bundle(ui, repo, bundlepath, dest, **opts)
        return util.readfile(bundlepath)
    finally:
        # best-effort removal of the staging file, then its directory
        try:
            os.unlink(bundlepath)
        except OSError:
            pass
        os.rmdir(stagingdir)
290
291
def _getdescription(repo, defaultbody, sender, **opts):
    """Obtain the body of the introduction message and return it.

    This is also used for the body of email with an attached bundle.

    The body can be obtained either from the command line option (--desc,
    a file name) or entered by the user through the editor.
    """
    ui = repo.ui
    if opts.get('desc'):
        # fix: close the description file instead of leaking the handle
        with open(opts.get('desc')) as descfile:
            body = descfile.read()
    else:
        ui.write(_('\nWrite the introductory message for the '
                   'patch series.\n\n'))
        body = ui.edit(defaultbody, sender, repopath=repo.path)
        # Save series description in case sendmail fails
        msgfile = repo.vfs('last-email.txt', 'wb')
        try:
            msgfile.write(body)
        finally:
            # close even if the write fails, so the handle never leaks
            msgfile.close()
    return body
311
312
def _getbundlemsgs(repo, sender, bundle, **opts):
    """Get the full email for sending a given bundle.

    Returns a list of "email" tuples (subject, content, None); the list
    is always exactly one message long in this case.
    """
    ui = repo.ui
    charsets = mail._charsets(ui)
    subject = opts.get('subject')
    if not subject:
        subject = prompt(ui, 'Subject:', 'A bundle for your repository')

    body = _getdescription(repo, '', sender, **opts)
    outer = emailmod.MIMEMultipart.MIMEMultipart()
    if body:
        outer.attach(mail.mimeencode(ui, body, charsets, opts.get('test')))
    # attach the raw bundle bytes as a base64-encoded part
    bundlepart = emailmod.MIMEBase.MIMEBase('application',
                                            'x-mercurial-bundle')
    bundlepart.set_payload(bundle)
    bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
    bundlepart.add_header('Content-Disposition', 'attachment',
                          filename=bundlename)
    emailmod.Encoders.encode_base64(bundlepart)
    outer.attach(bundlepart)
    outer['Subject'] = mail.headencode(ui, subject, charsets,
                                       opts.get('test'))
    return [(outer, subject, None)]
336
337
def _makeintro(repo, sender, revs, patches, **opts):
    """Make an introduction email, asking the user for content if needed.

    The email is returned as (subject, body, cumulative-diffstat); None is
    returned when the user declines to write an intro.
    """
    ui = repo.ui
    charsets = mail._charsets(ui)

    # use the last revision which is likely to be a bookmarked head
    prefix = _formatprefix(ui, repo, revs.last(), opts.get('flag'),
                           0, len(patches), numbered=True)
    subject = (opts.get('subject') or
               prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
    if not subject:
        return None # skip intro if the user doesn't bother

    subject = prefix + ' ' + subject

    diffstat = None
    body = ''
    if opts.get('diffstat'):
        # generate a cumulative diffstat of the whole patch series
        diffstat = patch.diffstat(sum(patches, []))
        body = '\n' + diffstat

    body = _getdescription(repo, body, sender, **opts)
    msg = mail.mimeencode(ui, body, charsets, opts.get('test'))
    msg['Subject'] = mail.headencode(ui, subject, charsets,
                                     opts.get('test'))
    return (msg, subject, diffstat)
367
368
def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
    """Return a list of emails from a list of patches.

    This involves introduction message creation if necessary.

    This function returns a list of "email" tuples (subject, content,
    None).
    """
    ui = repo.ui
    charsets = mail._charsets(ui)
    patches = list(_getpatches(repo, revs, **opts))
    messages = []

    ui.write(_('this patch series consists of %d patches.\n\n')
             % len(patches))

    # build the intro message, or skip it if the user declines
    if introwanted(ui, opts, len(patches)):
        intro = _makeintro(repo, sender, revs, patches, **opts)
        if intro:
            messages.append(intro)

    # are we going to send more than one message?
    numbered = len(messages) + len(patches) > 1

    # now generate the actual patch messages
    assert len(revs) == len(patches)
    name = None
    for pos, (rev, patchtext) in enumerate(zip(revs, patches)):
        if patchnames:
            name = patchnames[pos]
        messages.append(makepatch(ui, repo, rev, patchtext, opts, charsets,
                                  pos + 1, len(patches), numbered, name))

    return messages
403
404
def _getoutgoing(repo, dest, revs):
    '''Return the revisions present locally but not in dest'''
    ui = repo.ui
    url = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'))[0]
    ui.status(_('comparing with %s\n') % util.hidepassword(url))

    # drop negative (nonexistent) revision numbers; default to tip
    candidates = [r for r in revs if r >= 0]
    if not candidates:
        candidates = [len(repo) - 1]
    outgoing = repo.revs('outgoing(%s) and ::%ld', dest or '', candidates)
    if not outgoing:
        ui.status(_("no changes found\n"))
    return outgoing
418
419
# command line options shared by the mail-sending commands below
# (each entry: short flag, long flag, default value, help text)
emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
    ('t', 'to', [], _('email addresses of recipients'))]
437
438
438 @command('email',
439 @command('email',
439 [('g', 'git', None, _('use git extended diff format')),
440 [('g', 'git', None, _('use git extended diff format')),
440 ('', 'plain', None, _('omit hg patch header')),
441 ('', 'plain', None, _('omit hg patch header')),
441 ('o', 'outgoing', None,
442 ('o', 'outgoing', None,
442 _('send changes not found in the target repository')),
443 _('send changes not found in the target repository')),
443 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
444 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
444 ('', 'bundlename', 'bundle',
445 ('', 'bundlename', 'bundle',
445 _('name of the bundle attachment file'), _('NAME')),
446 _('name of the bundle attachment file'), _('NAME')),
446 ('r', 'rev', [], _('a revision to send'), _('REV')),
447 ('r', 'rev', [], _('a revision to send'), _('REV')),
447 ('', 'force', None, _('run even when remote repository is unrelated '
448 ('', 'force', None, _('run even when remote repository is unrelated '
448 '(with -b/--bundle)')),
449 '(with -b/--bundle)')),
449 ('', 'base', [], _('a base changeset to specify instead of a destination '
450 ('', 'base', [], _('a base changeset to specify instead of a destination '
450 '(with -b/--bundle)'), _('REV')),
451 '(with -b/--bundle)'), _('REV')),
451 ('', 'intro', None, _('send an introduction email for a single patch')),
452 ('', 'intro', None, _('send an introduction email for a single patch')),
452 ] + emailopts + commands.remoteopts,
453 ] + emailopts + commands.remoteopts,
453 _('hg email [OPTION]... [DEST]...'))
454 _('hg email [OPTION]... [DEST]...'))
454 def email(ui, repo, *revs, **opts):
455 def email(ui, repo, *revs, **opts):
455 '''send changesets by email
456 '''send changesets by email
456
457
457 By default, diffs are sent in the format generated by
458 By default, diffs are sent in the format generated by
458 :hg:`export`, one per message. The series starts with a "[PATCH 0
459 :hg:`export`, one per message. The series starts with a "[PATCH 0
459 of N]" introduction, which describes the series as a whole.
460 of N]" introduction, which describes the series as a whole.
460
461
461 Each patch email has a Subject line of "[PATCH M of N] ...", using
462 Each patch email has a Subject line of "[PATCH M of N] ...", using
462 the first line of the changeset description as the subject text.
463 the first line of the changeset description as the subject text.
463 The message contains two or three parts. First, the changeset
464 The message contains two or three parts. First, the changeset
464 description.
465 description.
465
466
466 With the -d/--diffstat option, if the diffstat program is
467 With the -d/--diffstat option, if the diffstat program is
467 installed, the result of running diffstat on the patch is inserted.
468 installed, the result of running diffstat on the patch is inserted.
468
469
469 Finally, the patch itself, as generated by :hg:`export`.
470 Finally, the patch itself, as generated by :hg:`export`.
470
471
471 With the -d/--diffstat or --confirm options, you will be presented
472 With the -d/--diffstat or --confirm options, you will be presented
472 with a final summary of all messages and asked for confirmation before
473 with a final summary of all messages and asked for confirmation before
473 the messages are sent.
474 the messages are sent.
474
475
475 By default the patch is included as text in the email body for
476 By default the patch is included as text in the email body for
476 easy reviewing. Using the -a/--attach option will instead create
477 easy reviewing. Using the -a/--attach option will instead create
477 an attachment for the patch. With -i/--inline an inline attachment
478 an attachment for the patch. With -i/--inline an inline attachment
478 will be created. You can include a patch both as text in the email
479 will be created. You can include a patch both as text in the email
479 body and as a regular or an inline attachment by combining the
480 body and as a regular or an inline attachment by combining the
480 -a/--attach or -i/--inline with the --body option.
481 -a/--attach or -i/--inline with the --body option.
481
482
482 With -o/--outgoing, emails will be generated for patches not found
483 With -o/--outgoing, emails will be generated for patches not found
483 in the destination repository (or only those which are ancestors
484 in the destination repository (or only those which are ancestors
484 of the specified revisions if any are provided)
485 of the specified revisions if any are provided)
485
486
486 With -b/--bundle, changesets are selected as for --outgoing, but a
487 With -b/--bundle, changesets are selected as for --outgoing, but a
487 single email containing a binary Mercurial bundle as an attachment
488 single email containing a binary Mercurial bundle as an attachment
488 will be sent. Use the ``patchbomb.bundletype`` config option to
489 will be sent. Use the ``patchbomb.bundletype`` config option to
489 control the bundle type as with :hg:`bundle --type`.
490 control the bundle type as with :hg:`bundle --type`.
490
491
491 With -m/--mbox, instead of previewing each patchbomb message in a
492 With -m/--mbox, instead of previewing each patchbomb message in a
492 pager or sending the messages directly, it will create a UNIX
493 pager or sending the messages directly, it will create a UNIX
493 mailbox file with the patch emails. This mailbox file can be
494 mailbox file with the patch emails. This mailbox file can be
494 previewed with any mail user agent which supports UNIX mbox
495 previewed with any mail user agent which supports UNIX mbox
495 files.
496 files.
496
497
497 With -n/--test, all steps will run, but mail will not be sent.
498 With -n/--test, all steps will run, but mail will not be sent.
498 You will be prompted for an email recipient address, a subject and
499 You will be prompted for an email recipient address, a subject and
499 an introductory message describing the patches of your patchbomb.
500 an introductory message describing the patches of your patchbomb.
500 Then when all is done, patchbomb messages are displayed.
501 Then when all is done, patchbomb messages are displayed.
501
502
502 In case email sending fails, you will find a backup of your series
503 In case email sending fails, you will find a backup of your series
503 introductory message in ``.hg/last-email.txt``.
504 introductory message in ``.hg/last-email.txt``.
504
505
505 The default behavior of this command can be customized through
506 The default behavior of this command can be customized through
506 configuration. (See :hg:`help patchbomb` for details)
507 configuration. (See :hg:`help patchbomb` for details)
507
508
508 Examples::
509 Examples::
509
510
510 hg email -r 3000 # send patch 3000 only
511 hg email -r 3000 # send patch 3000 only
511 hg email -r 3000 -r 3001 # send patches 3000 and 3001
512 hg email -r 3000 -r 3001 # send patches 3000 and 3001
512 hg email -r 3000:3005 # send patches 3000 through 3005
513 hg email -r 3000:3005 # send patches 3000 through 3005
513 hg email 3000 # send patch 3000 (deprecated)
514 hg email 3000 # send patch 3000 (deprecated)
514
515
515 hg email -o # send all patches not in default
516 hg email -o # send all patches not in default
516 hg email -o DEST # send all patches not in DEST
517 hg email -o DEST # send all patches not in DEST
517 hg email -o -r 3000 # send all ancestors of 3000 not in default
518 hg email -o -r 3000 # send all ancestors of 3000 not in default
518 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
519 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
519
520
520 hg email -b # send bundle of all patches not in default
521 hg email -b # send bundle of all patches not in default
521 hg email -b DEST # send bundle of all patches not in DEST
522 hg email -b DEST # send bundle of all patches not in DEST
522 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
523 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
523 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
524 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
524
525
525 hg email -o -m mbox && # generate an mbox file...
526 hg email -o -m mbox && # generate an mbox file...
526 mutt -R -f mbox # ... and view it with mutt
527 mutt -R -f mbox # ... and view it with mutt
527 hg email -o -m mbox && # generate an mbox file ...
528 hg email -o -m mbox && # generate an mbox file ...
528 formail -s sendmail \\ # ... and use formail to send from the mbox
529 formail -s sendmail \\ # ... and use formail to send from the mbox
529 -bm -t < mbox # ... using sendmail
530 -bm -t < mbox # ... using sendmail
530
531
531 Before using this command, you will need to enable email in your
532 Before using this command, you will need to enable email in your
532 hgrc. See the [email] section in hgrc(5) for details.
533 hgrc. See the [email] section in hgrc(5) for details.
533 '''
534 '''
534
535
535 _charsets = mail._charsets(ui)
536 _charsets = mail._charsets(ui)
536
537
537 bundle = opts.get('bundle')
538 bundle = opts.get('bundle')
538 date = opts.get('date')
539 date = opts.get('date')
539 mbox = opts.get('mbox')
540 mbox = opts.get('mbox')
540 outgoing = opts.get('outgoing')
541 outgoing = opts.get('outgoing')
541 rev = opts.get('rev')
542 rev = opts.get('rev')
542
543
543 if not (opts.get('test') or mbox):
544 if not (opts.get('test') or mbox):
544 # really sending
545 # really sending
545 mail.validateconfig(ui)
546 mail.validateconfig(ui)
546
547
547 if not (revs or rev or outgoing or bundle):
548 if not (revs or rev or outgoing or bundle):
548 raise error.Abort(_('specify at least one changeset with -r or -o'))
549 raise error.Abort(_('specify at least one changeset with -r or -o'))
549
550
550 if outgoing and bundle:
551 if outgoing and bundle:
551 raise error.Abort(_("--outgoing mode always on with --bundle;"
552 raise error.Abort(_("--outgoing mode always on with --bundle;"
552 " do not re-specify --outgoing"))
553 " do not re-specify --outgoing"))
553
554
554 if outgoing or bundle:
555 if outgoing or bundle:
555 if len(revs) > 1:
556 if len(revs) > 1:
556 raise error.Abort(_("too many destinations"))
557 raise error.Abort(_("too many destinations"))
557 if revs:
558 if revs:
558 dest = revs[0]
559 dest = revs[0]
559 else:
560 else:
560 dest = None
561 dest = None
561 revs = []
562 revs = []
562
563
563 if rev:
564 if rev:
564 if revs:
565 if revs:
565 raise error.Abort(_('use only one form to specify the revision'))
566 raise error.Abort(_('use only one form to specify the revision'))
566 revs = rev
567 revs = rev
567
568
568 revs = scmutil.revrange(repo, revs)
569 revs = scmutil.revrange(repo, revs)
569 if outgoing:
570 if outgoing:
570 revs = _getoutgoing(repo, dest, revs)
571 revs = _getoutgoing(repo, dest, revs)
571 if bundle:
572 if bundle:
572 opts['revs'] = [str(r) for r in revs]
573 opts['revs'] = [str(r) for r in revs]
573
574
574 # check if revision exist on the public destination
575 # check if revision exist on the public destination
575 publicurl = repo.ui.config('patchbomb', 'publicurl')
576 publicurl = repo.ui.config('patchbomb', 'publicurl')
576 if publicurl is not None:
577 if publicurl is not None:
577 repo.ui.debug('checking that revision exist in the public repo')
578 repo.ui.debug('checking that revision exist in the public repo')
578 try:
579 try:
579 publicpeer = hg.peer(repo, {}, publicurl)
580 publicpeer = hg.peer(repo, {}, publicurl)
580 except error.RepoError:
581 except error.RepoError:
581 repo.ui.write_err(_('unable to access public repo: %s\n')
582 repo.ui.write_err(_('unable to access public repo: %s\n')
582 % publicurl)
583 % publicurl)
583 raise
584 raise
584 if not publicpeer.capable('known'):
585 if not publicpeer.capable('known'):
585 repo.ui.debug('skipping existence checks: public repo too old')
586 repo.ui.debug('skipping existence checks: public repo too old')
586 else:
587 else:
587 out = [repo[r] for r in revs]
588 out = [repo[r] for r in revs]
588 known = publicpeer.known(h.node() for h in out)
589 known = publicpeer.known(h.node() for h in out)
589 missing = []
590 missing = []
590 for idx, h in enumerate(out):
591 for idx, h in enumerate(out):
591 if not known[idx]:
592 if not known[idx]:
592 missing.append(h)
593 missing.append(h)
593 if missing:
594 if missing:
594 if 1 < len(missing):
595 if 1 < len(missing):
595 msg = _('public "%s" is missing %s and %i others')
596 msg = _('public "%s" is missing %s and %i others')
596 msg %= (publicurl, missing[0], len(missing) - 1)
597 msg %= (publicurl, missing[0], len(missing) - 1)
597 else:
598 else:
598 msg = _('public url %s is missing %s')
599 msg = _('public url %s is missing %s')
599 msg %= (publicurl, missing[0])
600 msg %= (publicurl, missing[0])
600 revhint = ' '.join('-r %s' % h
601 revhint = ' '.join('-r %s' % h
601 for h in repo.set('heads(%ld)', missing))
602 for h in repo.set('heads(%ld)', missing))
602 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
603 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
603 raise error.Abort(msg, hint=hint)
604 raise error.Abort(msg, hint=hint)
604
605
605 # start
606 # start
606 if date:
607 if date:
607 start_time = util.parsedate(date)
608 start_time = util.parsedate(date)
608 else:
609 else:
609 start_time = util.makedate()
610 start_time = util.makedate()
610
611
611 def genmsgid(id):
612 def genmsgid(id):
612 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
613 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
613
614
614 # deprecated config: patchbomb.from
615 # deprecated config: patchbomb.from
615 sender = (opts.get('from') or ui.config('email', 'from') or
616 sender = (opts.get('from') or ui.config('email', 'from') or
616 ui.config('patchbomb', 'from') or
617 ui.config('patchbomb', 'from') or
617 prompt(ui, 'From', ui.username()))
618 prompt(ui, 'From', ui.username()))
618
619
619 if bundle:
620 if bundle:
620 bundledata = _getbundle(repo, dest, **opts)
621 bundledata = _getbundle(repo, dest, **opts)
621 bundleopts = opts.copy()
622 bundleopts = opts.copy()
622 bundleopts.pop('bundle', None) # already processed
623 bundleopts.pop('bundle', None) # already processed
623 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
624 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
624 else:
625 else:
625 msgs = _getpatchmsgs(repo, sender, revs, **opts)
626 msgs = _getpatchmsgs(repo, sender, revs, **opts)
626
627
627 showaddrs = []
628 showaddrs = []
628
629
629 def getaddrs(header, ask=False, default=None):
630 def getaddrs(header, ask=False, default=None):
630 configkey = header.lower()
631 configkey = header.lower()
631 opt = header.replace('-', '_').lower()
632 opt = header.replace('-', '_').lower()
632 addrs = opts.get(opt)
633 addrs = opts.get(opt)
633 if addrs:
634 if addrs:
634 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
635 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
635 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
636 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
636
637
637 # not on the command line: fallback to config and then maybe ask
638 # not on the command line: fallback to config and then maybe ask
638 addr = (ui.config('email', configkey) or
639 addr = (ui.config('email', configkey) or
639 ui.config('patchbomb', configkey))
640 ui.config('patchbomb', configkey))
640 if not addr:
641 if not addr:
641 specified = (ui.hasconfig('email', configkey) or
642 specified = (ui.hasconfig('email', configkey) or
642 ui.hasconfig('patchbomb', configkey))
643 ui.hasconfig('patchbomb', configkey))
643 if not specified and ask:
644 if not specified and ask:
644 addr = prompt(ui, header, default=default)
645 addr = prompt(ui, header, default=default)
645 if addr:
646 if addr:
646 showaddrs.append('%s: %s' % (header, addr))
647 showaddrs.append('%s: %s' % (header, addr))
647 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
648 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
648 else:
649 else:
649 return default
650 return default
650
651
651 to = getaddrs('To', ask=True)
652 to = getaddrs('To', ask=True)
652 if not to:
653 if not to:
653 # we can get here in non-interactive mode
654 # we can get here in non-interactive mode
654 raise error.Abort(_('no recipient addresses provided'))
655 raise error.Abort(_('no recipient addresses provided'))
655 cc = getaddrs('Cc', ask=True, default='') or []
656 cc = getaddrs('Cc', ask=True, default='') or []
656 bcc = getaddrs('Bcc') or []
657 bcc = getaddrs('Bcc') or []
657 replyto = getaddrs('Reply-To')
658 replyto = getaddrs('Reply-To')
658
659
659 confirm = ui.configbool('patchbomb', 'confirm')
660 confirm = ui.configbool('patchbomb', 'confirm')
660 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
661 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
661
662
662 if confirm:
663 if confirm:
663 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
664 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
664 ui.write(('From: %s\n' % sender), label='patchbomb.from')
665 ui.write(('From: %s\n' % sender), label='patchbomb.from')
665 for addr in showaddrs:
666 for addr in showaddrs:
666 ui.write('%s\n' % addr, label='patchbomb.to')
667 ui.write('%s\n' % addr, label='patchbomb.to')
667 for m, subj, ds in msgs:
668 for m, subj, ds in msgs:
668 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
669 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
669 if ds:
670 if ds:
670 ui.write(ds, label='patchbomb.diffstats')
671 ui.write(ds, label='patchbomb.diffstats')
671 ui.write('\n')
672 ui.write('\n')
672 if ui.promptchoice(_('are you sure you want to send (yn)?'
673 if ui.promptchoice(_('are you sure you want to send (yn)?'
673 '$$ &Yes $$ &No')):
674 '$$ &Yes $$ &No')):
674 raise error.Abort(_('patchbomb canceled'))
675 raise error.Abort(_('patchbomb canceled'))
675
676
676 ui.write('\n')
677 ui.write('\n')
677
678
678 parent = opts.get('in_reply_to') or None
679 parent = opts.get('in_reply_to') or None
679 # angle brackets may be omitted, they're not semantically part of the msg-id
680 # angle brackets may be omitted, they're not semantically part of the msg-id
680 if parent is not None:
681 if parent is not None:
681 if not parent.startswith('<'):
682 if not parent.startswith('<'):
682 parent = '<' + parent
683 parent = '<' + parent
683 if not parent.endswith('>'):
684 if not parent.endswith('>'):
684 parent += '>'
685 parent += '>'
685
686
686 sender_addr = emailmod.Utils.parseaddr(sender)[1]
687 sender_addr = emailmod.Utils.parseaddr(sender)[1]
687 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
688 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
688 sendmail = None
689 sendmail = None
689 firstpatch = None
690 firstpatch = None
690 for i, (m, subj, ds) in enumerate(msgs):
691 for i, (m, subj, ds) in enumerate(msgs):
691 try:
692 try:
692 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
693 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
693 if not firstpatch:
694 if not firstpatch:
694 firstpatch = m['Message-Id']
695 firstpatch = m['Message-Id']
695 m['X-Mercurial-Series-Id'] = firstpatch
696 m['X-Mercurial-Series-Id'] = firstpatch
696 except TypeError:
697 except TypeError:
697 m['Message-Id'] = genmsgid('patchbomb')
698 m['Message-Id'] = genmsgid('patchbomb')
698 if parent:
699 if parent:
699 m['In-Reply-To'] = parent
700 m['In-Reply-To'] = parent
700 m['References'] = parent
701 m['References'] = parent
701 if not parent or 'X-Mercurial-Node' not in m:
702 if not parent or 'X-Mercurial-Node' not in m:
702 parent = m['Message-Id']
703 parent = m['Message-Id']
703
704
704 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
705 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
705 m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)
706 m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)
706
707
707 start_time = (start_time[0] + 1, start_time[1])
708 start_time = (start_time[0] + 1, start_time[1])
708 m['From'] = sender
709 m['From'] = sender
709 m['To'] = ', '.join(to)
710 m['To'] = ', '.join(to)
710 if cc:
711 if cc:
711 m['Cc'] = ', '.join(cc)
712 m['Cc'] = ', '.join(cc)
712 if bcc:
713 if bcc:
713 m['Bcc'] = ', '.join(bcc)
714 m['Bcc'] = ', '.join(bcc)
714 if replyto:
715 if replyto:
715 m['Reply-To'] = ', '.join(replyto)
716 m['Reply-To'] = ', '.join(replyto)
716 if opts.get('test'):
717 if opts.get('test'):
717 ui.status(_('displaying '), subj, ' ...\n')
718 ui.status(_('displaying '), subj, ' ...\n')
718 ui.pager('email')
719 ui.pager('email')
719 generator = emailmod.Generator.Generator(ui, mangle_from_=False)
720 generator = emailmod.Generator.Generator(ui, mangle_from_=False)
720 try:
721 try:
721 generator.flatten(m, 0)
722 generator.flatten(m, 0)
722 ui.write('\n')
723 ui.write('\n')
723 except IOError as inst:
724 except IOError as inst:
724 if inst.errno != errno.EPIPE:
725 if inst.errno != errno.EPIPE:
725 raise
726 raise
726 else:
727 else:
727 if not sendmail:
728 if not sendmail:
728 sendmail = mail.connect(ui, mbox=mbox)
729 sendmail = mail.connect(ui, mbox=mbox)
729 ui.status(_('sending '), subj, ' ...\n')
730 ui.status(_('sending '), subj, ' ...\n')
730 ui.progress(_('sending'), i, item=subj, total=len(msgs),
731 ui.progress(_('sending'), i, item=subj, total=len(msgs),
731 unit=_('emails'))
732 unit=_('emails'))
732 if not mbox:
733 if not mbox:
733 # Exim does not remove the Bcc field
734 # Exim does not remove the Bcc field
734 del m['Bcc']
735 del m['Bcc']
735 fp = stringio()
736 fp = stringio()
736 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
737 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
737 generator.flatten(m, 0)
738 generator.flatten(m, 0)
738 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
739 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
739
740
740 ui.progress(_('writing'), None)
741 ui.progress(_('writing'), None)
741 ui.progress(_('sending'), None)
742 ui.progress(_('sending'), None)
@@ -1,127 +1,127 b''
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 #
2 #
3 # This is a small extension for Mercurial (https://mercurial-scm.org/)
3 # This is a small extension for Mercurial (https://mercurial-scm.org/)
4 # that removes files not known to mercurial
4 # that removes files not known to mercurial
5 #
5 #
6 # This program was inspired by the "cvspurge" script contained in CVS
6 # This program was inspired by the "cvspurge" script contained in CVS
7 # utilities (http://www.red-bean.com/cvsutils/).
7 # utilities (http://www.red-bean.com/cvsutils/).
8 #
8 #
9 # For help on the usage of "hg purge" use:
9 # For help on the usage of "hg purge" use:
10 # hg help purge
10 # hg help purge
11 #
11 #
12 # This program is free software; you can redistribute it and/or modify
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
15 # (at your option) any later version.
16 #
16 #
17 # This program is distributed in the hope that it will be useful,
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
20 # GNU General Public License for more details.
21 #
21 #
22 # You should have received a copy of the GNU General Public License
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, see <http://www.gnu.org/licenses/>.
23 # along with this program; if not, see <http://www.gnu.org/licenses/>.
24
24
25 '''command to delete untracked files from the working directory'''
25 '''command to delete untracked files from the working directory'''
26 from __future__ import absolute_import
26 from __future__ import absolute_import
27
27
28 import os
28 import os
29
29
30 from mercurial.i18n import _
30 from mercurial.i18n import _
31 from mercurial import (
31 from mercurial import (
32 cmdutil,
33 commands,
32 commands,
34 error,
33 error,
34 registrar,
35 scmutil,
35 scmutil,
36 util,
36 util,
37 )
37 )
38
38
39 cmdtable = {}
39 cmdtable = {}
40 command = cmdutil.command(cmdtable)
40 command = registrar.command(cmdtable)
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
43 # be specifying the version(s) of Mercurial they are tested with, or
43 # be specifying the version(s) of Mercurial they are tested with, or
44 # leave the attribute unspecified.
44 # leave the attribute unspecified.
45 testedwith = 'ships-with-hg-core'
45 testedwith = 'ships-with-hg-core'
46
46
47 @command('purge|clean',
47 @command('purge|clean',
48 [('a', 'abort-on-err', None, _('abort if an error occurs')),
48 [('a', 'abort-on-err', None, _('abort if an error occurs')),
49 ('', 'all', None, _('purge ignored files too')),
49 ('', 'all', None, _('purge ignored files too')),
50 ('', 'dirs', None, _('purge empty directories')),
50 ('', 'dirs', None, _('purge empty directories')),
51 ('', 'files', None, _('purge files')),
51 ('', 'files', None, _('purge files')),
52 ('p', 'print', None, _('print filenames instead of deleting them')),
52 ('p', 'print', None, _('print filenames instead of deleting them')),
53 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
53 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
54 ' (implies -p/--print)')),
54 ' (implies -p/--print)')),
55 ] + commands.walkopts,
55 ] + commands.walkopts,
56 _('hg purge [OPTION]... [DIR]...'))
56 _('hg purge [OPTION]... [DIR]...'))
57 def purge(ui, repo, *dirs, **opts):
57 def purge(ui, repo, *dirs, **opts):
58 '''removes files not tracked by Mercurial
58 '''removes files not tracked by Mercurial
59
59
60 Delete files not known to Mercurial. This is useful to test local
60 Delete files not known to Mercurial. This is useful to test local
61 and uncommitted changes in an otherwise-clean source tree.
61 and uncommitted changes in an otherwise-clean source tree.
62
62
63 This means that purge will delete the following by default:
63 This means that purge will delete the following by default:
64
64
65 - Unknown files: files marked with "?" by :hg:`status`
65 - Unknown files: files marked with "?" by :hg:`status`
66 - Empty directories: in fact Mercurial ignores directories unless
66 - Empty directories: in fact Mercurial ignores directories unless
67 they contain files under source control management
67 they contain files under source control management
68
68
69 But it will leave untouched:
69 But it will leave untouched:
70
70
71 - Modified and unmodified tracked files
71 - Modified and unmodified tracked files
72 - Ignored files (unless --all is specified)
72 - Ignored files (unless --all is specified)
73 - New files added to the repository (with :hg:`add`)
73 - New files added to the repository (with :hg:`add`)
74
74
75 The --files and --dirs options can be used to direct purge to delete
75 The --files and --dirs options can be used to direct purge to delete
76 only files, only directories, or both. If neither option is given,
76 only files, only directories, or both. If neither option is given,
77 both will be deleted.
77 both will be deleted.
78
78
79 If directories are given on the command line, only files in these
79 If directories are given on the command line, only files in these
80 directories are considered.
80 directories are considered.
81
81
82 Be careful with purge, as you could irreversibly delete some files
82 Be careful with purge, as you could irreversibly delete some files
83 you forgot to add to the repository. If you only want to print the
83 you forgot to add to the repository. If you only want to print the
84 list of files that this program would delete, use the --print
84 list of files that this program would delete, use the --print
85 option.
85 option.
86 '''
86 '''
87 act = not opts.get('print')
87 act = not opts.get('print')
88 eol = '\n'
88 eol = '\n'
89 if opts.get('print0'):
89 if opts.get('print0'):
90 eol = '\0'
90 eol = '\0'
91 act = False # --print0 implies --print
91 act = False # --print0 implies --print
92 removefiles = opts.get('files')
92 removefiles = opts.get('files')
93 removedirs = opts.get('dirs')
93 removedirs = opts.get('dirs')
94 if not removefiles and not removedirs:
94 if not removefiles and not removedirs:
95 removefiles = True
95 removefiles = True
96 removedirs = True
96 removedirs = True
97
97
98 def remove(remove_func, name):
98 def remove(remove_func, name):
99 if act:
99 if act:
100 try:
100 try:
101 remove_func(repo.wjoin(name))
101 remove_func(repo.wjoin(name))
102 except OSError:
102 except OSError:
103 m = _('%s cannot be removed') % name
103 m = _('%s cannot be removed') % name
104 if opts.get('abort_on_err'):
104 if opts.get('abort_on_err'):
105 raise error.Abort(m)
105 raise error.Abort(m)
106 ui.warn(_('warning: %s\n') % m)
106 ui.warn(_('warning: %s\n') % m)
107 else:
107 else:
108 ui.write('%s%s' % (name, eol))
108 ui.write('%s%s' % (name, eol))
109
109
110 match = scmutil.match(repo[None], dirs, opts)
110 match = scmutil.match(repo[None], dirs, opts)
111 if removedirs:
111 if removedirs:
112 directories = []
112 directories = []
113 match.explicitdir = match.traversedir = directories.append
113 match.explicitdir = match.traversedir = directories.append
114 status = repo.status(match=match, ignored=opts.get('all'), unknown=True)
114 status = repo.status(match=match, ignored=opts.get('all'), unknown=True)
115
115
116 if removefiles:
116 if removefiles:
117 for f in sorted(status.unknown + status.ignored):
117 for f in sorted(status.unknown + status.ignored):
118 if act:
118 if act:
119 ui.note(_('removing file %s\n') % f)
119 ui.note(_('removing file %s\n') % f)
120 remove(util.unlink, f)
120 remove(util.unlink, f)
121
121
122 if removedirs:
122 if removedirs:
123 for f in sorted(directories, reverse=True):
123 for f in sorted(directories, reverse=True):
124 if match(f) and not os.listdir(repo.wjoin(f)):
124 if match(f) and not os.listdir(repo.wjoin(f)):
125 if act:
125 if act:
126 ui.note(_('removing directory %s\n') % f)
126 ui.note(_('removing directory %s\n') % f)
127 remove(os.rmdir, f)
127 remove(os.rmdir, f)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now