##// END OF EJS Templates
commands: move templates of common command options to cmdutil (API)...
Yuya Nishihara -
r32375:04baab18 default
parent child Browse files
Show More
@@ -1,1331 +1,1333 b''
1 # perf.py - performance test routines
1 # perf.py - performance test routines
2 '''helper extension to measure performance'''
2 '''helper extension to measure performance'''
3
3
4 # "historical portability" policy of perf.py:
4 # "historical portability" policy of perf.py:
5 #
5 #
6 # We have to do:
6 # We have to do:
7 # - make perf.py "loadable" with as wide Mercurial version as possible
7 # - make perf.py "loadable" with as wide Mercurial version as possible
8 # This doesn't mean that perf commands work correctly with that Mercurial.
8 # This doesn't mean that perf commands work correctly with that Mercurial.
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
10 # - make historical perf command work correctly with as wide Mercurial
10 # - make historical perf command work correctly with as wide Mercurial
11 # version as possible
11 # version as possible
12 #
12 #
13 # We have to do, if possible with reasonable cost:
13 # We have to do, if possible with reasonable cost:
14 # - make recent perf command for historical feature work correctly
14 # - make recent perf command for historical feature work correctly
15 # with early Mercurial
15 # with early Mercurial
16 #
16 #
17 # We don't have to do:
17 # We don't have to do:
18 # - make perf command for recent feature work correctly with early
18 # - make perf command for recent feature work correctly with early
19 # Mercurial
19 # Mercurial
20
20
21 from __future__ import absolute_import
21 from __future__ import absolute_import
22 import functools
22 import functools
23 import gc
23 import gc
24 import os
24 import os
25 import random
25 import random
26 import sys
26 import sys
27 import time
27 import time
28 from mercurial import (
28 from mercurial import (
29 changegroup,
29 changegroup,
30 cmdutil,
30 cmdutil,
31 commands,
31 commands,
32 copies,
32 copies,
33 error,
33 error,
34 extensions,
34 extensions,
35 mdiff,
35 mdiff,
36 merge,
36 merge,
37 util,
37 util,
38 )
38 )
39
39
40 # for "historical portability":
40 # for "historical portability":
41 # try to import modules separately (in dict order), and ignore
41 # try to import modules separately (in dict order), and ignore
42 # failure, because these aren't available with early Mercurial
42 # failure, because these aren't available with early Mercurial
43 try:
43 try:
44 from mercurial import branchmap # since 2.5 (or bcee63733aad)
44 from mercurial import branchmap # since 2.5 (or bcee63733aad)
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47 try:
47 try:
48 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
48 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
49 except ImportError:
49 except ImportError:
50 pass
50 pass
51 try:
51 try:
52 from mercurial import registrar # since 3.7 (or 37d50250b696)
52 from mercurial import registrar # since 3.7 (or 37d50250b696)
53 dir(registrar) # forcibly load it
53 dir(registrar) # forcibly load it
54 except ImportError:
54 except ImportError:
55 registrar = None
55 registrar = None
56 try:
56 try:
57 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
57 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
58 except ImportError:
58 except ImportError:
59 pass
59 pass
60 try:
60 try:
61 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
61 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
62 except ImportError:
62 except ImportError:
63 pass
63 pass
64
64
65 # for "historical portability":
65 # for "historical portability":
66 # define util.safehasattr forcibly, because util.safehasattr has been
66 # define util.safehasattr forcibly, because util.safehasattr has been
67 # available since 1.9.3 (or 94b200a11cf7)
67 # available since 1.9.3 (or 94b200a11cf7)
68 _undefined = object()
68 _undefined = object()
69 def safehasattr(thing, attr):
69 def safehasattr(thing, attr):
70 return getattr(thing, attr, _undefined) is not _undefined
70 return getattr(thing, attr, _undefined) is not _undefined
71 setattr(util, 'safehasattr', safehasattr)
71 setattr(util, 'safehasattr', safehasattr)
72
72
73 # for "historical portability":
73 # for "historical portability":
74 # define util.timer forcibly, because util.timer has been available
74 # define util.timer forcibly, because util.timer has been available
75 # since ae5d60bb70c9
75 # since ae5d60bb70c9
76 if safehasattr(time, 'perf_counter'):
76 if safehasattr(time, 'perf_counter'):
77 util.timer = time.perf_counter
77 util.timer = time.perf_counter
78 elif os.name == 'nt':
78 elif os.name == 'nt':
79 util.timer = time.clock
79 util.timer = time.clock
80 else:
80 else:
81 util.timer = time.time
81 util.timer = time.time
82
82
83 # for "historical portability":
83 # for "historical portability":
84 # use locally defined empty option list, if formatteropts isn't
84 # use locally defined empty option list, if formatteropts isn't
85 # available, because commands.formatteropts has been available since
85 # available, because commands.formatteropts has been available since
86 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
86 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
87 # available since 2.2 (or ae5f92e154d3)
87 # available since 2.2 (or ae5f92e154d3)
88 formatteropts = getattr(commands, "formatteropts", [])
88 formatteropts = getattr(cmdutil, "formatteropts",
89 getattr(commands, "formatteropts", []))
89
90
90 # for "historical portability":
91 # for "historical portability":
91 # use locally defined option list, if debugrevlogopts isn't available,
92 # use locally defined option list, if debugrevlogopts isn't available,
92 # because commands.debugrevlogopts has been available since 3.7 (or
93 # because commands.debugrevlogopts has been available since 3.7 (or
93 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
94 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
94 # since 1.9 (or a79fea6b3e77).
95 # since 1.9 (or a79fea6b3e77).
95 revlogopts = getattr(commands, "debugrevlogopts", [
96 revlogopts = getattr(cmdutil, "debugrevlogopts",
97 getattr(commands, "debugrevlogopts", [
96 ('c', 'changelog', False, ('open changelog')),
98 ('c', 'changelog', False, ('open changelog')),
97 ('m', 'manifest', False, ('open manifest')),
99 ('m', 'manifest', False, ('open manifest')),
98 ('', 'dir', False, ('open directory manifest')),
100 ('', 'dir', False, ('open directory manifest')),
99 ])
101 ]))
100
102
101 cmdtable = {}
103 cmdtable = {}
102
104
103 # for "historical portability":
105 # for "historical portability":
104 # define parsealiases locally, because cmdutil.parsealiases has been
106 # define parsealiases locally, because cmdutil.parsealiases has been
105 # available since 1.5 (or 6252852b4332)
107 # available since 1.5 (or 6252852b4332)
106 def parsealiases(cmd):
108 def parsealiases(cmd):
107 return cmd.lstrip("^").split("|")
109 return cmd.lstrip("^").split("|")
108
110
109 if safehasattr(registrar, 'command'):
111 if safehasattr(registrar, 'command'):
110 command = registrar.command(cmdtable)
112 command = registrar.command(cmdtable)
111 elif safehasattr(cmdutil, 'command'):
113 elif safehasattr(cmdutil, 'command'):
112 import inspect
114 import inspect
113 command = cmdutil.command(cmdtable)
115 command = cmdutil.command(cmdtable)
114 if 'norepo' not in inspect.getargspec(command)[0]:
116 if 'norepo' not in inspect.getargspec(command)[0]:
115 # for "historical portability":
117 # for "historical portability":
116 # wrap original cmdutil.command, because "norepo" option has
118 # wrap original cmdutil.command, because "norepo" option has
117 # been available since 3.1 (or 75a96326cecb)
119 # been available since 3.1 (or 75a96326cecb)
118 _command = command
120 _command = command
119 def command(name, options=(), synopsis=None, norepo=False):
121 def command(name, options=(), synopsis=None, norepo=False):
120 if norepo:
122 if norepo:
121 commands.norepo += ' %s' % ' '.join(parsealiases(name))
123 commands.norepo += ' %s' % ' '.join(parsealiases(name))
122 return _command(name, list(options), synopsis)
124 return _command(name, list(options), synopsis)
123 else:
125 else:
124 # for "historical portability":
126 # for "historical portability":
125 # define "@command" annotation locally, because cmdutil.command
127 # define "@command" annotation locally, because cmdutil.command
126 # has been available since 1.9 (or 2daa5179e73f)
128 # has been available since 1.9 (or 2daa5179e73f)
127 def command(name, options=(), synopsis=None, norepo=False):
129 def command(name, options=(), synopsis=None, norepo=False):
128 def decorator(func):
130 def decorator(func):
129 if synopsis:
131 if synopsis:
130 cmdtable[name] = func, list(options), synopsis
132 cmdtable[name] = func, list(options), synopsis
131 else:
133 else:
132 cmdtable[name] = func, list(options)
134 cmdtable[name] = func, list(options)
133 if norepo:
135 if norepo:
134 commands.norepo += ' %s' % ' '.join(parsealiases(name))
136 commands.norepo += ' %s' % ' '.join(parsealiases(name))
135 return func
137 return func
136 return decorator
138 return decorator
137
139
138 def getlen(ui):
140 def getlen(ui):
139 if ui.configbool("perf", "stub"):
141 if ui.configbool("perf", "stub"):
140 return lambda x: 1
142 return lambda x: 1
141 return len
143 return len
142
144
143 def gettimer(ui, opts=None):
145 def gettimer(ui, opts=None):
144 """return a timer function and formatter: (timer, formatter)
146 """return a timer function and formatter: (timer, formatter)
145
147
146 This function exists to gather the creation of formatter in a single
148 This function exists to gather the creation of formatter in a single
147 place instead of duplicating it in all performance commands."""
149 place instead of duplicating it in all performance commands."""
148
150
149 # enforce an idle period before execution to counteract power management
151 # enforce an idle period before execution to counteract power management
150 # experimental config: perf.presleep
152 # experimental config: perf.presleep
151 time.sleep(getint(ui, "perf", "presleep", 1))
153 time.sleep(getint(ui, "perf", "presleep", 1))
152
154
153 if opts is None:
155 if opts is None:
154 opts = {}
156 opts = {}
155 # redirect all to stderr unless buffer api is in use
157 # redirect all to stderr unless buffer api is in use
156 if not ui._buffers:
158 if not ui._buffers:
157 ui = ui.copy()
159 ui = ui.copy()
158 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
160 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
159 if uifout:
161 if uifout:
160 # for "historical portability":
162 # for "historical portability":
161 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
163 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
162 uifout.set(ui.ferr)
164 uifout.set(ui.ferr)
163
165
164 # get a formatter
166 # get a formatter
165 uiformatter = getattr(ui, 'formatter', None)
167 uiformatter = getattr(ui, 'formatter', None)
166 if uiformatter:
168 if uiformatter:
167 fm = uiformatter('perf', opts)
169 fm = uiformatter('perf', opts)
168 else:
170 else:
169 # for "historical portability":
171 # for "historical portability":
170 # define formatter locally, because ui.formatter has been
172 # define formatter locally, because ui.formatter has been
171 # available since 2.2 (or ae5f92e154d3)
173 # available since 2.2 (or ae5f92e154d3)
172 from mercurial import node
174 from mercurial import node
173 class defaultformatter(object):
175 class defaultformatter(object):
174 """Minimized composition of baseformatter and plainformatter
176 """Minimized composition of baseformatter and plainformatter
175 """
177 """
176 def __init__(self, ui, topic, opts):
178 def __init__(self, ui, topic, opts):
177 self._ui = ui
179 self._ui = ui
178 if ui.debugflag:
180 if ui.debugflag:
179 self.hexfunc = node.hex
181 self.hexfunc = node.hex
180 else:
182 else:
181 self.hexfunc = node.short
183 self.hexfunc = node.short
182 def __nonzero__(self):
184 def __nonzero__(self):
183 return False
185 return False
184 __bool__ = __nonzero__
186 __bool__ = __nonzero__
185 def startitem(self):
187 def startitem(self):
186 pass
188 pass
187 def data(self, **data):
189 def data(self, **data):
188 pass
190 pass
189 def write(self, fields, deftext, *fielddata, **opts):
191 def write(self, fields, deftext, *fielddata, **opts):
190 self._ui.write(deftext % fielddata, **opts)
192 self._ui.write(deftext % fielddata, **opts)
191 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
193 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
192 if cond:
194 if cond:
193 self._ui.write(deftext % fielddata, **opts)
195 self._ui.write(deftext % fielddata, **opts)
194 def plain(self, text, **opts):
196 def plain(self, text, **opts):
195 self._ui.write(text, **opts)
197 self._ui.write(text, **opts)
196 def end(self):
198 def end(self):
197 pass
199 pass
198 fm = defaultformatter(ui, 'perf', opts)
200 fm = defaultformatter(ui, 'perf', opts)
199
201
200 # stub function, runs code only once instead of in a loop
202 # stub function, runs code only once instead of in a loop
201 # experimental config: perf.stub
203 # experimental config: perf.stub
202 if ui.configbool("perf", "stub"):
204 if ui.configbool("perf", "stub"):
203 return functools.partial(stub_timer, fm), fm
205 return functools.partial(stub_timer, fm), fm
204 return functools.partial(_timer, fm), fm
206 return functools.partial(_timer, fm), fm
205
207
206 def stub_timer(fm, func, title=None):
208 def stub_timer(fm, func, title=None):
207 func()
209 func()
208
210
209 def _timer(fm, func, title=None):
211 def _timer(fm, func, title=None):
210 gc.collect()
212 gc.collect()
211 results = []
213 results = []
212 begin = util.timer()
214 begin = util.timer()
213 count = 0
215 count = 0
214 while True:
216 while True:
215 ostart = os.times()
217 ostart = os.times()
216 cstart = util.timer()
218 cstart = util.timer()
217 r = func()
219 r = func()
218 cstop = util.timer()
220 cstop = util.timer()
219 ostop = os.times()
221 ostop = os.times()
220 count += 1
222 count += 1
221 a, b = ostart, ostop
223 a, b = ostart, ostop
222 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
224 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
223 if cstop - begin > 3 and count >= 100:
225 if cstop - begin > 3 and count >= 100:
224 break
226 break
225 if cstop - begin > 10 and count >= 3:
227 if cstop - begin > 10 and count >= 3:
226 break
228 break
227
229
228 fm.startitem()
230 fm.startitem()
229
231
230 if title:
232 if title:
231 fm.write('title', '! %s\n', title)
233 fm.write('title', '! %s\n', title)
232 if r:
234 if r:
233 fm.write('result', '! result: %s\n', r)
235 fm.write('result', '! result: %s\n', r)
234 m = min(results)
236 m = min(results)
235 fm.plain('!')
237 fm.plain('!')
236 fm.write('wall', ' wall %f', m[0])
238 fm.write('wall', ' wall %f', m[0])
237 fm.write('comb', ' comb %f', m[1] + m[2])
239 fm.write('comb', ' comb %f', m[1] + m[2])
238 fm.write('user', ' user %f', m[1])
240 fm.write('user', ' user %f', m[1])
239 fm.write('sys', ' sys %f', m[2])
241 fm.write('sys', ' sys %f', m[2])
240 fm.write('count', ' (best of %d)', count)
242 fm.write('count', ' (best of %d)', count)
241 fm.plain('\n')
243 fm.plain('\n')
242
244
243 # utilities for historical portability
245 # utilities for historical portability
244
246
245 def getint(ui, section, name, default):
247 def getint(ui, section, name, default):
246 # for "historical portability":
248 # for "historical portability":
247 # ui.configint has been available since 1.9 (or fa2b596db182)
249 # ui.configint has been available since 1.9 (or fa2b596db182)
248 v = ui.config(section, name, None)
250 v = ui.config(section, name, None)
249 if v is None:
251 if v is None:
250 return default
252 return default
251 try:
253 try:
252 return int(v)
254 return int(v)
253 except ValueError:
255 except ValueError:
254 raise error.ConfigError(("%s.%s is not an integer ('%s')")
256 raise error.ConfigError(("%s.%s is not an integer ('%s')")
255 % (section, name, v))
257 % (section, name, v))
256
258
257 def safeattrsetter(obj, name, ignoremissing=False):
259 def safeattrsetter(obj, name, ignoremissing=False):
258 """Ensure that 'obj' has 'name' attribute before subsequent setattr
260 """Ensure that 'obj' has 'name' attribute before subsequent setattr
259
261
260 This function is aborted, if 'obj' doesn't have 'name' attribute
262 This function is aborted, if 'obj' doesn't have 'name' attribute
261 at runtime. This avoids overlooking removal of an attribute, which
263 at runtime. This avoids overlooking removal of an attribute, which
262 breaks assumption of performance measurement, in the future.
264 breaks assumption of performance measurement, in the future.
263
265
264 This function returns the object to (1) assign a new value, and
266 This function returns the object to (1) assign a new value, and
265 (2) restore an original value to the attribute.
267 (2) restore an original value to the attribute.
266
268
267 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
269 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
268 abortion, and this function returns None. This is useful to
270 abortion, and this function returns None. This is useful to
269 examine an attribute, which isn't ensured in all Mercurial
271 examine an attribute, which isn't ensured in all Mercurial
270 versions.
272 versions.
271 """
273 """
272 if not util.safehasattr(obj, name):
274 if not util.safehasattr(obj, name):
273 if ignoremissing:
275 if ignoremissing:
274 return None
276 return None
275 raise error.Abort(("missing attribute %s of %s might break assumption"
277 raise error.Abort(("missing attribute %s of %s might break assumption"
276 " of performance measurement") % (name, obj))
278 " of performance measurement") % (name, obj))
277
279
278 origvalue = getattr(obj, name)
280 origvalue = getattr(obj, name)
279 class attrutil(object):
281 class attrutil(object):
280 def set(self, newvalue):
282 def set(self, newvalue):
281 setattr(obj, name, newvalue)
283 setattr(obj, name, newvalue)
282 def restore(self):
284 def restore(self):
283 setattr(obj, name, origvalue)
285 setattr(obj, name, origvalue)
284
286
285 return attrutil()
287 return attrutil()
286
288
287 # utilities to examine each internal API changes
289 # utilities to examine each internal API changes
288
290
289 def getbranchmapsubsettable():
291 def getbranchmapsubsettable():
290 # for "historical portability":
292 # for "historical portability":
291 # subsettable is defined in:
293 # subsettable is defined in:
292 # - branchmap since 2.9 (or 175c6fd8cacc)
294 # - branchmap since 2.9 (or 175c6fd8cacc)
293 # - repoview since 2.5 (or 59a9f18d4587)
295 # - repoview since 2.5 (or 59a9f18d4587)
294 for mod in (branchmap, repoview):
296 for mod in (branchmap, repoview):
295 subsettable = getattr(mod, 'subsettable', None)
297 subsettable = getattr(mod, 'subsettable', None)
296 if subsettable:
298 if subsettable:
297 return subsettable
299 return subsettable
298
300
299 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
301 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
300 # branchmap and repoview modules exist, but subsettable attribute
302 # branchmap and repoview modules exist, but subsettable attribute
301 # doesn't)
303 # doesn't)
302 raise error.Abort(("perfbranchmap not available with this Mercurial"),
304 raise error.Abort(("perfbranchmap not available with this Mercurial"),
303 hint="use 2.5 or later")
305 hint="use 2.5 or later")
304
306
305 def getsvfs(repo):
307 def getsvfs(repo):
306 """Return appropriate object to access files under .hg/store
308 """Return appropriate object to access files under .hg/store
307 """
309 """
308 # for "historical portability":
310 # for "historical portability":
309 # repo.svfs has been available since 2.3 (or 7034365089bf)
311 # repo.svfs has been available since 2.3 (or 7034365089bf)
310 svfs = getattr(repo, 'svfs', None)
312 svfs = getattr(repo, 'svfs', None)
311 if svfs:
313 if svfs:
312 return svfs
314 return svfs
313 else:
315 else:
314 return getattr(repo, 'sopener')
316 return getattr(repo, 'sopener')
315
317
316 def getvfs(repo):
318 def getvfs(repo):
317 """Return appropriate object to access files under .hg
319 """Return appropriate object to access files under .hg
318 """
320 """
319 # for "historical portability":
321 # for "historical portability":
320 # repo.vfs has been available since 2.3 (or 7034365089bf)
322 # repo.vfs has been available since 2.3 (or 7034365089bf)
321 vfs = getattr(repo, 'vfs', None)
323 vfs = getattr(repo, 'vfs', None)
322 if vfs:
324 if vfs:
323 return vfs
325 return vfs
324 else:
326 else:
325 return getattr(repo, 'opener')
327 return getattr(repo, 'opener')
326
328
327 def repocleartagscachefunc(repo):
329 def repocleartagscachefunc(repo):
328 """Return the function to clear tags cache according to repo internal API
330 """Return the function to clear tags cache according to repo internal API
329 """
331 """
330 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
332 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
331 # in this case, setattr(repo, '_tagscache', None) or so isn't
333 # in this case, setattr(repo, '_tagscache', None) or so isn't
332 # correct way to clear tags cache, because existing code paths
334 # correct way to clear tags cache, because existing code paths
333 # expect _tagscache to be a structured object.
335 # expect _tagscache to be a structured object.
334 def clearcache():
336 def clearcache():
335 # _tagscache has been filteredpropertycache since 2.5 (or
337 # _tagscache has been filteredpropertycache since 2.5 (or
336 # 98c867ac1330), and delattr() can't work in such case
338 # 98c867ac1330), and delattr() can't work in such case
337 if '_tagscache' in vars(repo):
339 if '_tagscache' in vars(repo):
338 del repo.__dict__['_tagscache']
340 del repo.__dict__['_tagscache']
339 return clearcache
341 return clearcache
340
342
341 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
343 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
342 if repotags: # since 1.4 (or 5614a628d173)
344 if repotags: # since 1.4 (or 5614a628d173)
343 return lambda : repotags.set(None)
345 return lambda : repotags.set(None)
344
346
345 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
347 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
346 if repotagscache: # since 0.6 (or d7df759d0e97)
348 if repotagscache: # since 0.6 (or d7df759d0e97)
347 return lambda : repotagscache.set(None)
349 return lambda : repotagscache.set(None)
348
350
349 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
351 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
350 # this point, but it isn't so problematic, because:
352 # this point, but it isn't so problematic, because:
351 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
353 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
352 # in perftags() causes failure soon
354 # in perftags() causes failure soon
353 # - perf.py itself has been available since 1.1 (or eb240755386d)
355 # - perf.py itself has been available since 1.1 (or eb240755386d)
354 raise error.Abort(("tags API of this hg command is unknown"))
356 raise error.Abort(("tags API of this hg command is unknown"))
355
357
356 # perf commands
358 # perf commands
357
359
358 @command('perfwalk', formatteropts)
360 @command('perfwalk', formatteropts)
359 def perfwalk(ui, repo, *pats, **opts):
361 def perfwalk(ui, repo, *pats, **opts):
360 timer, fm = gettimer(ui, opts)
362 timer, fm = gettimer(ui, opts)
361 try:
363 try:
362 m = scmutil.match(repo[None], pats, {})
364 m = scmutil.match(repo[None], pats, {})
363 timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
365 timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
364 except Exception:
366 except Exception:
365 try:
367 try:
366 m = scmutil.match(repo[None], pats, {})
368 m = scmutil.match(repo[None], pats, {})
367 timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
369 timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
368 except Exception:
370 except Exception:
369 timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
371 timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
370 fm.end()
372 fm.end()
371
373
372 @command('perfannotate', formatteropts)
374 @command('perfannotate', formatteropts)
373 def perfannotate(ui, repo, f, **opts):
375 def perfannotate(ui, repo, f, **opts):
374 timer, fm = gettimer(ui, opts)
376 timer, fm = gettimer(ui, opts)
375 fc = repo['.'][f]
377 fc = repo['.'][f]
376 timer(lambda: len(fc.annotate(True)))
378 timer(lambda: len(fc.annotate(True)))
377 fm.end()
379 fm.end()
378
380
379 @command('perfstatus',
381 @command('perfstatus',
380 [('u', 'unknown', False,
382 [('u', 'unknown', False,
381 'ask status to look for unknown files')] + formatteropts)
383 'ask status to look for unknown files')] + formatteropts)
382 def perfstatus(ui, repo, **opts):
384 def perfstatus(ui, repo, **opts):
383 #m = match.always(repo.root, repo.getcwd())
385 #m = match.always(repo.root, repo.getcwd())
384 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
386 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
385 # False))))
387 # False))))
386 timer, fm = gettimer(ui, opts)
388 timer, fm = gettimer(ui, opts)
387 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
389 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
388 fm.end()
390 fm.end()
389
391
390 @command('perfaddremove', formatteropts)
392 @command('perfaddremove', formatteropts)
391 def perfaddremove(ui, repo, **opts):
393 def perfaddremove(ui, repo, **opts):
392 timer, fm = gettimer(ui, opts)
394 timer, fm = gettimer(ui, opts)
393 try:
395 try:
394 oldquiet = repo.ui.quiet
396 oldquiet = repo.ui.quiet
395 repo.ui.quiet = True
397 repo.ui.quiet = True
396 matcher = scmutil.match(repo[None])
398 matcher = scmutil.match(repo[None])
397 timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
399 timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
398 finally:
400 finally:
399 repo.ui.quiet = oldquiet
401 repo.ui.quiet = oldquiet
400 fm.end()
402 fm.end()
401
403
402 def clearcaches(cl):
404 def clearcaches(cl):
403 # behave somewhat consistently across internal API changes
405 # behave somewhat consistently across internal API changes
404 if util.safehasattr(cl, 'clearcaches'):
406 if util.safehasattr(cl, 'clearcaches'):
405 cl.clearcaches()
407 cl.clearcaches()
406 elif util.safehasattr(cl, '_nodecache'):
408 elif util.safehasattr(cl, '_nodecache'):
407 from mercurial.node import nullid, nullrev
409 from mercurial.node import nullid, nullrev
408 cl._nodecache = {nullid: nullrev}
410 cl._nodecache = {nullid: nullrev}
409 cl._nodepos = None
411 cl._nodepos = None
410
412
411 @command('perfheads', formatteropts)
413 @command('perfheads', formatteropts)
412 def perfheads(ui, repo, **opts):
414 def perfheads(ui, repo, **opts):
413 timer, fm = gettimer(ui, opts)
415 timer, fm = gettimer(ui, opts)
414 cl = repo.changelog
416 cl = repo.changelog
415 def d():
417 def d():
416 len(cl.headrevs())
418 len(cl.headrevs())
417 clearcaches(cl)
419 clearcaches(cl)
418 timer(d)
420 timer(d)
419 fm.end()
421 fm.end()
420
422
421 @command('perftags', formatteropts)
423 @command('perftags', formatteropts)
422 def perftags(ui, repo, **opts):
424 def perftags(ui, repo, **opts):
423 import mercurial.changelog
425 import mercurial.changelog
424 import mercurial.manifest
426 import mercurial.manifest
425 timer, fm = gettimer(ui, opts)
427 timer, fm = gettimer(ui, opts)
426 svfs = getsvfs(repo)
428 svfs = getsvfs(repo)
427 repocleartagscache = repocleartagscachefunc(repo)
429 repocleartagscache = repocleartagscachefunc(repo)
428 def t():
430 def t():
429 repo.changelog = mercurial.changelog.changelog(svfs)
431 repo.changelog = mercurial.changelog.changelog(svfs)
430 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
432 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
431 repocleartagscache()
433 repocleartagscache()
432 return len(repo.tags())
434 return len(repo.tags())
433 timer(t)
435 timer(t)
434 fm.end()
436 fm.end()
435
437
436 @command('perfancestors', formatteropts)
438 @command('perfancestors', formatteropts)
437 def perfancestors(ui, repo, **opts):
439 def perfancestors(ui, repo, **opts):
438 timer, fm = gettimer(ui, opts)
440 timer, fm = gettimer(ui, opts)
439 heads = repo.changelog.headrevs()
441 heads = repo.changelog.headrevs()
440 def d():
442 def d():
441 for a in repo.changelog.ancestors(heads):
443 for a in repo.changelog.ancestors(heads):
442 pass
444 pass
443 timer(d)
445 timer(d)
444 fm.end()
446 fm.end()
445
447
446 @command('perfancestorset', formatteropts)
448 @command('perfancestorset', formatteropts)
447 def perfancestorset(ui, repo, revset, **opts):
449 def perfancestorset(ui, repo, revset, **opts):
448 timer, fm = gettimer(ui, opts)
450 timer, fm = gettimer(ui, opts)
449 revs = repo.revs(revset)
451 revs = repo.revs(revset)
450 heads = repo.changelog.headrevs()
452 heads = repo.changelog.headrevs()
451 def d():
453 def d():
452 s = repo.changelog.ancestors(heads)
454 s = repo.changelog.ancestors(heads)
453 for rev in revs:
455 for rev in revs:
454 rev in s
456 rev in s
455 timer(d)
457 timer(d)
456 fm.end()
458 fm.end()
457
459
458 @command('perfchangegroupchangelog', formatteropts +
460 @command('perfchangegroupchangelog', formatteropts +
459 [('', 'version', '02', 'changegroup version'),
461 [('', 'version', '02', 'changegroup version'),
460 ('r', 'rev', '', 'revisions to add to changegroup')])
462 ('r', 'rev', '', 'revisions to add to changegroup')])
461 def perfchangegroupchangelog(ui, repo, version='02', rev=None, **opts):
463 def perfchangegroupchangelog(ui, repo, version='02', rev=None, **opts):
462 """Benchmark producing a changelog group for a changegroup.
464 """Benchmark producing a changelog group for a changegroup.
463
465
464 This measures the time spent processing the changelog during a
466 This measures the time spent processing the changelog during a
465 bundle operation. This occurs during `hg bundle` and on a server
467 bundle operation. This occurs during `hg bundle` and on a server
466 processing a `getbundle` wire protocol request (handles clones
468 processing a `getbundle` wire protocol request (handles clones
467 and pull requests).
469 and pull requests).
468
470
469 By default, all revisions are added to the changegroup.
471 By default, all revisions are added to the changegroup.
470 """
472 """
471 cl = repo.changelog
473 cl = repo.changelog
472 revs = [cl.lookup(r) for r in repo.revs(rev or 'all()')]
474 revs = [cl.lookup(r) for r in repo.revs(rev or 'all()')]
473 bundler = changegroup.getbundler(version, repo)
475 bundler = changegroup.getbundler(version, repo)
474
476
475 def lookup(node):
477 def lookup(node):
476 # The real bundler reads the revision in order to access the
478 # The real bundler reads the revision in order to access the
477 # manifest node and files list. Do that here.
479 # manifest node and files list. Do that here.
478 cl.read(node)
480 cl.read(node)
479 return node
481 return node
480
482
481 def d():
483 def d():
482 for chunk in bundler.group(revs, cl, lookup):
484 for chunk in bundler.group(revs, cl, lookup):
483 pass
485 pass
484
486
485 timer, fm = gettimer(ui, opts)
487 timer, fm = gettimer(ui, opts)
486 timer(d)
488 timer(d)
487 fm.end()
489 fm.end()
488
490
489 @command('perfdirs', formatteropts)
491 @command('perfdirs', formatteropts)
490 def perfdirs(ui, repo, **opts):
492 def perfdirs(ui, repo, **opts):
491 timer, fm = gettimer(ui, opts)
493 timer, fm = gettimer(ui, opts)
492 dirstate = repo.dirstate
494 dirstate = repo.dirstate
493 'a' in dirstate
495 'a' in dirstate
494 def d():
496 def d():
495 dirstate.dirs()
497 dirstate.dirs()
496 del dirstate._dirs
498 del dirstate._dirs
497 timer(d)
499 timer(d)
498 fm.end()
500 fm.end()
499
501
500 @command('perfdirstate', formatteropts)
502 @command('perfdirstate', formatteropts)
501 def perfdirstate(ui, repo, **opts):
503 def perfdirstate(ui, repo, **opts):
502 timer, fm = gettimer(ui, opts)
504 timer, fm = gettimer(ui, opts)
503 "a" in repo.dirstate
505 "a" in repo.dirstate
504 def d():
506 def d():
505 repo.dirstate.invalidate()
507 repo.dirstate.invalidate()
506 "a" in repo.dirstate
508 "a" in repo.dirstate
507 timer(d)
509 timer(d)
508 fm.end()
510 fm.end()
509
511
510 @command('perfdirstatedirs', formatteropts)
512 @command('perfdirstatedirs', formatteropts)
511 def perfdirstatedirs(ui, repo, **opts):
513 def perfdirstatedirs(ui, repo, **opts):
512 timer, fm = gettimer(ui, opts)
514 timer, fm = gettimer(ui, opts)
513 "a" in repo.dirstate
515 "a" in repo.dirstate
514 def d():
516 def d():
515 "a" in repo.dirstate._dirs
517 "a" in repo.dirstate._dirs
516 del repo.dirstate._dirs
518 del repo.dirstate._dirs
517 timer(d)
519 timer(d)
518 fm.end()
520 fm.end()
519
521
520 @command('perfdirstatefoldmap', formatteropts)
522 @command('perfdirstatefoldmap', formatteropts)
521 def perfdirstatefoldmap(ui, repo, **opts):
523 def perfdirstatefoldmap(ui, repo, **opts):
522 timer, fm = gettimer(ui, opts)
524 timer, fm = gettimer(ui, opts)
523 dirstate = repo.dirstate
525 dirstate = repo.dirstate
524 'a' in dirstate
526 'a' in dirstate
525 def d():
527 def d():
526 dirstate._filefoldmap.get('a')
528 dirstate._filefoldmap.get('a')
527 del dirstate._filefoldmap
529 del dirstate._filefoldmap
528 timer(d)
530 timer(d)
529 fm.end()
531 fm.end()
530
532
531 @command('perfdirfoldmap', formatteropts)
533 @command('perfdirfoldmap', formatteropts)
532 def perfdirfoldmap(ui, repo, **opts):
534 def perfdirfoldmap(ui, repo, **opts):
533 timer, fm = gettimer(ui, opts)
535 timer, fm = gettimer(ui, opts)
534 dirstate = repo.dirstate
536 dirstate = repo.dirstate
535 'a' in dirstate
537 'a' in dirstate
536 def d():
538 def d():
537 dirstate._dirfoldmap.get('a')
539 dirstate._dirfoldmap.get('a')
538 del dirstate._dirfoldmap
540 del dirstate._dirfoldmap
539 del dirstate._dirs
541 del dirstate._dirs
540 timer(d)
542 timer(d)
541 fm.end()
543 fm.end()
542
544
543 @command('perfdirstatewrite', formatteropts)
545 @command('perfdirstatewrite', formatteropts)
544 def perfdirstatewrite(ui, repo, **opts):
546 def perfdirstatewrite(ui, repo, **opts):
545 timer, fm = gettimer(ui, opts)
547 timer, fm = gettimer(ui, opts)
546 ds = repo.dirstate
548 ds = repo.dirstate
547 "a" in ds
549 "a" in ds
548 def d():
550 def d():
549 ds._dirty = True
551 ds._dirty = True
550 ds.write(repo.currenttransaction())
552 ds.write(repo.currenttransaction())
551 timer(d)
553 timer(d)
552 fm.end()
554 fm.end()
553
555
554 @command('perfmergecalculate',
556 @command('perfmergecalculate',
555 [('r', 'rev', '.', 'rev to merge against')] + formatteropts)
557 [('r', 'rev', '.', 'rev to merge against')] + formatteropts)
556 def perfmergecalculate(ui, repo, rev, **opts):
558 def perfmergecalculate(ui, repo, rev, **opts):
557 timer, fm = gettimer(ui, opts)
559 timer, fm = gettimer(ui, opts)
558 wctx = repo[None]
560 wctx = repo[None]
559 rctx = scmutil.revsingle(repo, rev, rev)
561 rctx = scmutil.revsingle(repo, rev, rev)
560 ancestor = wctx.ancestor(rctx)
562 ancestor = wctx.ancestor(rctx)
561 # we don't want working dir files to be stat'd in the benchmark, so prime
563 # we don't want working dir files to be stat'd in the benchmark, so prime
562 # that cache
564 # that cache
563 wctx.dirty()
565 wctx.dirty()
564 def d():
566 def d():
565 # acceptremote is True because we don't want prompts in the middle of
567 # acceptremote is True because we don't want prompts in the middle of
566 # our benchmark
568 # our benchmark
567 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
569 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
568 acceptremote=True, followcopies=True)
570 acceptremote=True, followcopies=True)
569 timer(d)
571 timer(d)
570 fm.end()
572 fm.end()
571
573
572 @command('perfpathcopies', [], "REV REV")
574 @command('perfpathcopies', [], "REV REV")
573 def perfpathcopies(ui, repo, rev1, rev2, **opts):
575 def perfpathcopies(ui, repo, rev1, rev2, **opts):
574 timer, fm = gettimer(ui, opts)
576 timer, fm = gettimer(ui, opts)
575 ctx1 = scmutil.revsingle(repo, rev1, rev1)
577 ctx1 = scmutil.revsingle(repo, rev1, rev1)
576 ctx2 = scmutil.revsingle(repo, rev2, rev2)
578 ctx2 = scmutil.revsingle(repo, rev2, rev2)
577 def d():
579 def d():
578 copies.pathcopies(ctx1, ctx2)
580 copies.pathcopies(ctx1, ctx2)
579 timer(d)
581 timer(d)
580 fm.end()
582 fm.end()
581
583
582 @command('perfmanifest', [], 'REV')
584 @command('perfmanifest', [], 'REV')
583 def perfmanifest(ui, repo, rev, **opts):
585 def perfmanifest(ui, repo, rev, **opts):
584 timer, fm = gettimer(ui, opts)
586 timer, fm = gettimer(ui, opts)
585 ctx = scmutil.revsingle(repo, rev, rev)
587 ctx = scmutil.revsingle(repo, rev, rev)
586 t = ctx.manifestnode()
588 t = ctx.manifestnode()
587 def d():
589 def d():
588 repo.manifestlog.clearcaches()
590 repo.manifestlog.clearcaches()
589 repo.manifestlog[t].read()
591 repo.manifestlog[t].read()
590 timer(d)
592 timer(d)
591 fm.end()
593 fm.end()
592
594
593 @command('perfchangeset', formatteropts)
595 @command('perfchangeset', formatteropts)
594 def perfchangeset(ui, repo, rev, **opts):
596 def perfchangeset(ui, repo, rev, **opts):
595 timer, fm = gettimer(ui, opts)
597 timer, fm = gettimer(ui, opts)
596 n = repo[rev].node()
598 n = repo[rev].node()
597 def d():
599 def d():
598 repo.changelog.read(n)
600 repo.changelog.read(n)
599 #repo.changelog._cache = None
601 #repo.changelog._cache = None
600 timer(d)
602 timer(d)
601 fm.end()
603 fm.end()
602
604
603 @command('perfindex', formatteropts)
605 @command('perfindex', formatteropts)
604 def perfindex(ui, repo, **opts):
606 def perfindex(ui, repo, **opts):
605 import mercurial.revlog
607 import mercurial.revlog
606 timer, fm = gettimer(ui, opts)
608 timer, fm = gettimer(ui, opts)
607 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
609 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
608 n = repo["tip"].node()
610 n = repo["tip"].node()
609 svfs = getsvfs(repo)
611 svfs = getsvfs(repo)
610 def d():
612 def d():
611 cl = mercurial.revlog.revlog(svfs, "00changelog.i")
613 cl = mercurial.revlog.revlog(svfs, "00changelog.i")
612 cl.rev(n)
614 cl.rev(n)
613 timer(d)
615 timer(d)
614 fm.end()
616 fm.end()
615
617
616 @command('perfstartup', formatteropts)
618 @command('perfstartup', formatteropts)
617 def perfstartup(ui, repo, **opts):
619 def perfstartup(ui, repo, **opts):
618 timer, fm = gettimer(ui, opts)
620 timer, fm = gettimer(ui, opts)
619 cmd = sys.argv[0]
621 cmd = sys.argv[0]
620 def d():
622 def d():
621 if os.name != 'nt':
623 if os.name != 'nt':
622 os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
624 os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
623 else:
625 else:
624 os.environ['HGRCPATH'] = ''
626 os.environ['HGRCPATH'] = ''
625 os.system("%s version -q > NUL" % cmd)
627 os.system("%s version -q > NUL" % cmd)
626 timer(d)
628 timer(d)
627 fm.end()
629 fm.end()
628
630
629 @command('perfparents', formatteropts)
631 @command('perfparents', formatteropts)
630 def perfparents(ui, repo, **opts):
632 def perfparents(ui, repo, **opts):
631 timer, fm = gettimer(ui, opts)
633 timer, fm = gettimer(ui, opts)
632 # control the number of commits perfparents iterates over
634 # control the number of commits perfparents iterates over
633 # experimental config: perf.parentscount
635 # experimental config: perf.parentscount
634 count = getint(ui, "perf", "parentscount", 1000)
636 count = getint(ui, "perf", "parentscount", 1000)
635 if len(repo.changelog) < count:
637 if len(repo.changelog) < count:
636 raise error.Abort("repo needs %d commits for this test" % count)
638 raise error.Abort("repo needs %d commits for this test" % count)
637 repo = repo.unfiltered()
639 repo = repo.unfiltered()
638 nl = [repo.changelog.node(i) for i in xrange(count)]
640 nl = [repo.changelog.node(i) for i in xrange(count)]
639 def d():
641 def d():
640 for n in nl:
642 for n in nl:
641 repo.changelog.parents(n)
643 repo.changelog.parents(n)
642 timer(d)
644 timer(d)
643 fm.end()
645 fm.end()
644
646
645 @command('perfctxfiles', formatteropts)
647 @command('perfctxfiles', formatteropts)
646 def perfctxfiles(ui, repo, x, **opts):
648 def perfctxfiles(ui, repo, x, **opts):
647 x = int(x)
649 x = int(x)
648 timer, fm = gettimer(ui, opts)
650 timer, fm = gettimer(ui, opts)
649 def d():
651 def d():
650 len(repo[x].files())
652 len(repo[x].files())
651 timer(d)
653 timer(d)
652 fm.end()
654 fm.end()
653
655
654 @command('perfrawfiles', formatteropts)
656 @command('perfrawfiles', formatteropts)
655 def perfrawfiles(ui, repo, x, **opts):
657 def perfrawfiles(ui, repo, x, **opts):
656 x = int(x)
658 x = int(x)
657 timer, fm = gettimer(ui, opts)
659 timer, fm = gettimer(ui, opts)
658 cl = repo.changelog
660 cl = repo.changelog
659 def d():
661 def d():
660 len(cl.read(x)[3])
662 len(cl.read(x)[3])
661 timer(d)
663 timer(d)
662 fm.end()
664 fm.end()
663
665
664 @command('perflookup', formatteropts)
666 @command('perflookup', formatteropts)
665 def perflookup(ui, repo, rev, **opts):
667 def perflookup(ui, repo, rev, **opts):
666 timer, fm = gettimer(ui, opts)
668 timer, fm = gettimer(ui, opts)
667 timer(lambda: len(repo.lookup(rev)))
669 timer(lambda: len(repo.lookup(rev)))
668 fm.end()
670 fm.end()
669
671
670 @command('perfrevrange', formatteropts)
672 @command('perfrevrange', formatteropts)
671 def perfrevrange(ui, repo, *specs, **opts):
673 def perfrevrange(ui, repo, *specs, **opts):
672 timer, fm = gettimer(ui, opts)
674 timer, fm = gettimer(ui, opts)
673 revrange = scmutil.revrange
675 revrange = scmutil.revrange
674 timer(lambda: len(revrange(repo, specs)))
676 timer(lambda: len(revrange(repo, specs)))
675 fm.end()
677 fm.end()
676
678
677 @command('perfnodelookup', formatteropts)
679 @command('perfnodelookup', formatteropts)
678 def perfnodelookup(ui, repo, rev, **opts):
680 def perfnodelookup(ui, repo, rev, **opts):
679 timer, fm = gettimer(ui, opts)
681 timer, fm = gettimer(ui, opts)
680 import mercurial.revlog
682 import mercurial.revlog
681 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
683 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
682 n = repo[rev].node()
684 n = repo[rev].node()
683 cl = mercurial.revlog.revlog(getsvfs(repo), "00changelog.i")
685 cl = mercurial.revlog.revlog(getsvfs(repo), "00changelog.i")
684 def d():
686 def d():
685 cl.rev(n)
687 cl.rev(n)
686 clearcaches(cl)
688 clearcaches(cl)
687 timer(d)
689 timer(d)
688 fm.end()
690 fm.end()
689
691
690 @command('perflog',
692 @command('perflog',
691 [('', 'rename', False, 'ask log to follow renames')] + formatteropts)
693 [('', 'rename', False, 'ask log to follow renames')] + formatteropts)
692 def perflog(ui, repo, rev=None, **opts):
694 def perflog(ui, repo, rev=None, **opts):
693 if rev is None:
695 if rev is None:
694 rev=[]
696 rev=[]
695 timer, fm = gettimer(ui, opts)
697 timer, fm = gettimer(ui, opts)
696 ui.pushbuffer()
698 ui.pushbuffer()
697 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
699 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
698 copies=opts.get('rename')))
700 copies=opts.get('rename')))
699 ui.popbuffer()
701 ui.popbuffer()
700 fm.end()
702 fm.end()
701
703
702 @command('perfmoonwalk', formatteropts)
704 @command('perfmoonwalk', formatteropts)
703 def perfmoonwalk(ui, repo, **opts):
705 def perfmoonwalk(ui, repo, **opts):
704 """benchmark walking the changelog backwards
706 """benchmark walking the changelog backwards
705
707
706 This also loads the changelog data for each revision in the changelog.
708 This also loads the changelog data for each revision in the changelog.
707 """
709 """
708 timer, fm = gettimer(ui, opts)
710 timer, fm = gettimer(ui, opts)
709 def moonwalk():
711 def moonwalk():
710 for i in xrange(len(repo), -1, -1):
712 for i in xrange(len(repo), -1, -1):
711 ctx = repo[i]
713 ctx = repo[i]
712 ctx.branch() # read changelog data (in addition to the index)
714 ctx.branch() # read changelog data (in addition to the index)
713 timer(moonwalk)
715 timer(moonwalk)
714 fm.end()
716 fm.end()
715
717
716 @command('perftemplating', formatteropts)
718 @command('perftemplating', formatteropts)
717 def perftemplating(ui, repo, rev=None, **opts):
719 def perftemplating(ui, repo, rev=None, **opts):
718 if rev is None:
720 if rev is None:
719 rev=[]
721 rev=[]
720 timer, fm = gettimer(ui, opts)
722 timer, fm = gettimer(ui, opts)
721 ui.pushbuffer()
723 ui.pushbuffer()
722 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
724 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
723 template='{date|shortdate} [{rev}:{node|short}]'
725 template='{date|shortdate} [{rev}:{node|short}]'
724 ' {author|person}: {desc|firstline}\n'))
726 ' {author|person}: {desc|firstline}\n'))
725 ui.popbuffer()
727 ui.popbuffer()
726 fm.end()
728 fm.end()
727
729
728 @command('perfcca', formatteropts)
730 @command('perfcca', formatteropts)
729 def perfcca(ui, repo, **opts):
731 def perfcca(ui, repo, **opts):
730 timer, fm = gettimer(ui, opts)
732 timer, fm = gettimer(ui, opts)
731 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
733 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
732 fm.end()
734 fm.end()
733
735
734 @command('perffncacheload', formatteropts)
736 @command('perffncacheload', formatteropts)
735 def perffncacheload(ui, repo, **opts):
737 def perffncacheload(ui, repo, **opts):
736 timer, fm = gettimer(ui, opts)
738 timer, fm = gettimer(ui, opts)
737 s = repo.store
739 s = repo.store
738 def d():
740 def d():
739 s.fncache._load()
741 s.fncache._load()
740 timer(d)
742 timer(d)
741 fm.end()
743 fm.end()
742
744
743 @command('perffncachewrite', formatteropts)
745 @command('perffncachewrite', formatteropts)
744 def perffncachewrite(ui, repo, **opts):
746 def perffncachewrite(ui, repo, **opts):
745 timer, fm = gettimer(ui, opts)
747 timer, fm = gettimer(ui, opts)
746 s = repo.store
748 s = repo.store
747 s.fncache._load()
749 s.fncache._load()
748 lock = repo.lock()
750 lock = repo.lock()
749 tr = repo.transaction('perffncachewrite')
751 tr = repo.transaction('perffncachewrite')
750 def d():
752 def d():
751 s.fncache._dirty = True
753 s.fncache._dirty = True
752 s.fncache.write(tr)
754 s.fncache.write(tr)
753 timer(d)
755 timer(d)
754 tr.close()
756 tr.close()
755 lock.release()
757 lock.release()
756 fm.end()
758 fm.end()
757
759
758 @command('perffncacheencode', formatteropts)
760 @command('perffncacheencode', formatteropts)
759 def perffncacheencode(ui, repo, **opts):
761 def perffncacheencode(ui, repo, **opts):
760 timer, fm = gettimer(ui, opts)
762 timer, fm = gettimer(ui, opts)
761 s = repo.store
763 s = repo.store
762 s.fncache._load()
764 s.fncache._load()
763 def d():
765 def d():
764 for p in s.fncache.entries:
766 for p in s.fncache.entries:
765 s.encode(p)
767 s.encode(p)
766 timer(d)
768 timer(d)
767 fm.end()
769 fm.end()
768
770
769 @command('perfbdiff', revlogopts + formatteropts + [
771 @command('perfbdiff', revlogopts + formatteropts + [
770 ('', 'count', 1, 'number of revisions to test (when using --startrev)'),
772 ('', 'count', 1, 'number of revisions to test (when using --startrev)'),
771 ('', 'alldata', False, 'test bdiffs for all associated revisions')],
773 ('', 'alldata', False, 'test bdiffs for all associated revisions')],
772 '-c|-m|FILE REV')
774 '-c|-m|FILE REV')
773 def perfbdiff(ui, repo, file_, rev=None, count=None, **opts):
775 def perfbdiff(ui, repo, file_, rev=None, count=None, **opts):
774 """benchmark a bdiff between revisions
776 """benchmark a bdiff between revisions
775
777
776 By default, benchmark a bdiff between its delta parent and itself.
778 By default, benchmark a bdiff between its delta parent and itself.
777
779
778 With ``--count``, benchmark bdiffs between delta parents and self for N
780 With ``--count``, benchmark bdiffs between delta parents and self for N
779 revisions starting at the specified revision.
781 revisions starting at the specified revision.
780
782
781 With ``--alldata``, assume the requested revision is a changeset and
783 With ``--alldata``, assume the requested revision is a changeset and
782 measure bdiffs for all changes related to that changeset (manifest
784 measure bdiffs for all changes related to that changeset (manifest
783 and filelogs).
785 and filelogs).
784 """
786 """
785 if opts['alldata']:
787 if opts['alldata']:
786 opts['changelog'] = True
788 opts['changelog'] = True
787
789
788 if opts.get('changelog') or opts.get('manifest'):
790 if opts.get('changelog') or opts.get('manifest'):
789 file_, rev = None, file_
791 file_, rev = None, file_
790 elif rev is None:
792 elif rev is None:
791 raise error.CommandError('perfbdiff', 'invalid arguments')
793 raise error.CommandError('perfbdiff', 'invalid arguments')
792
794
793 textpairs = []
795 textpairs = []
794
796
795 r = cmdutil.openrevlog(repo, 'perfbdiff', file_, opts)
797 r = cmdutil.openrevlog(repo, 'perfbdiff', file_, opts)
796
798
797 startrev = r.rev(r.lookup(rev))
799 startrev = r.rev(r.lookup(rev))
798 for rev in range(startrev, min(startrev + count, len(r) - 1)):
800 for rev in range(startrev, min(startrev + count, len(r) - 1)):
799 if opts['alldata']:
801 if opts['alldata']:
800 # Load revisions associated with changeset.
802 # Load revisions associated with changeset.
801 ctx = repo[rev]
803 ctx = repo[rev]
802 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
804 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
803 for pctx in ctx.parents():
805 for pctx in ctx.parents():
804 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
806 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
805 textpairs.append((pman, mtext))
807 textpairs.append((pman, mtext))
806
808
807 # Load filelog revisions by iterating manifest delta.
809 # Load filelog revisions by iterating manifest delta.
808 man = ctx.manifest()
810 man = ctx.manifest()
809 pman = ctx.p1().manifest()
811 pman = ctx.p1().manifest()
810 for filename, change in pman.diff(man).items():
812 for filename, change in pman.diff(man).items():
811 fctx = repo.file(filename)
813 fctx = repo.file(filename)
812 f1 = fctx.revision(change[0][0] or -1)
814 f1 = fctx.revision(change[0][0] or -1)
813 f2 = fctx.revision(change[1][0] or -1)
815 f2 = fctx.revision(change[1][0] or -1)
814 textpairs.append((f1, f2))
816 textpairs.append((f1, f2))
815 else:
817 else:
816 dp = r.deltaparent(rev)
818 dp = r.deltaparent(rev)
817 textpairs.append((r.revision(dp), r.revision(rev)))
819 textpairs.append((r.revision(dp), r.revision(rev)))
818
820
819 def d():
821 def d():
820 for pair in textpairs:
822 for pair in textpairs:
821 mdiff.textdiff(*pair)
823 mdiff.textdiff(*pair)
822
824
823 timer, fm = gettimer(ui, opts)
825 timer, fm = gettimer(ui, opts)
824 timer(d)
826 timer(d)
825 fm.end()
827 fm.end()
826
828
827 @command('perfdiffwd', formatteropts)
829 @command('perfdiffwd', formatteropts)
828 def perfdiffwd(ui, repo, **opts):
830 def perfdiffwd(ui, repo, **opts):
829 """Profile diff of working directory changes"""
831 """Profile diff of working directory changes"""
830 timer, fm = gettimer(ui, opts)
832 timer, fm = gettimer(ui, opts)
831 options = {
833 options = {
832 'w': 'ignore_all_space',
834 'w': 'ignore_all_space',
833 'b': 'ignore_space_change',
835 'b': 'ignore_space_change',
834 'B': 'ignore_blank_lines',
836 'B': 'ignore_blank_lines',
835 }
837 }
836
838
837 for diffopt in ('', 'w', 'b', 'B', 'wB'):
839 for diffopt in ('', 'w', 'b', 'B', 'wB'):
838 opts = dict((options[c], '1') for c in diffopt)
840 opts = dict((options[c], '1') for c in diffopt)
839 def d():
841 def d():
840 ui.pushbuffer()
842 ui.pushbuffer()
841 commands.diff(ui, repo, **opts)
843 commands.diff(ui, repo, **opts)
842 ui.popbuffer()
844 ui.popbuffer()
843 title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
845 title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
844 timer(d, title)
846 timer(d, title)
845 fm.end()
847 fm.end()
846
848
847 @command('perfrevlog', revlogopts + formatteropts +
849 @command('perfrevlog', revlogopts + formatteropts +
848 [('d', 'dist', 100, 'distance between the revisions'),
850 [('d', 'dist', 100, 'distance between the revisions'),
849 ('s', 'startrev', 0, 'revision to start reading at'),
851 ('s', 'startrev', 0, 'revision to start reading at'),
850 ('', 'reverse', False, 'read in reverse')],
852 ('', 'reverse', False, 'read in reverse')],
851 '-c|-m|FILE')
853 '-c|-m|FILE')
852 def perfrevlog(ui, repo, file_=None, startrev=0, reverse=False, **opts):
854 def perfrevlog(ui, repo, file_=None, startrev=0, reverse=False, **opts):
853 """Benchmark reading a series of revisions from a revlog.
855 """Benchmark reading a series of revisions from a revlog.
854
856
855 By default, we read every ``-d/--dist`` revision from 0 to tip of
857 By default, we read every ``-d/--dist`` revision from 0 to tip of
856 the specified revlog.
858 the specified revlog.
857
859
858 The start revision can be defined via ``-s/--startrev``.
860 The start revision can be defined via ``-s/--startrev``.
859 """
861 """
860 rl = cmdutil.openrevlog(repo, 'perfrevlog', file_, opts)
862 rl = cmdutil.openrevlog(repo, 'perfrevlog', file_, opts)
861 rllen = getlen(ui)(rl)
863 rllen = getlen(ui)(rl)
862
864
863 def d():
865 def d():
864 rl.clearcaches()
866 rl.clearcaches()
865
867
866 beginrev = startrev
868 beginrev = startrev
867 endrev = rllen
869 endrev = rllen
868 dist = opts['dist']
870 dist = opts['dist']
869
871
870 if reverse:
872 if reverse:
871 beginrev, endrev = endrev, beginrev
873 beginrev, endrev = endrev, beginrev
872 dist = -1 * dist
874 dist = -1 * dist
873
875
874 for x in xrange(beginrev, endrev, dist):
876 for x in xrange(beginrev, endrev, dist):
875 # Old revisions don't support passing int.
877 # Old revisions don't support passing int.
876 n = rl.node(x)
878 n = rl.node(x)
877 rl.revision(n)
879 rl.revision(n)
878
880
879 timer, fm = gettimer(ui, opts)
881 timer, fm = gettimer(ui, opts)
880 timer(d)
882 timer(d)
881 fm.end()
883 fm.end()
882
884
883 @command('perfrevlogchunks', revlogopts + formatteropts +
885 @command('perfrevlogchunks', revlogopts + formatteropts +
884 [('e', 'engines', '', 'compression engines to use'),
886 [('e', 'engines', '', 'compression engines to use'),
885 ('s', 'startrev', 0, 'revision to start at')],
887 ('s', 'startrev', 0, 'revision to start at')],
886 '-c|-m|FILE')
888 '-c|-m|FILE')
887 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
889 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
888 """Benchmark operations on revlog chunks.
890 """Benchmark operations on revlog chunks.
889
891
890 Logically, each revlog is a collection of fulltext revisions. However,
892 Logically, each revlog is a collection of fulltext revisions. However,
891 stored within each revlog are "chunks" of possibly compressed data. This
893 stored within each revlog are "chunks" of possibly compressed data. This
892 data needs to be read and decompressed or compressed and written.
894 data needs to be read and decompressed or compressed and written.
893
895
894 This command measures the time it takes to read+decompress and recompress
896 This command measures the time it takes to read+decompress and recompress
895 chunks in a revlog. It effectively isolates I/O and compression performance.
897 chunks in a revlog. It effectively isolates I/O and compression performance.
896 For measurements of higher-level operations like resolving revisions,
898 For measurements of higher-level operations like resolving revisions,
897 see ``perfrevlog`` and ``perfrevlogrevision``.
899 see ``perfrevlog`` and ``perfrevlogrevision``.
898 """
900 """
899 rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
901 rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
900
902
901 # _chunkraw was renamed to _getsegmentforrevs.
903 # _chunkraw was renamed to _getsegmentforrevs.
902 try:
904 try:
903 segmentforrevs = rl._getsegmentforrevs
905 segmentforrevs = rl._getsegmentforrevs
904 except AttributeError:
906 except AttributeError:
905 segmentforrevs = rl._chunkraw
907 segmentforrevs = rl._chunkraw
906
908
907 # Verify engines argument.
909 # Verify engines argument.
908 if engines:
910 if engines:
909 engines = set(e.strip() for e in engines.split(','))
911 engines = set(e.strip() for e in engines.split(','))
910 for engine in engines:
912 for engine in engines:
911 try:
913 try:
912 util.compressionengines[engine]
914 util.compressionengines[engine]
913 except KeyError:
915 except KeyError:
914 raise error.Abort('unknown compression engine: %s' % engine)
916 raise error.Abort('unknown compression engine: %s' % engine)
915 else:
917 else:
916 engines = []
918 engines = []
917 for e in util.compengines:
919 for e in util.compengines:
918 engine = util.compengines[e]
920 engine = util.compengines[e]
919 try:
921 try:
920 if engine.available():
922 if engine.available():
921 engine.revlogcompressor().compress('dummy')
923 engine.revlogcompressor().compress('dummy')
922 engines.append(e)
924 engines.append(e)
923 except NotImplementedError:
925 except NotImplementedError:
924 pass
926 pass
925
927
926 revs = list(rl.revs(startrev, len(rl) - 1))
928 revs = list(rl.revs(startrev, len(rl) - 1))
927
929
928 def rlfh(rl):
930 def rlfh(rl):
929 if rl._inline:
931 if rl._inline:
930 return getsvfs(repo)(rl.indexfile)
932 return getsvfs(repo)(rl.indexfile)
931 else:
933 else:
932 return getsvfs(repo)(rl.datafile)
934 return getsvfs(repo)(rl.datafile)
933
935
934 def doread():
936 def doread():
935 rl.clearcaches()
937 rl.clearcaches()
936 for rev in revs:
938 for rev in revs:
937 segmentforrevs(rev, rev)
939 segmentforrevs(rev, rev)
938
940
939 def doreadcachedfh():
941 def doreadcachedfh():
940 rl.clearcaches()
942 rl.clearcaches()
941 fh = rlfh(rl)
943 fh = rlfh(rl)
942 for rev in revs:
944 for rev in revs:
943 segmentforrevs(rev, rev, df=fh)
945 segmentforrevs(rev, rev, df=fh)
944
946
945 def doreadbatch():
947 def doreadbatch():
946 rl.clearcaches()
948 rl.clearcaches()
947 segmentforrevs(revs[0], revs[-1])
949 segmentforrevs(revs[0], revs[-1])
948
950
949 def doreadbatchcachedfh():
951 def doreadbatchcachedfh():
950 rl.clearcaches()
952 rl.clearcaches()
951 fh = rlfh(rl)
953 fh = rlfh(rl)
952 segmentforrevs(revs[0], revs[-1], df=fh)
954 segmentforrevs(revs[0], revs[-1], df=fh)
953
955
954 def dochunk():
956 def dochunk():
955 rl.clearcaches()
957 rl.clearcaches()
956 fh = rlfh(rl)
958 fh = rlfh(rl)
957 for rev in revs:
959 for rev in revs:
958 rl._chunk(rev, df=fh)
960 rl._chunk(rev, df=fh)
959
961
960 chunks = [None]
962 chunks = [None]
961
963
962 def dochunkbatch():
964 def dochunkbatch():
963 rl.clearcaches()
965 rl.clearcaches()
964 fh = rlfh(rl)
966 fh = rlfh(rl)
965 # Save chunks as a side-effect.
967 # Save chunks as a side-effect.
966 chunks[0] = rl._chunks(revs, df=fh)
968 chunks[0] = rl._chunks(revs, df=fh)
967
969
968 def docompress(compressor):
970 def docompress(compressor):
969 rl.clearcaches()
971 rl.clearcaches()
970
972
971 try:
973 try:
972 # Swap in the requested compression engine.
974 # Swap in the requested compression engine.
973 oldcompressor = rl._compressor
975 oldcompressor = rl._compressor
974 rl._compressor = compressor
976 rl._compressor = compressor
975 for chunk in chunks[0]:
977 for chunk in chunks[0]:
976 rl.compress(chunk)
978 rl.compress(chunk)
977 finally:
979 finally:
978 rl._compressor = oldcompressor
980 rl._compressor = oldcompressor
979
981
980 benches = [
982 benches = [
981 (lambda: doread(), 'read'),
983 (lambda: doread(), 'read'),
982 (lambda: doreadcachedfh(), 'read w/ reused fd'),
984 (lambda: doreadcachedfh(), 'read w/ reused fd'),
983 (lambda: doreadbatch(), 'read batch'),
985 (lambda: doreadbatch(), 'read batch'),
984 (lambda: doreadbatchcachedfh(), 'read batch w/ reused fd'),
986 (lambda: doreadbatchcachedfh(), 'read batch w/ reused fd'),
985 (lambda: dochunk(), 'chunk'),
987 (lambda: dochunk(), 'chunk'),
986 (lambda: dochunkbatch(), 'chunk batch'),
988 (lambda: dochunkbatch(), 'chunk batch'),
987 ]
989 ]
988
990
989 for engine in sorted(engines):
991 for engine in sorted(engines):
990 compressor = util.compengines[engine].revlogcompressor()
992 compressor = util.compengines[engine].revlogcompressor()
991 benches.append((functools.partial(docompress, compressor),
993 benches.append((functools.partial(docompress, compressor),
992 'compress w/ %s' % engine))
994 'compress w/ %s' % engine))
993
995
994 for fn, title in benches:
996 for fn, title in benches:
995 timer, fm = gettimer(ui, opts)
997 timer, fm = gettimer(ui, opts)
996 timer(fn, title=title)
998 timer(fn, title=title)
997 fm.end()
999 fm.end()
998
1000
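A minimal sketch of the compression pass measured by docompress() above, assuming a revlog ``rl`` opened as in the command body and using one engine name purely for illustration::

    compressor = util.compengines['zlib'].revlogcompressor()
    chunks = rl._chunks(list(rl.revs(0, len(rl) - 1)))   # decompressed chunks
    oldcompressor = rl._compressor
    rl._compressor = compressor            # swap in the engine under test
    try:
        for chunk in chunks:
            rl.compress(chunk)             # recompress every chunk
    finally:
        rl._compressor = oldcompressor     # always restore the original engine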
999 @command('perfrevlogrevision', revlogopts + formatteropts +
1001 @command('perfrevlogrevision', revlogopts + formatteropts +
1000 [('', 'cache', False, 'use caches instead of clearing')],
1002 [('', 'cache', False, 'use caches instead of clearing')],
1001 '-c|-m|FILE REV')
1003 '-c|-m|FILE REV')
1002 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1004 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1003 """Benchmark obtaining a revlog revision.
1005 """Benchmark obtaining a revlog revision.
1004
1006
1005 Obtaining a revlog revision consists of roughly the following steps:
1007 Obtaining a revlog revision consists of roughly the following steps:
1006
1008
1007 1. Compute the delta chain
1009 1. Compute the delta chain
1008 2. Obtain the raw chunks for that delta chain
1010 2. Obtain the raw chunks for that delta chain
1009 3. Decompress each raw chunk
1011 3. Decompress each raw chunk
1010 4. Apply binary patches to obtain fulltext
1012 4. Apply binary patches to obtain fulltext
1011 5. Verify hash of fulltext
1013 5. Verify hash of fulltext
1012
1014
1013 This command measures the time spent in each of these phases.
1015 This command measures the time spent in each of these phases.
1014 """
1016 """
1015 if opts.get('changelog') or opts.get('manifest'):
1017 if opts.get('changelog') or opts.get('manifest'):
1016 file_, rev = None, file_
1018 file_, rev = None, file_
1017 elif rev is None:
1019 elif rev is None:
1018 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1020 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1019
1021
1020 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1022 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1021
1023
1022 # _chunkraw was renamed to _getsegmentforrevs.
1024 # _chunkraw was renamed to _getsegmentforrevs.
1023 try:
1025 try:
1024 segmentforrevs = r._getsegmentforrevs
1026 segmentforrevs = r._getsegmentforrevs
1025 except AttributeError:
1027 except AttributeError:
1026 segmentforrevs = r._chunkraw
1028 segmentforrevs = r._chunkraw
1027
1029
1028 node = r.lookup(rev)
1030 node = r.lookup(rev)
1029 rev = r.rev(node)
1031 rev = r.rev(node)
1030
1032
1031 def getrawchunks(data, chain):
1033 def getrawchunks(data, chain):
1032 start = r.start
1034 start = r.start
1033 length = r.length
1035 length = r.length
1034 inline = r._inline
1036 inline = r._inline
1035 iosize = r._io.size
1037 iosize = r._io.size
1036 buffer = util.buffer
1038 buffer = util.buffer
1037 offset = start(chain[0])
1039 offset = start(chain[0])
1038
1040
1039 chunks = []
1041 chunks = []
1040 ladd = chunks.append
1042 ladd = chunks.append
1041
1043
1042 for rev in chain:
1044 for rev in chain:
1043 chunkstart = start(rev)
1045 chunkstart = start(rev)
1044 if inline:
1046 if inline:
1045 chunkstart += (rev + 1) * iosize
1047 chunkstart += (rev + 1) * iosize
1046 chunklength = length(rev)
1048 chunklength = length(rev)
1047 ladd(buffer(data, chunkstart - offset, chunklength))
1049 ladd(buffer(data, chunkstart - offset, chunklength))
1048
1050
1049 return chunks
1051 return chunks
1050
1052
1051 def dodeltachain(rev):
1053 def dodeltachain(rev):
1052 if not cache:
1054 if not cache:
1053 r.clearcaches()
1055 r.clearcaches()
1054 r._deltachain(rev)
1056 r._deltachain(rev)
1055
1057
1056 def doread(chain):
1058 def doread(chain):
1057 if not cache:
1059 if not cache:
1058 r.clearcaches()
1060 r.clearcaches()
1059 segmentforrevs(chain[0], chain[-1])
1061 segmentforrevs(chain[0], chain[-1])
1060
1062
1061 def dorawchunks(data, chain):
1063 def dorawchunks(data, chain):
1062 if not cache:
1064 if not cache:
1063 r.clearcaches()
1065 r.clearcaches()
1064 getrawchunks(data, chain)
1066 getrawchunks(data, chain)
1065
1067
1066 def dodecompress(chunks):
1068 def dodecompress(chunks):
1067 decomp = r.decompress
1069 decomp = r.decompress
1068 for chunk in chunks:
1070 for chunk in chunks:
1069 decomp(chunk)
1071 decomp(chunk)
1070
1072
1071 def dopatch(text, bins):
1073 def dopatch(text, bins):
1072 if not cache:
1074 if not cache:
1073 r.clearcaches()
1075 r.clearcaches()
1074 mdiff.patches(text, bins)
1076 mdiff.patches(text, bins)
1075
1077
1076 def dohash(text):
1078 def dohash(text):
1077 if not cache:
1079 if not cache:
1078 r.clearcaches()
1080 r.clearcaches()
1079 r.checkhash(text, node, rev=rev)
1081 r.checkhash(text, node, rev=rev)
1080
1082
1081 def dorevision():
1083 def dorevision():
1082 if not cache:
1084 if not cache:
1083 r.clearcaches()
1085 r.clearcaches()
1084 r.revision(node)
1086 r.revision(node)
1085
1087
1086 chain = r._deltachain(rev)[0]
1088 chain = r._deltachain(rev)[0]
1087 data = segmentforrevs(chain[0], chain[-1])[1]
1089 data = segmentforrevs(chain[0], chain[-1])[1]
1088 rawchunks = getrawchunks(data, chain)
1090 rawchunks = getrawchunks(data, chain)
1089 bins = r._chunks(chain)
1091 bins = r._chunks(chain)
1090 text = str(bins[0])
1092 text = str(bins[0])
1091 bins = bins[1:]
1093 bins = bins[1:]
1092 text = mdiff.patches(text, bins)
1094 text = mdiff.patches(text, bins)
1093
1095
1094 benches = [
1096 benches = [
1095 (lambda: dorevision(), 'full'),
1097 (lambda: dorevision(), 'full'),
1096 (lambda: dodeltachain(rev), 'deltachain'),
1098 (lambda: dodeltachain(rev), 'deltachain'),
1097 (lambda: doread(chain), 'read'),
1099 (lambda: doread(chain), 'read'),
1098 (lambda: dorawchunks(data, chain), 'rawchunks'),
1100 (lambda: dorawchunks(data, chain), 'rawchunks'),
1099 (lambda: dodecompress(rawchunks), 'decompress'),
1101 (lambda: dodecompress(rawchunks), 'decompress'),
1100 (lambda: dopatch(text, bins), 'patch'),
1102 (lambda: dopatch(text, bins), 'patch'),
1101 (lambda: dohash(text), 'hash'),
1103 (lambda: dohash(text), 'hash'),
1102 ]
1104 ]
1103
1105
1104 for fn, title in benches:
1106 for fn, title in benches:
1105 timer, fm = gettimer(ui, opts)
1107 timer, fm = gettimer(ui, opts)
1106 timer(fn, title=title)
1108 timer(fn, title=title)
1107 fm.end()
1109 fm.end()
1108
1110
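The five phases listed in the docstring above correspond, roughly, to the following calls; this is a sketch reusing ``r``, ``rev``, ``node`` and the helpers defined in the command body, not an additional code path::

    chain = r._deltachain(rev)[0]                   # 1. compute the delta chain
    data = segmentforrevs(chain[0], chain[-1])[1]   # 2. read the raw segment
    rawchunks = getrawchunks(data, chain)           #    and slice it per revision
    bins = [r.decompress(c) for c in rawchunks]     # 3. decompress each chunk
    text = mdiff.patches(str(bins[0]), bins[1:])    # 4. apply the binary patches
    r.checkhash(text, node, rev=rev)                # 5. verify the fulltext hash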
1109 @command('perfrevset',
1111 @command('perfrevset',
1110 [('C', 'clear', False, 'clear volatile cache between each call.'),
1112 [('C', 'clear', False, 'clear volatile cache between each call.'),
1111 ('', 'contexts', False, 'obtain changectx for each revision')]
1113 ('', 'contexts', False, 'obtain changectx for each revision')]
1112 + formatteropts, "REVSET")
1114 + formatteropts, "REVSET")
1113 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1115 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1114 """benchmark the execution time of a revset
1116 """benchmark the execution time of a revset
1115
1117
1116 Use the --clear option if you need to evaluate the impact of rebuilding the
1118 Use the --clear option if you need to evaluate the impact of rebuilding the
1117 volatile revision set caches on revset execution. The volatile caches hold
1119 volatile revision set caches on revset execution. The volatile caches hold
1118 filtered-revision and obsolescence-related data."""
1120 filtered-revision and obsolescence-related data."""
1119 timer, fm = gettimer(ui, opts)
1121 timer, fm = gettimer(ui, opts)
1120 def d():
1122 def d():
1121 if clear:
1123 if clear:
1122 repo.invalidatevolatilesets()
1124 repo.invalidatevolatilesets()
1123 if contexts:
1125 if contexts:
1124 for ctx in repo.set(expr): pass
1126 for ctx in repo.set(expr): pass
1125 else:
1127 else:
1126 for r in repo.revs(expr): pass
1128 for r in repo.revs(expr): pass
1127 timer(d)
1129 timer(d)
1128 fm.end()
1130 fm.end()
1129
1131
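For reference, the two evaluation modes above differ only in whether a full changectx is built for each match; a minimal sketch with an illustrative revset expression::

    expr = 'heads(all())'           # illustrative revset expression
    repo.invalidatevolatilesets()   # what --clear does before each run
    for r in repo.revs(expr):       # default mode: bare revision numbers
        pass
    for ctx in repo.set(expr):      # --contexts: changectx objects
        pass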
1130 @command('perfvolatilesets', formatteropts)
1132 @command('perfvolatilesets', formatteropts)
1131 def perfvolatilesets(ui, repo, *names, **opts):
1133 def perfvolatilesets(ui, repo, *names, **opts):
1132 """benchmark the computation of various volatile set
1134 """benchmark the computation of various volatile set
1133
1135
1134 Volatile set computes element related to filtering and obsolescence."""
1136 Volatile set computes element related to filtering and obsolescence."""
1135 timer, fm = gettimer(ui, opts)
1137 timer, fm = gettimer(ui, opts)
1136 repo = repo.unfiltered()
1138 repo = repo.unfiltered()
1137
1139
1138 def getobs(name):
1140 def getobs(name):
1139 def d():
1141 def d():
1140 repo.invalidatevolatilesets()
1142 repo.invalidatevolatilesets()
1141 obsolete.getrevs(repo, name)
1143 obsolete.getrevs(repo, name)
1142 return d
1144 return d
1143
1145
1144 allobs = sorted(obsolete.cachefuncs)
1146 allobs = sorted(obsolete.cachefuncs)
1145 if names:
1147 if names:
1146 allobs = [n for n in allobs if n in names]
1148 allobs = [n for n in allobs if n in names]
1147
1149
1148 for name in allobs:
1150 for name in allobs:
1149 timer(getobs(name), title=name)
1151 timer(getobs(name), title=name)
1150
1152
1151 def getfiltered(name):
1153 def getfiltered(name):
1152 def d():
1154 def d():
1153 repo.invalidatevolatilesets()
1155 repo.invalidatevolatilesets()
1154 repoview.filterrevs(repo, name)
1156 repoview.filterrevs(repo, name)
1155 return d
1157 return d
1156
1158
1157 allfilter = sorted(repoview.filtertable)
1159 allfilter = sorted(repoview.filtertable)
1158 if names:
1160 if names:
1159 allfilter = [n for n in allfilter if n in names]
1161 allfilter = [n for n in allfilter if n in names]
1160
1162
1161 for name in allfilter:
1163 for name in allfilter:
1162 timer(getfiltered(name), title=name)
1164 timer(getfiltered(name), title=name)
1163 fm.end()
1165 fm.end()
1164
1166
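A minimal sketch of one timed unit above, using one obsolescence set name and one repoview filter name for illustration (both exist in current Mercurial, but any name from obsolete.cachefuncs or repoview.filtertable works)::

    repo = repo.unfiltered()
    repo.invalidatevolatilesets()
    obsolete.getrevs(repo, 'obsolete')     # recompute one volatile set
    repo.invalidatevolatilesets()
    repoview.filterrevs(repo, 'visible')   # recompute one filtered-revision set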
1165 @command('perfbranchmap',
1167 @command('perfbranchmap',
1166 [('f', 'full', False,
1168 [('f', 'full', False,
1167 'Includes build time of subset'),
1169 'Includes build time of subset'),
1168 ] + formatteropts)
1170 ] + formatteropts)
1169 def perfbranchmap(ui, repo, full=False, **opts):
1171 def perfbranchmap(ui, repo, full=False, **opts):
1170 """benchmark the update of a branchmap
1172 """benchmark the update of a branchmap
1171
1173
1172 This benchmarks the full repo.branchmap() call with branch cache reads and writes disabled
1174 This benchmarks the full repo.branchmap() call with branch cache reads and writes disabled
1173 """
1175 """
1174 timer, fm = gettimer(ui, opts)
1176 timer, fm = gettimer(ui, opts)
1175 def getbranchmap(filtername):
1177 def getbranchmap(filtername):
1176 """generate a benchmark function for the filtername"""
1178 """generate a benchmark function for the filtername"""
1177 if filtername is None:
1179 if filtername is None:
1178 view = repo
1180 view = repo
1179 else:
1181 else:
1180 view = repo.filtered(filtername)
1182 view = repo.filtered(filtername)
1181 def d():
1183 def d():
1182 if full:
1184 if full:
1183 view._branchcaches.clear()
1185 view._branchcaches.clear()
1184 else:
1186 else:
1185 view._branchcaches.pop(filtername, None)
1187 view._branchcaches.pop(filtername, None)
1186 view.branchmap()
1188 view.branchmap()
1187 return d
1189 return d
1188 # order filters from the smallest subset to the biggest subset
1190 # order filters from the smallest subset to the biggest subset
1189 possiblefilters = set(repoview.filtertable)
1191 possiblefilters = set(repoview.filtertable)
1190 subsettable = getbranchmapsubsettable()
1192 subsettable = getbranchmapsubsettable()
1191 allfilters = []
1193 allfilters = []
1192 while possiblefilters:
1194 while possiblefilters:
1193 for name in possiblefilters:
1195 for name in possiblefilters:
1194 subset = subsettable.get(name)
1196 subset = subsettable.get(name)
1195 if subset not in possiblefilters:
1197 if subset not in possiblefilters:
1196 break
1198 break
1197 else:
1199 else:
1198 assert False, 'subset cycle %s!' % possiblefilters
1200 assert False, 'subset cycle %s!' % possiblefilters
1199 allfilters.append(name)
1201 allfilters.append(name)
1200 possiblefilters.remove(name)
1202 possiblefilters.remove(name)
1201
1203
1202 # warm the cache
1204 # warm the cache
1203 if not full:
1205 if not full:
1204 for name in allfilters:
1206 for name in allfilters:
1205 repo.filtered(name).branchmap()
1207 repo.filtered(name).branchmap()
1206 # add unfiltered
1208 # add unfiltered
1207 allfilters.append(None)
1209 allfilters.append(None)
1208
1210
1209 branchcacheread = safeattrsetter(branchmap, 'read')
1211 branchcacheread = safeattrsetter(branchmap, 'read')
1210 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1212 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1211 branchcacheread.set(lambda repo: None)
1213 branchcacheread.set(lambda repo: None)
1212 branchcachewrite.set(lambda bc, repo: None)
1214 branchcachewrite.set(lambda bc, repo: None)
1213 try:
1215 try:
1214 for name in allfilters:
1216 for name in allfilters:
1215 timer(getbranchmap(name), title=str(name))
1217 timer(getbranchmap(name), title=str(name))
1216 finally:
1218 finally:
1217 branchcacheread.restore()
1219 branchcacheread.restore()
1218 branchcachewrite.restore()
1220 branchcachewrite.restore()
1219 fm.end()
1221 fm.end()
1220
1222
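The while loop above simply orders the filters so that each filter's subset is benchmarked (and therefore cached) before the filter itself. A self-contained sketch of that ordering, with hypothetical subsettable contents rather than the real branchmap data::

    subsettable = {'immutable': 'base', 'served': 'immutable',
                   'visible': 'served'}                  # hypothetical contents
    possiblefilters = {'base', 'immutable', 'served', 'visible'}
    allfilters = []
    while possiblefilters:
        for name in possiblefilters:
            if subsettable.get(name) not in possiblefilters:
                break
        allfilters.append(name)
        possiblefilters.remove(name)
    allfilters.append(None)           # None stands for the unfiltered repository
    # allfilters == ['base', 'immutable', 'served', 'visible', None]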
1221 @command('perfloadmarkers')
1223 @command('perfloadmarkers')
1222 def perfloadmarkers(ui, repo):
1224 def perfloadmarkers(ui, repo):
1223 """benchmark the time to parse the on-disk markers for a repo
1225 """benchmark the time to parse the on-disk markers for a repo
1224
1226
1225 Result is the number of markers in the repo."""
1227 Result is the number of markers in the repo."""
1226 timer, fm = gettimer(ui)
1228 timer, fm = gettimer(ui)
1227 svfs = getsvfs(repo)
1229 svfs = getsvfs(repo)
1228 timer(lambda: len(obsolete.obsstore(svfs)))
1230 timer(lambda: len(obsolete.obsstore(svfs)))
1229 fm.end()
1231 fm.end()
1230
1232
1231 @command('perflrucachedict', formatteropts +
1233 @command('perflrucachedict', formatteropts +
1232 [('', 'size', 4, 'size of cache'),
1234 [('', 'size', 4, 'size of cache'),
1233 ('', 'gets', 10000, 'number of key lookups'),
1235 ('', 'gets', 10000, 'number of key lookups'),
1234 ('', 'sets', 10000, 'number of key sets'),
1236 ('', 'sets', 10000, 'number of key sets'),
1235 ('', 'mixed', 10000, 'number of mixed mode operations'),
1237 ('', 'mixed', 10000, 'number of mixed mode operations'),
1236 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1238 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1237 norepo=True)
1239 norepo=True)
1238 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1240 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1239 mixedgetfreq=50, **opts):
1241 mixedgetfreq=50, **opts):
1240 def doinit():
1242 def doinit():
1241 for i in xrange(10000):
1243 for i in xrange(10000):
1242 util.lrucachedict(size)
1244 util.lrucachedict(size)
1243
1245
1244 values = []
1246 values = []
1245 for i in xrange(size):
1247 for i in xrange(size):
1246 values.append(random.randint(0, sys.maxint))
1248 values.append(random.randint(0, sys.maxint))
1247
1249
1248 # Get mode fills the cache and tests raw lookup performance with no
1250 # Get mode fills the cache and tests raw lookup performance with no
1249 # eviction.
1251 # eviction.
1250 getseq = []
1252 getseq = []
1251 for i in xrange(gets):
1253 for i in xrange(gets):
1252 getseq.append(random.choice(values))
1254 getseq.append(random.choice(values))
1253
1255
1254 def dogets():
1256 def dogets():
1255 d = util.lrucachedict(size)
1257 d = util.lrucachedict(size)
1256 for v in values:
1258 for v in values:
1257 d[v] = v
1259 d[v] = v
1258 for key in getseq:
1260 for key in getseq:
1259 value = d[key]
1261 value = d[key]
1260 value # silence pyflakes warning
1262 value # silence pyflakes warning
1261
1263
1262 # Set mode tests insertion speed with cache eviction.
1264 # Set mode tests insertion speed with cache eviction.
1263 setseq = []
1265 setseq = []
1264 for i in xrange(sets):
1266 for i in xrange(sets):
1265 setseq.append(random.randint(0, sys.maxint))
1267 setseq.append(random.randint(0, sys.maxint))
1266
1268
1267 def dosets():
1269 def dosets():
1268 d = util.lrucachedict(size)
1270 d = util.lrucachedict(size)
1269 for v in setseq:
1271 for v in setseq:
1270 d[v] = v
1272 d[v] = v
1271
1273
1272 # Mixed mode randomly performs gets and sets with eviction.
1274 # Mixed mode randomly performs gets and sets with eviction.
1273 mixedops = []
1275 mixedops = []
1274 for i in xrange(mixed):
1276 for i in xrange(mixed):
1275 r = random.randint(0, 100)
1277 r = random.randint(0, 100)
1276 if r < mixedgetfreq:
1278 if r < mixedgetfreq:
1277 op = 0
1279 op = 0
1278 else:
1280 else:
1279 op = 1
1281 op = 1
1280
1282
1281 mixedops.append((op, random.randint(0, size * 2)))
1283 mixedops.append((op, random.randint(0, size * 2)))
1282
1284
1283 def domixed():
1285 def domixed():
1284 d = util.lrucachedict(size)
1286 d = util.lrucachedict(size)
1285
1287
1286 for op, v in mixedops:
1288 for op, v in mixedops:
1287 if op == 0:
1289 if op == 0:
1288 try:
1290 try:
1289 d[v]
1291 d[v]
1290 except KeyError:
1292 except KeyError:
1291 pass
1293 pass
1292 else:
1294 else:
1293 d[v] = v
1295 d[v] = v
1294
1296
1295 benches = [
1297 benches = [
1296 (doinit, 'init'),
1298 (doinit, 'init'),
1297 (dogets, 'gets'),
1299 (dogets, 'gets'),
1298 (dosets, 'sets'),
1300 (dosets, 'sets'),
1299 (domixed, 'mixed')
1301 (domixed, 'mixed')
1300 ]
1302 ]
1301
1303
1302 for fn, title in benches:
1304 for fn, title in benches:
1303 timer, fm = gettimer(ui, opts)
1305 timer, fm = gettimer(ui, opts)
1304 timer(fn, title=title)
1306 timer(fn, title=title)
1305 fm.end()
1307 fm.end()
1306
1308
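The object under test behaves like a dict with a fixed capacity; a minimal sketch with an illustrative size of 4::

    d = util.lrucachedict(4)   # holds at most 4 entries
    for v in xrange(8):
        d[v] = v               # inserting past the size evicts old entries
    try:
        d[0]                   # 0 was evicted by the later insertions
    except KeyError:
        pass
    d[7]                       # the most recently inserted key is still cached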
1307 @command('perfwrite', formatteropts)
1309 @command('perfwrite', formatteropts)
1308 def perfwrite(ui, repo, **opts):
1310 def perfwrite(ui, repo, **opts):
1309 """microbenchmark ui.write
1311 """microbenchmark ui.write
1310 """
1312 """
1311 timer, fm = gettimer(ui, opts)
1313 timer, fm = gettimer(ui, opts)
1312 def write():
1314 def write():
1313 for i in range(100000):
1315 for i in range(100000):
1314 ui.write(('Testing write performance\n'))
1316 ui.write(('Testing write performance\n'))
1315 timer(write)
1317 timer(write)
1316 fm.end()
1318 fm.end()
1317
1319
1318 def uisetup(ui):
1320 def uisetup(ui):
1319 if (util.safehasattr(cmdutil, 'openrevlog') and
1321 if (util.safehasattr(cmdutil, 'openrevlog') and
1320 not util.safehasattr(commands, 'debugrevlogopts')):
1322 not util.safehasattr(commands, 'debugrevlogopts')):
1321 # for "historical portability":
1323 # for "historical portability":
1322 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1324 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1323 # 3.7 (or 5606f7d0d063). Therefore, the '--dir' option for
1325 # 3.7 (or 5606f7d0d063). Therefore, the '--dir' option for
1324 # openrevlog() should cause a failure on versions that lack it,
1326 # openrevlog() should cause a failure on versions that lack it,
1325 # because it has only been available since 3.5 (or 49c583ca48c4).
1327 # because it has only been available since 3.5 (or 49c583ca48c4).
1326 def openrevlog(orig, repo, cmd, file_, opts):
1328 def openrevlog(orig, repo, cmd, file_, opts):
1327 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1329 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1328 raise error.Abort("This version doesn't support --dir option",
1330 raise error.Abort("This version doesn't support --dir option",
1329 hint="use 3.5 or later")
1331 hint="use 3.5 or later")
1330 return orig(repo, cmd, file_, opts)
1332 return orig(repo, cmd, file_, opts)
1331 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
1333 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
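The same wrapping pattern generalizes to other historical API gaps; a minimal sketch where the wrapped attribute name is a placeholder, not a real cmdutil function::

    def wrapper(orig, *args, **kwargs):
        # adapt arguments for old Mercurial versions here, then delegate
        return orig(*args, **kwargs)
    extensions.wrapfunction(cmdutil, 'somefunc', wrapper)  # 'somefunc' is illustrative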
@@ -1,70 +1,69 b''
1 # Mercurial extension to provide the 'hg children' command
1 # Mercurial extension to provide the 'hg children' command
2 #
2 #
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 #
4 #
5 # Author(s):
5 # Author(s):
6 # Thomas Arendsen Hein <thomas@intevation.de>
6 # Thomas Arendsen Hein <thomas@intevation.de>
7 #
7 #
8 # This software may be used and distributed according to the terms of the
8 # This software may be used and distributed according to the terms of the
9 # GNU General Public License version 2 or any later version.
9 # GNU General Public License version 2 or any later version.
10
10
11 '''command to display child changesets (DEPRECATED)
11 '''command to display child changesets (DEPRECATED)
12
12
13 This extension is deprecated. You should use :hg:`log -r
13 This extension is deprecated. You should use :hg:`log -r
14 "children(REV)"` instead.
14 "children(REV)"` instead.
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 from mercurial.i18n import _
19 from mercurial.i18n import _
20 from mercurial import (
20 from mercurial import (
21 cmdutil,
21 cmdutil,
22 commands,
23 registrar,
22 registrar,
24 )
23 )
25
24
26 templateopts = commands.templateopts
25 templateopts = cmdutil.templateopts
27
26
28 cmdtable = {}
27 cmdtable = {}
29 command = registrar.command(cmdtable)
28 command = registrar.command(cmdtable)
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
31 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
32 # leave the attribute unspecified.
34 testedwith = 'ships-with-hg-core'
33 testedwith = 'ships-with-hg-core'
35
34
36 @command('children',
35 @command('children',
37 [('r', 'rev', '',
36 [('r', 'rev', '',
38 _('show children of the specified revision'), _('REV')),
37 _('show children of the specified revision'), _('REV')),
39 ] + templateopts,
38 ] + templateopts,
40 _('hg children [-r REV] [FILE]'),
39 _('hg children [-r REV] [FILE]'),
41 inferrepo=True)
40 inferrepo=True)
42 def children(ui, repo, file_=None, **opts):
41 def children(ui, repo, file_=None, **opts):
43 """show the children of the given or working directory revision
42 """show the children of the given or working directory revision
44
43
45 Print the children of the working directory's revisions. If a
44 Print the children of the working directory's revisions. If a
46 revision is given via -r/--rev, the children of that revision will
45 revision is given via -r/--rev, the children of that revision will
47 be printed. If a file argument is given, the revision in which the
46 be printed. If a file argument is given, the revision in which the
48 file was last changed (after the working directory revision or the
47 file was last changed (after the working directory revision or the
49 argument to --rev if given) is printed.
48 argument to --rev if given) is printed.
50
49
51 Please use :hg:`log` instead::
50 Please use :hg:`log` instead::
52
51
53 hg children => hg log -r "children()"
52 hg children => hg log -r "children()"
54 hg children -r REV => hg log -r "children(REV)"
53 hg children -r REV => hg log -r "children(REV)"
55
54
56 See :hg:`help log` and :hg:`help revsets.children`.
55 See :hg:`help log` and :hg:`help revsets.children`.
57
56
58 """
57 """
59 rev = opts.get('rev')
58 rev = opts.get('rev')
60 if file_:
59 if file_:
61 fctx = repo.filectx(file_, changeid=rev)
60 fctx = repo.filectx(file_, changeid=rev)
62 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
61 childctxs = [fcctx.changectx() for fcctx in fctx.children()]
63 else:
62 else:
64 ctx = repo[rev]
63 ctx = repo[rev]
65 childctxs = ctx.children()
64 childctxs = ctx.children()
66
65
67 displayer = cmdutil.show_changeset(ui, repo, opts)
66 displayer = cmdutil.show_changeset(ui, repo, opts)
68 for cctx in childctxs:
67 for cctx in childctxs:
69 displayer.show(cctx)
68 displayer.show(cctx)
70 displayer.close()
69 displayer.close()
@@ -1,212 +1,211 b''
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 import datetime
13 import datetime
14 import os
14 import os
15 import time
15 import time
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 cmdutil,
19 cmdutil,
20 commands,
21 encoding,
20 encoding,
22 patch,
21 patch,
23 registrar,
22 registrar,
24 scmutil,
23 scmutil,
25 util,
24 util,
26 )
25 )
27
26
28 cmdtable = {}
27 cmdtable = {}
29 command = registrar.command(cmdtable)
28 command = registrar.command(cmdtable)
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
31 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
32 # leave the attribute unspecified.
34 testedwith = 'ships-with-hg-core'
33 testedwith = 'ships-with-hg-core'
35
34
36 def maketemplater(ui, repo, tmpl):
35 def maketemplater(ui, repo, tmpl):
37 return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
36 return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
38
37
39 def changedlines(ui, repo, ctx1, ctx2, fns):
38 def changedlines(ui, repo, ctx1, ctx2, fns):
40 added, removed = 0, 0
39 added, removed = 0, 0
41 fmatch = scmutil.matchfiles(repo, fns)
40 fmatch = scmutil.matchfiles(repo, fns)
42 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
41 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
43 for l in diff.split('\n'):
42 for l in diff.split('\n'):
44 if l.startswith("+") and not l.startswith("+++ "):
43 if l.startswith("+") and not l.startswith("+++ "):
45 added += 1
44 added += 1
46 elif l.startswith("-") and not l.startswith("--- "):
45 elif l.startswith("-") and not l.startswith("--- "):
47 removed += 1
46 removed += 1
48 return (added, removed)
47 return (added, removed)
49
48
50 def countrate(ui, repo, amap, *pats, **opts):
49 def countrate(ui, repo, amap, *pats, **opts):
51 """Calculate stats"""
50 """Calculate stats"""
52 if opts.get('dateformat'):
51 if opts.get('dateformat'):
53 def getkey(ctx):
52 def getkey(ctx):
54 t, tz = ctx.date()
53 t, tz = ctx.date()
55 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
56 return date.strftime(opts['dateformat'])
55 return date.strftime(opts['dateformat'])
57 else:
56 else:
58 tmpl = opts.get('oldtemplate') or opts.get('template')
57 tmpl = opts.get('oldtemplate') or opts.get('template')
59 tmpl = maketemplater(ui, repo, tmpl)
58 tmpl = maketemplater(ui, repo, tmpl)
60 def getkey(ctx):
59 def getkey(ctx):
61 ui.pushbuffer()
60 ui.pushbuffer()
62 tmpl.show(ctx)
61 tmpl.show(ctx)
63 return ui.popbuffer()
62 return ui.popbuffer()
64
63
65 state = {'count': 0}
64 state = {'count': 0}
66 rate = {}
65 rate = {}
67 df = False
66 df = False
68 if opts.get('date'):
67 if opts.get('date'):
69 df = util.matchdate(opts['date'])
68 df = util.matchdate(opts['date'])
70
69
71 m = scmutil.match(repo[None], pats, opts)
70 m = scmutil.match(repo[None], pats, opts)
72 def prep(ctx, fns):
71 def prep(ctx, fns):
73 rev = ctx.rev()
72 rev = ctx.rev()
74 if df and not df(ctx.date()[0]): # doesn't match date format
73 if df and not df(ctx.date()[0]): # doesn't match date format
75 return
74 return
76
75
77 key = getkey(ctx).strip()
76 key = getkey(ctx).strip()
78 key = amap.get(key, key) # alias remap
77 key = amap.get(key, key) # alias remap
79 if opts.get('changesets'):
78 if opts.get('changesets'):
80 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
79 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
81 else:
80 else:
82 parents = ctx.parents()
81 parents = ctx.parents()
83 if len(parents) > 1:
82 if len(parents) > 1:
84 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
83 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
85 return
84 return
86
85
87 ctx1 = parents[0]
86 ctx1 = parents[0]
88 lines = changedlines(ui, repo, ctx1, ctx, fns)
87 lines = changedlines(ui, repo, ctx1, ctx, fns)
89 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
88 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
90
89
91 state['count'] += 1
90 state['count'] += 1
92 ui.progress(_('analyzing'), state['count'], total=len(repo),
91 ui.progress(_('analyzing'), state['count'], total=len(repo),
93 unit=_('revisions'))
92 unit=_('revisions'))
94
93
95 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
94 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
96 continue
95 continue
97
96
98 ui.progress(_('analyzing'), None)
97 ui.progress(_('analyzing'), None)
99
98
100 return rate
99 return rate
101
100
102
101
103 @command('churn',
102 @command('churn',
104 [('r', 'rev', [],
103 [('r', 'rev', [],
105 _('count rate for the specified revision or revset'), _('REV')),
104 _('count rate for the specified revision or revset'), _('REV')),
106 ('d', 'date', '',
105 ('d', 'date', '',
107 _('count rate for revisions matching date spec'), _('DATE')),
106 _('count rate for revisions matching date spec'), _('DATE')),
108 ('t', 'oldtemplate', '',
107 ('t', 'oldtemplate', '',
109 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
108 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
110 ('T', 'template', '{author|email}',
109 ('T', 'template', '{author|email}',
111 _('template to group changesets'), _('TEMPLATE')),
110 _('template to group changesets'), _('TEMPLATE')),
112 ('f', 'dateformat', '',
111 ('f', 'dateformat', '',
113 _('strftime-compatible format for grouping by date'), _('FORMAT')),
112 _('strftime-compatible format for grouping by date'), _('FORMAT')),
114 ('c', 'changesets', False, _('count rate by number of changesets')),
113 ('c', 'changesets', False, _('count rate by number of changesets')),
115 ('s', 'sort', False, _('sort by key (default: sort by count)')),
114 ('s', 'sort', False, _('sort by key (default: sort by count)')),
116 ('', 'diffstat', False, _('display added/removed lines separately')),
115 ('', 'diffstat', False, _('display added/removed lines separately')),
117 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
116 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
118 ] + commands.walkopts,
117 ] + cmdutil.walkopts,
119 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
118 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
120 inferrepo=True)
119 inferrepo=True)
121 def churn(ui, repo, *pats, **opts):
120 def churn(ui, repo, *pats, **opts):
122 '''histogram of changes to the repository
121 '''histogram of changes to the repository
123
122
124 This command will display a histogram representing the number
123 This command will display a histogram representing the number
125 of changed lines or revisions, grouped according to the given
124 of changed lines or revisions, grouped according to the given
126 template. The default template will group changes by author.
125 template. The default template will group changes by author.
127 The --dateformat option may be used to group the results by
126 The --dateformat option may be used to group the results by
128 date instead.
127 date instead.
129
128
130 Statistics are based on the number of changed lines, or
129 Statistics are based on the number of changed lines, or
131 alternatively the number of matching revisions if the
130 alternatively the number of matching revisions if the
132 --changesets option is specified.
131 --changesets option is specified.
133
132
134 Examples::
133 Examples::
135
134
136 # display count of changed lines for every committer
135 # display count of changed lines for every committer
137 hg churn -T "{author|email}"
136 hg churn -T "{author|email}"
138
137
139 # display daily activity graph
138 # display daily activity graph
140 hg churn -f "%H" -s -c
139 hg churn -f "%H" -s -c
141
140
142 # display activity of developers by month
141 # display activity of developers by month
143 hg churn -f "%Y-%m" -s -c
142 hg churn -f "%Y-%m" -s -c
144
143
145 # display count of lines changed in every year
144 # display count of lines changed in every year
146 hg churn -f "%Y" -s
145 hg churn -f "%Y" -s
147
146
148 It is possible to map alternate email addresses to a main address
147 It is possible to map alternate email addresses to a main address
149 by providing a file using the following format::
148 by providing a file using the following format::
150
149
151 <alias email> = <actual email>
150 <alias email> = <actual email>
152
151
153 Such a file may be specified with the --aliases option; otherwise,
152 Such a file may be specified with the --aliases option; otherwise,
154 a .hgchurn file will be looked for in the working directory root.
153 a .hgchurn file will be looked for in the working directory root.
155 Aliases will be split from the rightmost "=".
154 Aliases will be split from the rightmost "=".
156 '''
155 '''
157 def pad(s, l):
156 def pad(s, l):
158 return s + " " * (l - encoding.colwidth(s))
157 return s + " " * (l - encoding.colwidth(s))
159
158
160 amap = {}
159 amap = {}
161 aliases = opts.get('aliases')
160 aliases = opts.get('aliases')
162 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
161 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
163 aliases = repo.wjoin('.hgchurn')
162 aliases = repo.wjoin('.hgchurn')
164 if aliases:
163 if aliases:
165 for l in open(aliases, "r"):
164 for l in open(aliases, "r"):
166 try:
165 try:
167 alias, actual = l.rsplit('=' in l and '=' or None, 1)
166 alias, actual = l.rsplit('=' in l and '=' or None, 1)
168 amap[alias.strip()] = actual.strip()
167 amap[alias.strip()] = actual.strip()
169 except ValueError:
168 except ValueError:
170 l = l.strip()
169 l = l.strip()
171 if l:
170 if l:
172 ui.warn(_("skipping malformed alias: %s\n") % l)
171 ui.warn(_("skipping malformed alias: %s\n") % l)
173 continue
172 continue
174
173
175 rate = countrate(ui, repo, amap, *pats, **opts).items()
174 rate = countrate(ui, repo, amap, *pats, **opts).items()
176 if not rate:
175 if not rate:
177 return
176 return
178
177
179 if opts.get('sort'):
178 if opts.get('sort'):
180 rate.sort()
179 rate.sort()
181 else:
180 else:
182 rate.sort(key=lambda x: (-sum(x[1]), x))
181 rate.sort(key=lambda x: (-sum(x[1]), x))
183
182
184 # Be careful not to have a zero maxcount (issue833)
183 # Be careful not to have a zero maxcount (issue833)
185 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
184 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
186 maxname = max(len(k) for k, v in rate)
185 maxname = max(len(k) for k, v in rate)
187
186
188 ttywidth = ui.termwidth()
187 ttywidth = ui.termwidth()
189 ui.debug("assuming %i character terminal\n" % ttywidth)
188 ui.debug("assuming %i character terminal\n" % ttywidth)
190 width = ttywidth - maxname - 2 - 2 - 2
189 width = ttywidth - maxname - 2 - 2 - 2
191
190
192 if opts.get('diffstat'):
191 if opts.get('diffstat'):
193 width -= 15
192 width -= 15
194 def format(name, diffstat):
193 def format(name, diffstat):
195 added, removed = diffstat
194 added, removed = diffstat
196 return "%s %15s %s%s\n" % (pad(name, maxname),
195 return "%s %15s %s%s\n" % (pad(name, maxname),
197 '+%d/-%d' % (added, removed),
196 '+%d/-%d' % (added, removed),
198 ui.label('+' * charnum(added),
197 ui.label('+' * charnum(added),
199 'diffstat.inserted'),
198 'diffstat.inserted'),
200 ui.label('-' * charnum(removed),
199 ui.label('-' * charnum(removed),
201 'diffstat.deleted'))
200 'diffstat.deleted'))
202 else:
201 else:
203 width -= 6
202 width -= 6
204 def format(name, count):
203 def format(name, count):
205 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
204 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
206 '*' * charnum(sum(count)))
205 '*' * charnum(sum(count)))
207
206
208 def charnum(count):
207 def charnum(count):
209 return int(round(count * width / maxcount))
208 return int(round(count * width / maxcount))
210
209
211 for name, count in rate:
210 for name, count in rate:
212 ui.write(format(name, count))
211 ui.write(format(name, count))
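To make the alias handling above concrete, here is a hypothetical aliases file and the mapping the rightmost-``=`` split produces from it (addresses are made up)::

    # .hgchurn (or the file passed via --aliases), hypothetical contents:
    #   bugs@example.com = alice@example.com
    #   Alice <alice.old@example.com> = alice@example.com
    # After the rsplit('=') parsing above, amap is:
    amap = {'bugs@example.com': 'alice@example.com',
            'Alice <alice.old@example.com>': 'alice@example.com'}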
@@ -1,401 +1,400 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or a revision with the working directory. The external
11 to compare revisions, or a revision with the working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 The extdiff extension also allows you to configure new diff commands, so
16 The extdiff extension also allows you to configure new diff commands, so
17 you do not need to type :hg:`extdiff -p kdiff3` always. ::
17 you do not need to type :hg:`extdiff -p kdiff3` always. ::
18
18
19 [extdiff]
19 [extdiff]
20 # add new command that runs GNU diff(1) in 'context diff' mode
20 # add new command that runs GNU diff(1) in 'context diff' mode
21 cdiff = gdiff -Nprc5
21 cdiff = gdiff -Nprc5
22 ## or the old way:
22 ## or the old way:
23 #cmd.cdiff = gdiff
23 #cmd.cdiff = gdiff
24 #opts.cdiff = -Nprc5
24 #opts.cdiff = -Nprc5
25
25
26 # add new command called meld, runs meld (no need to name twice). If
26 # add new command called meld, runs meld (no need to name twice). If
27 # the meld executable is not available, the meld tool in [merge-tools]
27 # the meld executable is not available, the meld tool in [merge-tools]
28 # will be used, if available
28 # will be used, if available
29 meld =
29 meld =
30
30
31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
31 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
32 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
32 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
33 # users should be sure to put "let g:DirDiffDynamicDiffText = 1" in
33 # users should be sure to put "let g:DirDiffDynamicDiffText = 1" in
34 # their .vimrc
34 # their .vimrc
35 vimdiff = gvim -f "+next" \\
35 vimdiff = gvim -f "+next" \\
36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
36 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
37
37
38 Tool arguments can include variables that are expanded at runtime::
38 Tool arguments can include variables that are expanded at runtime::
39
39
40 $parent1, $plabel1 - filename, descriptive label of first parent
40 $parent1, $plabel1 - filename, descriptive label of first parent
41 $child, $clabel - filename, descriptive label of child revision
41 $child, $clabel - filename, descriptive label of child revision
42 $parent2, $plabel2 - filename, descriptive label of second parent
42 $parent2, $plabel2 - filename, descriptive label of second parent
43 $root - repository root
43 $root - repository root
44 $parent is an alias for $parent1.
44 $parent is an alias for $parent1.
45
45
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
47 sections for diff tool arguments, when none are specified in [extdiff].
47 sections for diff tool arguments, when none are specified in [extdiff].
48
48
49 ::
49 ::
50
50
51 [extdiff]
51 [extdiff]
52 kdiff3 =
52 kdiff3 =
53
53
54 [diff-tools]
54 [diff-tools]
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
56
56
57 You can use -I/-X and a list of file or directory names as with the normal
57 You can use -I/-X and a list of file or directory names as with the normal
58 :hg:`diff` command. The extdiff extension makes snapshots of only
58 :hg:`diff` command. The extdiff extension makes snapshots of only
59 needed files, so running the external diff program will actually be
59 needed files, so running the external diff program will actually be
60 pretty fast (at least faster than having to compare the entire tree).
60 pretty fast (at least faster than having to compare the entire tree).
61 '''
61 '''
62
62
63 from __future__ import absolute_import
63 from __future__ import absolute_import
64
64
65 import os
65 import os
66 import re
66 import re
67 import shutil
67 import shutil
68 import tempfile
68 import tempfile
69 from mercurial.i18n import _
69 from mercurial.i18n import _
70 from mercurial.node import (
70 from mercurial.node import (
71 nullid,
71 nullid,
72 short,
72 short,
73 )
73 )
74 from mercurial import (
74 from mercurial import (
75 archival,
75 archival,
76 cmdutil,
76 cmdutil,
77 commands,
78 error,
77 error,
79 filemerge,
78 filemerge,
80 pycompat,
79 pycompat,
81 registrar,
80 registrar,
82 scmutil,
81 scmutil,
83 util,
82 util,
84 )
83 )
85
84
86 cmdtable = {}
85 cmdtable = {}
87 command = registrar.command(cmdtable)
86 command = registrar.command(cmdtable)
88 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
87 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
89 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
88 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
90 # be specifying the version(s) of Mercurial they are tested with, or
89 # be specifying the version(s) of Mercurial they are tested with, or
91 # leave the attribute unspecified.
90 # leave the attribute unspecified.
92 testedwith = 'ships-with-hg-core'
91 testedwith = 'ships-with-hg-core'
93
92
94 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
93 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
95 '''snapshot files as of some revision
94 '''snapshot files as of some revision
96 if not using snapshot, -I/-X does not work and recursive diff
95 if not using snapshot, -I/-X does not work and recursive diff
97 in tools like kdiff3 and meld displays too many files.'''
96 in tools like kdiff3 and meld displays too many files.'''
98 dirname = os.path.basename(repo.root)
97 dirname = os.path.basename(repo.root)
99 if dirname == "":
98 if dirname == "":
100 dirname = "root"
99 dirname = "root"
101 if node is not None:
100 if node is not None:
102 dirname = '%s.%s' % (dirname, short(node))
101 dirname = '%s.%s' % (dirname, short(node))
103 base = os.path.join(tmproot, dirname)
102 base = os.path.join(tmproot, dirname)
104 os.mkdir(base)
103 os.mkdir(base)
105 fnsandstat = []
104 fnsandstat = []
106
105
107 if node is not None:
106 if node is not None:
108 ui.note(_('making snapshot of %d files from rev %s\n') %
107 ui.note(_('making snapshot of %d files from rev %s\n') %
109 (len(files), short(node)))
108 (len(files), short(node)))
110 else:
109 else:
111 ui.note(_('making snapshot of %d files from working directory\n') %
110 ui.note(_('making snapshot of %d files from working directory\n') %
112 (len(files)))
111 (len(files)))
113
112
114 if files:
113 if files:
115 repo.ui.setconfig("ui", "archivemeta", False)
114 repo.ui.setconfig("ui", "archivemeta", False)
116
115
117 archival.archive(repo, base, node, 'files',
116 archival.archive(repo, base, node, 'files',
118 matchfn=scmutil.matchfiles(repo, files),
117 matchfn=scmutil.matchfiles(repo, files),
119 subrepos=listsubrepos)
118 subrepos=listsubrepos)
120
119
121 for fn in sorted(files):
120 for fn in sorted(files):
122 wfn = util.pconvert(fn)
121 wfn = util.pconvert(fn)
123 ui.note(' %s\n' % wfn)
122 ui.note(' %s\n' % wfn)
124
123
125 if node is None:
124 if node is None:
126 dest = os.path.join(base, wfn)
125 dest = os.path.join(base, wfn)
127
126
128 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
127 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
129 return dirname, fnsandstat
128 return dirname, fnsandstat
130
129
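A minimal sketch of how snapshot() is used; the file name and the plain ``diff`` invocation are illustrative, and ``ui`` and ``repo`` are assumed to already be in hand (dodiff() below does the real work)::

    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        # snapshot the file at the working directory parent and as modified
        dir1 = snapshot(ui, repo, ['a.txt'], repo['.'].node(), tmproot, False)[0]
        dir2, fnsandstat = snapshot(ui, repo, ['a.txt'], None, tmproot, False)
        cmdline = 'diff -r %s %s' % (util.shellquote(dir1), util.shellquote(dir2))
        ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
    finally:
        shutil.rmtree(tmproot)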
131 def dodiff(ui, repo, cmdline, pats, opts):
130 def dodiff(ui, repo, cmdline, pats, opts):
132 '''Do the actual diff:
131 '''Do the actual diff:
133
132
134 - copy to a temp structure if diffing 2 internal revisions
133 - copy to a temp structure if diffing 2 internal revisions
135 - copy to a temp structure if diffing working revision with
134 - copy to a temp structure if diffing working revision with
136 another one and more than 1 file is changed
135 another one and more than 1 file is changed
137 - just invoke the diff for a single file in the working dir
136 - just invoke the diff for a single file in the working dir
138 '''
137 '''
139
138
140 revs = opts.get('rev')
139 revs = opts.get('rev')
141 change = opts.get('change')
140 change = opts.get('change')
142 do3way = '$parent2' in cmdline
141 do3way = '$parent2' in cmdline
143
142
144 if revs and change:
143 if revs and change:
145 msg = _('cannot specify --rev and --change at the same time')
144 msg = _('cannot specify --rev and --change at the same time')
146 raise error.Abort(msg)
145 raise error.Abort(msg)
147 elif change:
146 elif change:
148 node2 = scmutil.revsingle(repo, change, None).node()
147 node2 = scmutil.revsingle(repo, change, None).node()
149 node1a, node1b = repo.changelog.parents(node2)
148 node1a, node1b = repo.changelog.parents(node2)
150 else:
149 else:
151 node1a, node2 = scmutil.revpair(repo, revs)
150 node1a, node2 = scmutil.revpair(repo, revs)
152 if not revs:
151 if not revs:
153 node1b = repo.dirstate.p2()
152 node1b = repo.dirstate.p2()
154 else:
153 else:
155 node1b = nullid
154 node1b = nullid
156
155
157 # Disable 3-way merge if there is only one parent
156 # Disable 3-way merge if there is only one parent
158 if do3way:
157 if do3way:
159 if node1b == nullid:
158 if node1b == nullid:
160 do3way = False
159 do3way = False
161
160
162 subrepos=opts.get('subrepos')
161 subrepos=opts.get('subrepos')
163
162
164 matcher = scmutil.match(repo[node2], pats, opts)
163 matcher = scmutil.match(repo[node2], pats, opts)
165
164
166 if opts.get('patch'):
165 if opts.get('patch'):
167 if subrepos:
166 if subrepos:
168 raise error.Abort(_('--patch cannot be used with --subrepos'))
167 raise error.Abort(_('--patch cannot be used with --subrepos'))
169 if node2 is None:
168 if node2 is None:
170 raise error.Abort(_('--patch requires two revisions'))
169 raise error.Abort(_('--patch requires two revisions'))
171 else:
170 else:
172 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
171 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
173 listsubrepos=subrepos)[:3])
172 listsubrepos=subrepos)[:3])
174 if do3way:
173 if do3way:
175 mod_b, add_b, rem_b = map(set,
174 mod_b, add_b, rem_b = map(set,
176 repo.status(node1b, node2, matcher,
175 repo.status(node1b, node2, matcher,
177 listsubrepos=subrepos)[:3])
176 listsubrepos=subrepos)[:3])
178 else:
177 else:
179 mod_b, add_b, rem_b = set(), set(), set()
178 mod_b, add_b, rem_b = set(), set(), set()
180 modadd = mod_a | add_a | mod_b | add_b
179 modadd = mod_a | add_a | mod_b | add_b
181 common = modadd | rem_a | rem_b
180 common = modadd | rem_a | rem_b
182 if not common:
181 if not common:
183 return 0
182 return 0
184
183
185 tmproot = tempfile.mkdtemp(prefix='extdiff.')
184 tmproot = tempfile.mkdtemp(prefix='extdiff.')
186 try:
185 try:
187 if not opts.get('patch'):
186 if not opts.get('patch'):
188 # Always make a copy of node1a (and node1b, if applicable)
187 # Always make a copy of node1a (and node1b, if applicable)
189 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
188 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
190 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
189 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
191 subrepos)[0]
190 subrepos)[0]
192 rev1a = '@%d' % repo[node1a].rev()
191 rev1a = '@%d' % repo[node1a].rev()
193 if do3way:
192 if do3way:
194 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
193 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
195 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
194 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
196 subrepos)[0]
195 subrepos)[0]
197 rev1b = '@%d' % repo[node1b].rev()
196 rev1b = '@%d' % repo[node1b].rev()
198 else:
197 else:
199 dir1b = None
198 dir1b = None
200 rev1b = ''
199 rev1b = ''
201
200
202 fnsandstat = []
201 fnsandstat = []
203
202
204 # If node2 is not the wc or there is >1 change, copy it
203 # If node2 is not the wc or there is >1 change, copy it
205 dir2root = ''
204 dir2root = ''
206 rev2 = ''
205 rev2 = ''
207 if node2:
206 if node2:
208 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
207 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
209 rev2 = '@%d' % repo[node2].rev()
208 rev2 = '@%d' % repo[node2].rev()
210 elif len(common) > 1:
209 elif len(common) > 1:
211 # we only actually need to get the files to copy back to
210 # we only actually need to get the files to copy back to
212 # the working dir in this case (because the other cases
211 # the working dir in this case (because the other cases
213 # are: diffing 2 revisions or single file -- in which case
212 # are: diffing 2 revisions or single file -- in which case
214 # the file is already directly passed to the diff tool).
213 # the file is already directly passed to the diff tool).
215 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
214 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
216 subrepos)
215 subrepos)
217 else:
216 else:
218 # This lets the diff tool open the changed file directly
217 # This lets the diff tool open the changed file directly
219 dir2 = ''
218 dir2 = ''
220 dir2root = repo.root
219 dir2root = repo.root
221
220
222 label1a = rev1a
221 label1a = rev1a
223 label1b = rev1b
222 label1b = rev1b
224 label2 = rev2
223 label2 = rev2
225
224
226 # If only one change, diff the files instead of the directories
225 # If only one change, diff the files instead of the directories
227 # Handle bogus modifies correctly by checking if the files exist
226 # Handle bogus modifies correctly by checking if the files exist
228 if len(common) == 1:
227 if len(common) == 1:
229 common_file = util.localpath(common.pop())
228 common_file = util.localpath(common.pop())
230 dir1a = os.path.join(tmproot, dir1a, common_file)
229 dir1a = os.path.join(tmproot, dir1a, common_file)
231 label1a = common_file + rev1a
230 label1a = common_file + rev1a
232 if not os.path.isfile(dir1a):
231 if not os.path.isfile(dir1a):
233 dir1a = os.devnull
232 dir1a = os.devnull
234 if do3way:
233 if do3way:
235 dir1b = os.path.join(tmproot, dir1b, common_file)
234 dir1b = os.path.join(tmproot, dir1b, common_file)
236 label1b = common_file + rev1b
235 label1b = common_file + rev1b
237 if not os.path.isfile(dir1b):
236 if not os.path.isfile(dir1b):
238 dir1b = os.devnull
237 dir1b = os.devnull
239 dir2 = os.path.join(dir2root, dir2, common_file)
238 dir2 = os.path.join(dir2root, dir2, common_file)
240 label2 = common_file + rev2
239 label2 = common_file + rev2
241 else:
240 else:
242 template = 'hg-%h.patch'
241 template = 'hg-%h.patch'
243 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
242 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
244 template=repo.vfs.reljoin(tmproot, template),
243 template=repo.vfs.reljoin(tmproot, template),
245 match=matcher)
244 match=matcher)
246 label1a = cmdutil.makefilename(repo, template, node1a)
245 label1a = cmdutil.makefilename(repo, template, node1a)
247 label2 = cmdutil.makefilename(repo, template, node2)
246 label2 = cmdutil.makefilename(repo, template, node2)
248 dir1a = repo.vfs.reljoin(tmproot, label1a)
247 dir1a = repo.vfs.reljoin(tmproot, label1a)
249 dir2 = repo.vfs.reljoin(tmproot, label2)
248 dir2 = repo.vfs.reljoin(tmproot, label2)
250 dir1b = None
249 dir1b = None
251 label1b = None
250 label1b = None
252 fnsandstat = []
251 fnsandstat = []
253
252
254 # Function to quote file/dir names in the argument string.
253 # Function to quote file/dir names in the argument string.
255 # When not operating in 3-way mode, an empty string is
254 # When not operating in 3-way mode, an empty string is
256 # returned for parent2
255 # returned for parent2
257 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
256 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
258 'plabel1': label1a, 'plabel2': label1b,
257 'plabel1': label1a, 'plabel2': label1b,
259 'clabel': label2, 'child': dir2,
258 'clabel': label2, 'child': dir2,
260 'root': repo.root}
259 'root': repo.root}
261 def quote(match):
260 def quote(match):
262 pre = match.group(2)
261 pre = match.group(2)
263 key = match.group(3)
262 key = match.group(3)
264 if not do3way and key == 'parent2':
263 if not do3way and key == 'parent2':
265 return pre
264 return pre
266 return pre + util.shellquote(replace[key])
265 return pre + util.shellquote(replace[key])
267
266
268 # Match parent2 first, so 'parent1?' will match both parent1 and parent
267 # Match parent2 first, so 'parent1?' will match both parent1 and parent
269 regex = (r'''(['"]?)([^\s'"$]*)'''
268 regex = (r'''(['"]?)([^\s'"$]*)'''
270 r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
269 r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
271 if not do3way and not re.search(regex, cmdline):
270 if not do3way and not re.search(regex, cmdline):
272 cmdline += ' $parent1 $child'
271 cmdline += ' $parent1 $child'
273 cmdline = re.sub(regex, quote, cmdline)
272 cmdline = re.sub(regex, quote, cmdline)
274
273
275 ui.debug('running %r in %s\n' % (cmdline, tmproot))
274 ui.debug('running %r in %s\n' % (cmdline, tmproot))
276 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
275 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
277
276
278 for copy_fn, working_fn, st in fnsandstat:
277 for copy_fn, working_fn, st in fnsandstat:
279 cpstat = os.lstat(copy_fn)
278 cpstat = os.lstat(copy_fn)
280 # Some tools copy the file and attributes, so mtime may not detect
279 # Some tools copy the file and attributes, so mtime may not detect
281 # all changes. A size check will detect more cases, but not all.
280 # all changes. A size check will detect more cases, but not all.
282 # The only certain way to detect every case is to diff all files,
281 # The only certain way to detect every case is to diff all files,
283 # which could be expensive.
282 # which could be expensive.
284 # copyfile() carries over the permission, so the mode check could
283 # copyfile() carries over the permission, so the mode check could
285 # be in an 'elif' branch, but it is part of this check to catch the
284 # be in an 'elif' branch, but it is part of this check to catch the
286 # case where the file has changed without affecting mtime or size.
285 # case where the file has changed without affecting mtime or size.
287 if (cpstat.st_mtime != st.st_mtime or cpstat.st_size != st.st_size
286 if (cpstat.st_mtime != st.st_mtime or cpstat.st_size != st.st_size
288 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
287 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
289 ui.debug('file changed while diffing. '
288 ui.debug('file changed while diffing. '
290 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
289 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
291 util.copyfile(copy_fn, working_fn)
290 util.copyfile(copy_fn, working_fn)
292
291
293 return 1
292 return 1
294 finally:
293 finally:
295 ui.note(_('cleaning up temp directory\n'))
294 ui.note(_('cleaning up temp directory\n'))
296 shutil.rmtree(tmproot)
295 shutil.rmtree(tmproot)
297
296
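The placeholder handling above is the trickiest part of dodiff(): $parent/$parent1, $parent2, $child and the label variables are expanded with shell quoting, and a bare '$parent1 $child' is appended when the command line names none of them. Below is a small standalone sketch of that expansion; the shellquote stand-in, the tool name and the paths are made-up for illustration and are not part of the change itself.

import re

def shellquote(s):  # stand-in for mercurial.util.shellquote (POSIX-style)
    return "'%s'" % s.replace("'", "'\\''")

replace = {'parent': '/tmp/extdiff.abc/a', 'parent1': '/tmp/extdiff.abc/a',
           'parent2': None, 'child': '/tmp/extdiff.abc/b',
           'plabel1': '@10', 'plabel2': '', 'clabel': '@11', 'root': '/repo'}
regex = (r'''(['"]?)([^\s'"$]*)'''
         r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')

def quote(match):
    pre, key = match.group(2), match.group(3)
    if key == 'parent2':        # 2-way diff: the placeholder simply vanishes
        return pre
    return pre + shellquote(replace[key])

print(re.sub(regex, quote, 'kdiff3 $parent1 $child'))
# -> kdiff3 '/tmp/extdiff.abc/a' '/tmp/extdiff.abc/b'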
298 extdiffopts = [
297 extdiffopts = [
299 ('o', 'option', [],
298 ('o', 'option', [],
300 _('pass option to comparison program'), _('OPT')),
299 _('pass option to comparison program'), _('OPT')),
301 ('r', 'rev', [], _('revision'), _('REV')),
300 ('r', 'rev', [], _('revision'), _('REV')),
302 ('c', 'change', '', _('change made by revision'), _('REV')),
301 ('c', 'change', '', _('change made by revision'), _('REV')),
303 ('', 'patch', None, _('compare patches for two revisions'))
302 ('', 'patch', None, _('compare patches for two revisions'))
304 ] + commands.walkopts + commands.subrepoopts
303 ] + cmdutil.walkopts + cmdutil.subrepoopts
305
304
306 @command('extdiff',
305 @command('extdiff',
307 [('p', 'program', '', _('comparison program to run'), _('CMD')),
306 [('p', 'program', '', _('comparison program to run'), _('CMD')),
308 ] + extdiffopts,
307 ] + extdiffopts,
309 _('hg extdiff [OPT]... [FILE]...'),
308 _('hg extdiff [OPT]... [FILE]...'),
310 inferrepo=True)
309 inferrepo=True)
311 def extdiff(ui, repo, *pats, **opts):
310 def extdiff(ui, repo, *pats, **opts):
312 '''use external program to diff repository (or selected files)
311 '''use external program to diff repository (or selected files)
313
312
314 Show differences between revisions for the specified files, using
313 Show differences between revisions for the specified files, using
315 an external program. The default program used is diff, with
314 an external program. The default program used is diff, with
316 default options "-Npru".
315 default options "-Npru".
317
316
318 To select a different program, use the -p/--program option. The
317 To select a different program, use the -p/--program option. The
319 program will be passed the names of two directories to compare. To
318 program will be passed the names of two directories to compare. To
320 pass additional options to the program, use -o/--option. These
319 pass additional options to the program, use -o/--option. These
321 will be passed before the names of the directories to compare.
320 will be passed before the names of the directories to compare.
322
321
323 When two revision arguments are given, then changes are shown
322 When two revision arguments are given, then changes are shown
324 between those revisions. If only one revision is specified then
323 between those revisions. If only one revision is specified then
325 that revision is compared to the working directory, and, when no
324 that revision is compared to the working directory, and, when no
326 revisions are specified, the working directory files are compared
325 revisions are specified, the working directory files are compared
327 to its parent.'''
326 to its parent.'''
328 program = opts.get('program')
327 program = opts.get('program')
329 option = opts.get('option')
328 option = opts.get('option')
330 if not program:
329 if not program:
331 program = 'diff'
330 program = 'diff'
332 option = option or ['-Npru']
331 option = option or ['-Npru']
333 cmdline = ' '.join(map(util.shellquote, [program] + option))
332 cmdline = ' '.join(map(util.shellquote, [program] + option))
334 return dodiff(ui, repo, cmdline, pats, opts)
333 return dodiff(ui, repo, cmdline, pats, opts)
335
334
336 class savedcmd(object):
335 class savedcmd(object):
337 """use external program to diff repository (or selected files)
336 """use external program to diff repository (or selected files)
338
337
339 Show differences between revisions for the specified files, using
338 Show differences between revisions for the specified files, using
340 the following program::
339 the following program::
341
340
342 %(path)s
341 %(path)s
343
342
344 When two revision arguments are given, then changes are shown
343 When two revision arguments are given, then changes are shown
345 between those revisions. If only one revision is specified then
344 between those revisions. If only one revision is specified then
346 that revision is compared to the working directory, and, when no
345 that revision is compared to the working directory, and, when no
347 revisions are specified, the working directory files are compared
346 revisions are specified, the working directory files are compared
348 to its parent.
347 to its parent.
349 """
348 """
350
349
351 def __init__(self, path, cmdline):
350 def __init__(self, path, cmdline):
352 # We can't pass non-ASCII through docstrings (and path is
351 # We can't pass non-ASCII through docstrings (and path is
353 # in an unknown encoding anyway)
352 # in an unknown encoding anyway)
354 docpath = util.escapestr(path)
353 docpath = util.escapestr(path)
355 self.__doc__ = self.__doc__ % {'path': util.uirepr(docpath)}
354 self.__doc__ = self.__doc__ % {'path': util.uirepr(docpath)}
356 self._cmdline = cmdline
355 self._cmdline = cmdline
357
356
358 def __call__(self, ui, repo, *pats, **opts):
357 def __call__(self, ui, repo, *pats, **opts):
359 options = ' '.join(map(util.shellquote, opts['option']))
358 options = ' '.join(map(util.shellquote, opts['option']))
360 if options:
359 if options:
361 options = ' ' + options
360 options = ' ' + options
362 return dodiff(ui, repo, self._cmdline + options, pats, opts)
361 return dodiff(ui, repo, self._cmdline + options, pats, opts)
363
362
364 def uisetup(ui):
363 def uisetup(ui):
365 for cmd, path in ui.configitems('extdiff'):
364 for cmd, path in ui.configitems('extdiff'):
366 path = util.expandpath(path)
365 path = util.expandpath(path)
367 if cmd.startswith('cmd.'):
366 if cmd.startswith('cmd.'):
368 cmd = cmd[4:]
367 cmd = cmd[4:]
369 if not path:
368 if not path:
370 path = util.findexe(cmd)
369 path = util.findexe(cmd)
371 if path is None:
370 if path is None:
372 path = filemerge.findexternaltool(ui, cmd) or cmd
371 path = filemerge.findexternaltool(ui, cmd) or cmd
373 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
372 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
374 cmdline = util.shellquote(path)
373 cmdline = util.shellquote(path)
375 if diffopts:
374 if diffopts:
376 cmdline += ' ' + diffopts
375 cmdline += ' ' + diffopts
377 elif cmd.startswith('opts.'):
376 elif cmd.startswith('opts.'):
378 continue
377 continue
379 else:
378 else:
380 if path:
379 if path:
381 # case "cmd = path opts"
380 # case "cmd = path opts"
382 cmdline = path
381 cmdline = path
383 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
382 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
384 else:
383 else:
385 # case "cmd ="
384 # case "cmd ="
386 path = util.findexe(cmd)
385 path = util.findexe(cmd)
387 if path is None:
386 if path is None:
388 path = filemerge.findexternaltool(ui, cmd) or cmd
387 path = filemerge.findexternaltool(ui, cmd) or cmd
389 cmdline = util.shellquote(path)
388 cmdline = util.shellquote(path)
390 diffopts = False
389 diffopts = False
391 # look for diff arguments in [diff-tools] then [merge-tools]
390 # look for diff arguments in [diff-tools] then [merge-tools]
392 if not diffopts:
391 if not diffopts:
393 args = ui.config('diff-tools', cmd+'.diffargs') or \
392 args = ui.config('diff-tools', cmd+'.diffargs') or \
394 ui.config('merge-tools', cmd+'.diffargs')
393 ui.config('merge-tools', cmd+'.diffargs')
395 if args:
394 if args:
396 cmdline += ' ' + args
395 cmdline += ' ' + args
397 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
396 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
398 inferrepo=True)(savedcmd(path, cmdline))
397 inferrepo=True)(savedcmd(path, cmdline))
399
398
400 # tell hggettext to extract docstrings from these functions:
399 # tell hggettext to extract docstrings from these functions:
401 i18nfunctions = [savedcmd]
400 i18nfunctions = [savedcmd]
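Two points worth noting for extension authors: uisetup() above turns every 'cmd.NAME' entry in the [extdiff] config section into its own 'hg NAME' command, picking extra arguments from 'opts.NAME' or from NAME.diffargs in [diff-tools]/[merge-tools], and the shared option templates appended to the command tables are now taken from cmdutil rather than commands, as the changed lines show. A hypothetical third-party command built the same way; the command name, short flag and body are invented for illustration, only the cmdutil/registrar usage mirrors the code above.

from mercurial.i18n import _
from mercurial import cmdutil, registrar, scmutil

cmdtable = {}
command = registrar.command(cmdtable)

@command('listtracked',
         [('0', 'print0', None, _('end filenames with NUL'))]
         + cmdutil.walkopts + cmdutil.subrepoopts,   # -S accepted, unused here
         _('hg listtracked [OPTION]... [PATTERN]...'),
         inferrepo=True)
def listtracked(ui, repo, *pats, **opts):
    """print files tracked in the working directory's parent (hypothetical)"""
    ctx = repo['.']
    m = scmutil.match(ctx, pats, opts)      # honours -I/-X from walkopts
    end = '\0' if opts.get('print0') else '\n'
    for f in ctx.walk(m):
        ui.write(f + end)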
@@ -1,166 +1,165 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''pull, update and merge in one command (DEPRECATED)'''
8 '''pull, update and merge in one command (DEPRECATED)'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.node import (
13 from mercurial.node import (
14 short,
14 short,
15 )
15 )
16 from mercurial import (
16 from mercurial import (
17 cmdutil,
17 cmdutil,
18 commands,
19 error,
18 error,
20 exchange,
19 exchange,
21 hg,
20 hg,
22 lock,
21 lock,
23 registrar,
22 registrar,
24 util,
23 util,
25 )
24 )
26
25
27 release = lock.release
26 release = lock.release
28 cmdtable = {}
27 cmdtable = {}
29 command = registrar.command(cmdtable)
28 command = registrar.command(cmdtable)
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
31 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
32 # leave the attribute unspecified.
34 testedwith = 'ships-with-hg-core'
33 testedwith = 'ships-with-hg-core'
35
34
36 @command('fetch',
35 @command('fetch',
37 [('r', 'rev', [],
36 [('r', 'rev', [],
38 _('a specific revision you would like to pull'), _('REV')),
37 _('a specific revision you would like to pull'), _('REV')),
39 ('e', 'edit', None, _('invoke editor on commit messages')),
38 ('e', 'edit', None, _('invoke editor on commit messages')),
40 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
39 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
41 ('', 'switch-parent', None, _('switch parents when merging')),
40 ('', 'switch-parent', None, _('switch parents when merging')),
42 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
41 ] + cmdutil.commitopts + cmdutil.commitopts2 + cmdutil.remoteopts,
43 _('hg fetch [SOURCE]'))
42 _('hg fetch [SOURCE]'))
44 def fetch(ui, repo, source='default', **opts):
43 def fetch(ui, repo, source='default', **opts):
45 '''pull changes from a remote repository, merge new changes if needed.
44 '''pull changes from a remote repository, merge new changes if needed.
46
45
47 This finds all changes from the repository at the specified path
46 This finds all changes from the repository at the specified path
48 or URL and adds them to the local repository.
47 or URL and adds them to the local repository.
49
48
50 If the pulled changes add a new branch head, the head is
49 If the pulled changes add a new branch head, the head is
51 automatically merged, and the result of the merge is committed.
50 automatically merged, and the result of the merge is committed.
52 Otherwise, the working directory is updated to include the new
51 Otherwise, the working directory is updated to include the new
53 changes.
52 changes.
54
53
55 When a merge is needed, the working directory is first updated to
54 When a merge is needed, the working directory is first updated to
56 the newly pulled changes. Local changes are then merged into the
55 the newly pulled changes. Local changes are then merged into the
57 pulled changes. To switch the merge order, use --switch-parent.
56 pulled changes. To switch the merge order, use --switch-parent.
58
57
59 See :hg:`help dates` for a list of formats valid for -d/--date.
58 See :hg:`help dates` for a list of formats valid for -d/--date.
60
59
61 Returns 0 on success.
60 Returns 0 on success.
62 '''
61 '''
63
62
64 date = opts.get('date')
63 date = opts.get('date')
65 if date:
64 if date:
66 opts['date'] = util.parsedate(date)
65 opts['date'] = util.parsedate(date)
67
66
68 parent, _p2 = repo.dirstate.parents()
67 parent, _p2 = repo.dirstate.parents()
69 branch = repo.dirstate.branch()
68 branch = repo.dirstate.branch()
70 try:
69 try:
71 branchnode = repo.branchtip(branch)
70 branchnode = repo.branchtip(branch)
72 except error.RepoLookupError:
71 except error.RepoLookupError:
73 branchnode = None
72 branchnode = None
74 if parent != branchnode:
73 if parent != branchnode:
75 raise error.Abort(_('working directory not at branch tip'),
74 raise error.Abort(_('working directory not at branch tip'),
76 hint=_("use 'hg update' to check out branch tip"))
75 hint=_("use 'hg update' to check out branch tip"))
77
76
78 wlock = lock = None
77 wlock = lock = None
79 try:
78 try:
80 wlock = repo.wlock()
79 wlock = repo.wlock()
81 lock = repo.lock()
80 lock = repo.lock()
82
81
83 cmdutil.bailifchanged(repo)
82 cmdutil.bailifchanged(repo)
84
83
85 bheads = repo.branchheads(branch)
84 bheads = repo.branchheads(branch)
86 bheads = [head for head in bheads if len(repo[head].children()) == 0]
85 bheads = [head for head in bheads if len(repo[head].children()) == 0]
87 if len(bheads) > 1:
86 if len(bheads) > 1:
88 raise error.Abort(_('multiple heads in this branch '
87 raise error.Abort(_('multiple heads in this branch '
89 '(use "hg heads ." and "hg merge" to merge)'))
88 '(use "hg heads ." and "hg merge" to merge)'))
90
89
91 other = hg.peer(repo, opts, ui.expandpath(source))
90 other = hg.peer(repo, opts, ui.expandpath(source))
92 ui.status(_('pulling from %s\n') %
91 ui.status(_('pulling from %s\n') %
93 util.hidepassword(ui.expandpath(source)))
92 util.hidepassword(ui.expandpath(source)))
94 revs = None
93 revs = None
95 if opts['rev']:
94 if opts['rev']:
96 try:
95 try:
97 revs = [other.lookup(rev) for rev in opts['rev']]
96 revs = [other.lookup(rev) for rev in opts['rev']]
98 except error.CapabilityError:
97 except error.CapabilityError:
99 err = _("other repository doesn't support revision lookup, "
98 err = _("other repository doesn't support revision lookup, "
100 "so a rev cannot be specified.")
99 "so a rev cannot be specified.")
101 raise error.Abort(err)
100 raise error.Abort(err)
102
101
103 # Are there any changes at all?
102 # Are there any changes at all?
104 modheads = exchange.pull(repo, other, heads=revs).cgresult
103 modheads = exchange.pull(repo, other, heads=revs).cgresult
105 if modheads == 0:
104 if modheads == 0:
106 return 0
105 return 0
107
106
108 # Is this a simple fast-forward along the current branch?
107 # Is this a simple fast-forward along the current branch?
109 newheads = repo.branchheads(branch)
108 newheads = repo.branchheads(branch)
110 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
109 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
111 if len(newheads) == 1 and len(newchildren):
110 if len(newheads) == 1 and len(newchildren):
112 if newchildren[0] != parent:
111 if newchildren[0] != parent:
113 return hg.update(repo, newchildren[0])
112 return hg.update(repo, newchildren[0])
114 else:
113 else:
115 return 0
114 return 0
116
115
117 # Is there more than one additional branch head?
116 # Is there more than one additional branch head?
118 newchildren = [n for n in newchildren if n != parent]
117 newchildren = [n for n in newchildren if n != parent]
119 newparent = parent
118 newparent = parent
120 if newchildren:
119 if newchildren:
121 newparent = newchildren[0]
120 newparent = newchildren[0]
122 hg.clean(repo, newparent)
121 hg.clean(repo, newparent)
123 newheads = [n for n in newheads if n != newparent]
122 newheads = [n for n in newheads if n != newparent]
124 if len(newheads) > 1:
123 if len(newheads) > 1:
125 ui.status(_('not merging with %d other new branch heads '
124 ui.status(_('not merging with %d other new branch heads '
126 '(use "hg heads ." and "hg merge" to merge them)\n') %
125 '(use "hg heads ." and "hg merge" to merge them)\n') %
127 (len(newheads) - 1))
126 (len(newheads) - 1))
128 return 1
127 return 1
129
128
130 if not newheads:
129 if not newheads:
131 return 0
130 return 0
132
131
133 # Otherwise, let's merge.
132 # Otherwise, let's merge.
134 err = False
133 err = False
135 if newheads:
134 if newheads:
136 # By default, we consider the repository we're pulling
135 # By default, we consider the repository we're pulling
137 # *from* as authoritative, so we merge our changes into
136 # *from* as authoritative, so we merge our changes into
138 # theirs.
137 # theirs.
139 if opts['switch_parent']:
138 if opts['switch_parent']:
140 firstparent, secondparent = newparent, newheads[0]
139 firstparent, secondparent = newparent, newheads[0]
141 else:
140 else:
142 firstparent, secondparent = newheads[0], newparent
141 firstparent, secondparent = newheads[0], newparent
143 ui.status(_('updating to %d:%s\n') %
142 ui.status(_('updating to %d:%s\n') %
144 (repo.changelog.rev(firstparent),
143 (repo.changelog.rev(firstparent),
145 short(firstparent)))
144 short(firstparent)))
146 hg.clean(repo, firstparent)
145 hg.clean(repo, firstparent)
147 ui.status(_('merging with %d:%s\n') %
146 ui.status(_('merging with %d:%s\n') %
148 (repo.changelog.rev(secondparent), short(secondparent)))
147 (repo.changelog.rev(secondparent), short(secondparent)))
149 err = hg.merge(repo, secondparent, remind=False)
148 err = hg.merge(repo, secondparent, remind=False)
150
149
151 if not err:
150 if not err:
152 # we don't translate commit messages
151 # we don't translate commit messages
153 message = (cmdutil.logmessage(ui, opts) or
152 message = (cmdutil.logmessage(ui, opts) or
154 ('Automated merge with %s' %
153 ('Automated merge with %s' %
155 util.removeauth(other.url())))
154 util.removeauth(other.url())))
156 editopt = opts.get('edit') or opts.get('force_editor')
155 editopt = opts.get('edit') or opts.get('force_editor')
157 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
156 editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
158 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
157 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
159 ui.status(_('new changeset %d:%s merges remote changes '
158 ui.status(_('new changeset %d:%s merges remote changes '
160 'with local\n') % (repo.changelog.rev(n),
159 'with local\n') % (repo.changelog.rev(n),
161 short(n)))
160 short(n)))
162
161
163 return err
162 return err
164
163
165 finally:
164 finally:
166 release(lock, wlock)
165 release(lock, wlock)
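The merge-order decision near the end of fetch() is easy to miss: by default the pulled head becomes the first parent and local changes are merged into it, while --switch-parent flips the two. A tiny sketch of just that choice; the helper name and the sample values are local to the sketch, not part of fetch.

def pickparents(newparent, newheads, switch_parent):
    # default: treat the repository we pulled from as authoritative
    if switch_parent:
        return newparent, newheads[0]
    return newheads[0], newparent

firstparent, secondparent = pickparents('localhead', ['pulledhead'], False)
assert (firstparent, secondparent) == ('pulledhead', 'localhead')
# fetch() then runs hg.clean(repo, firstparent) and hg.merge(repo, secondparent)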
@@ -1,320 +1,319 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import os
11 import os
12 import tempfile
12 import tempfile
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 cmdutil,
16 cmdutil,
17 commands,
18 error,
17 error,
19 match,
18 match,
20 node as hgnode,
19 node as hgnode,
21 pycompat,
20 pycompat,
22 registrar,
21 registrar,
23 util,
22 util,
24 )
23 )
25
24
26 cmdtable = {}
25 cmdtable = {}
27 command = registrar.command(cmdtable)
26 command = registrar.command(cmdtable)
28 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
27 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
28 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # be specifying the version(s) of Mercurial they are tested with, or
29 # be specifying the version(s) of Mercurial they are tested with, or
31 # leave the attribute unspecified.
30 # leave the attribute unspecified.
32 testedwith = 'ships-with-hg-core'
31 testedwith = 'ships-with-hg-core'
33
32
34 class gpg(object):
33 class gpg(object):
35 def __init__(self, path, key=None):
34 def __init__(self, path, key=None):
36 self.path = path
35 self.path = path
37 self.key = (key and " --local-user \"%s\"" % key) or ""
36 self.key = (key and " --local-user \"%s\"" % key) or ""
38
37
39 def sign(self, data):
38 def sign(self, data):
40 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
39 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
41 return util.filter(data, gpgcmd)
40 return util.filter(data, gpgcmd)
42
41
43 def verify(self, data, sig):
42 def verify(self, data, sig):
44 """ returns of the good and bad signatures"""
43 """ returns of the good and bad signatures"""
45 sigfile = datafile = None
44 sigfile = datafile = None
46 try:
45 try:
47 # create temporary files
46 # create temporary files
48 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
47 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
49 fp = os.fdopen(fd, pycompat.sysstr('wb'))
48 fp = os.fdopen(fd, pycompat.sysstr('wb'))
50 fp.write(sig)
49 fp.write(sig)
51 fp.close()
50 fp.close()
52 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
51 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
53 fp = os.fdopen(fd, pycompat.sysstr('wb'))
52 fp = os.fdopen(fd, pycompat.sysstr('wb'))
54 fp.write(data)
53 fp.write(data)
55 fp.close()
54 fp.close()
56 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
55 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
57 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
56 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
58 ret = util.filter("", gpgcmd)
57 ret = util.filter("", gpgcmd)
59 finally:
58 finally:
60 for f in (sigfile, datafile):
59 for f in (sigfile, datafile):
61 try:
60 try:
62 if f:
61 if f:
63 os.unlink(f)
62 os.unlink(f)
64 except OSError:
63 except OSError:
65 pass
64 pass
66 keys = []
65 keys = []
67 key, fingerprint = None, None
66 key, fingerprint = None, None
68 for l in ret.splitlines():
67 for l in ret.splitlines():
69 # see DETAILS in the gnupg documentation
68 # see DETAILS in the gnupg documentation
70 # filter the logger output
69 # filter the logger output
71 if not l.startswith("[GNUPG:]"):
70 if not l.startswith("[GNUPG:]"):
72 continue
71 continue
73 l = l[9:]
72 l = l[9:]
74 if l.startswith("VALIDSIG"):
73 if l.startswith("VALIDSIG"):
75 # fingerprint of the primary key
74 # fingerprint of the primary key
76 fingerprint = l.split()[10]
75 fingerprint = l.split()[10]
77 elif l.startswith("ERRSIG"):
76 elif l.startswith("ERRSIG"):
78 key = l.split(" ", 3)[:2]
77 key = l.split(" ", 3)[:2]
79 key.append("")
78 key.append("")
80 fingerprint = None
79 fingerprint = None
81 elif (l.startswith("GOODSIG") or
80 elif (l.startswith("GOODSIG") or
82 l.startswith("EXPSIG") or
81 l.startswith("EXPSIG") or
83 l.startswith("EXPKEYSIG") or
82 l.startswith("EXPKEYSIG") or
84 l.startswith("BADSIG")):
83 l.startswith("BADSIG")):
85 if key is not None:
84 if key is not None:
86 keys.append(key + [fingerprint])
85 keys.append(key + [fingerprint])
87 key = l.split(" ", 2)
86 key = l.split(" ", 2)
88 fingerprint = None
87 fingerprint = None
89 if key is not None:
88 if key is not None:
90 keys.append(key + [fingerprint])
89 keys.append(key + [fingerprint])
91 return keys
90 return keys
92
91
93 def newgpg(ui, **opts):
92 def newgpg(ui, **opts):
94 """create a new gpg instance"""
93 """create a new gpg instance"""
95 gpgpath = ui.config("gpg", "cmd", "gpg")
94 gpgpath = ui.config("gpg", "cmd", "gpg")
96 gpgkey = opts.get('key')
95 gpgkey = opts.get('key')
97 if not gpgkey:
96 if not gpgkey:
98 gpgkey = ui.config("gpg", "key", None)
97 gpgkey = ui.config("gpg", "key", None)
99 return gpg(gpgpath, gpgkey)
98 return gpg(gpgpath, gpgkey)
100
99
101 def sigwalk(repo):
100 def sigwalk(repo):
102 """
101 """
103 walk over every signature, yielding a pair
102 walk over every signature, yielding a pair
104 ((node, version, sig), (filename, linenumber))
103 ((node, version, sig), (filename, linenumber))
105 """
104 """
106 def parsefile(fileiter, context):
105 def parsefile(fileiter, context):
107 ln = 1
106 ln = 1
108 for l in fileiter:
107 for l in fileiter:
109 if not l:
108 if not l:
110 continue
109 continue
111 yield (l.split(" ", 2), (context, ln))
110 yield (l.split(" ", 2), (context, ln))
112 ln += 1
111 ln += 1
113
112
114 # read the heads
113 # read the heads
115 fl = repo.file(".hgsigs")
114 fl = repo.file(".hgsigs")
116 for r in reversed(fl.heads()):
115 for r in reversed(fl.heads()):
117 fn = ".hgsigs|%s" % hgnode.short(r)
116 fn = ".hgsigs|%s" % hgnode.short(r)
118 for item in parsefile(fl.read(r).splitlines(), fn):
117 for item in parsefile(fl.read(r).splitlines(), fn):
119 yield item
118 yield item
120 try:
119 try:
121 # read local signatures
120 # read local signatures
122 fn = "localsigs"
121 fn = "localsigs"
123 for item in parsefile(repo.vfs(fn), fn):
122 for item in parsefile(repo.vfs(fn), fn):
124 yield item
123 yield item
125 except IOError:
124 except IOError:
126 pass
125 pass
127
126
128 def getkeys(ui, repo, mygpg, sigdata, context):
127 def getkeys(ui, repo, mygpg, sigdata, context):
129 """get the keys who signed a data"""
128 """get the keys who signed a data"""
130 fn, ln = context
129 fn, ln = context
131 node, version, sig = sigdata
130 node, version, sig = sigdata
132 prefix = "%s:%d" % (fn, ln)
131 prefix = "%s:%d" % (fn, ln)
133 node = hgnode.bin(node)
132 node = hgnode.bin(node)
134
133
135 data = node2txt(repo, node, version)
134 data = node2txt(repo, node, version)
136 sig = binascii.a2b_base64(sig)
135 sig = binascii.a2b_base64(sig)
137 keys = mygpg.verify(data, sig)
136 keys = mygpg.verify(data, sig)
138
137
139 validkeys = []
138 validkeys = []
140 # warn for expired key and/or sigs
139 # warn for expired key and/or sigs
141 for key in keys:
140 for key in keys:
142 if key[0] == "ERRSIG":
141 if key[0] == "ERRSIG":
143 ui.write(_("%s Unknown key ID \"%s\"\n")
142 ui.write(_("%s Unknown key ID \"%s\"\n")
144 % (prefix, shortkey(ui, key[1][:15])))
143 % (prefix, shortkey(ui, key[1][:15])))
145 continue
144 continue
146 if key[0] == "BADSIG":
145 if key[0] == "BADSIG":
147 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
146 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
148 continue
147 continue
149 if key[0] == "EXPSIG":
148 if key[0] == "EXPSIG":
150 ui.write(_("%s Note: Signature has expired"
149 ui.write(_("%s Note: Signature has expired"
151 " (signed by: \"%s\")\n") % (prefix, key[2]))
150 " (signed by: \"%s\")\n") % (prefix, key[2]))
152 elif key[0] == "EXPKEYSIG":
151 elif key[0] == "EXPKEYSIG":
153 ui.write(_("%s Note: This key has expired"
152 ui.write(_("%s Note: This key has expired"
154 " (signed by: \"%s\")\n") % (prefix, key[2]))
153 " (signed by: \"%s\")\n") % (prefix, key[2]))
155 validkeys.append((key[1], key[2], key[3]))
154 validkeys.append((key[1], key[2], key[3]))
156 return validkeys
155 return validkeys
157
156
158 @command("sigs", [], _('hg sigs'))
157 @command("sigs", [], _('hg sigs'))
159 def sigs(ui, repo):
158 def sigs(ui, repo):
160 """list signed changesets"""
159 """list signed changesets"""
161 mygpg = newgpg(ui)
160 mygpg = newgpg(ui)
162 revs = {}
161 revs = {}
163
162
164 for data, context in sigwalk(repo):
163 for data, context in sigwalk(repo):
165 node, version, sig = data
164 node, version, sig = data
166 fn, ln = context
165 fn, ln = context
167 try:
166 try:
168 n = repo.lookup(node)
167 n = repo.lookup(node)
169 except KeyError:
168 except KeyError:
170 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
169 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
171 continue
170 continue
172 r = repo.changelog.rev(n)
171 r = repo.changelog.rev(n)
173 keys = getkeys(ui, repo, mygpg, data, context)
172 keys = getkeys(ui, repo, mygpg, data, context)
174 if not keys:
173 if not keys:
175 continue
174 continue
176 revs.setdefault(r, [])
175 revs.setdefault(r, [])
177 revs[r].extend(keys)
176 revs[r].extend(keys)
178 for rev in sorted(revs, reverse=True):
177 for rev in sorted(revs, reverse=True):
179 for k in revs[rev]:
178 for k in revs[rev]:
180 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
179 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
181 ui.write("%-30s %s\n" % (keystr(ui, k), r))
180 ui.write("%-30s %s\n" % (keystr(ui, k), r))
182
181
183 @command("sigcheck", [], _('hg sigcheck REV'))
182 @command("sigcheck", [], _('hg sigcheck REV'))
184 def sigcheck(ui, repo, rev):
183 def sigcheck(ui, repo, rev):
185 """verify all the signatures there may be for a particular revision"""
184 """verify all the signatures there may be for a particular revision"""
186 mygpg = newgpg(ui)
185 mygpg = newgpg(ui)
187 rev = repo.lookup(rev)
186 rev = repo.lookup(rev)
188 hexrev = hgnode.hex(rev)
187 hexrev = hgnode.hex(rev)
189 keys = []
188 keys = []
190
189
191 for data, context in sigwalk(repo):
190 for data, context in sigwalk(repo):
192 node, version, sig = data
191 node, version, sig = data
193 if node == hexrev:
192 if node == hexrev:
194 k = getkeys(ui, repo, mygpg, data, context)
193 k = getkeys(ui, repo, mygpg, data, context)
195 if k:
194 if k:
196 keys.extend(k)
195 keys.extend(k)
197
196
198 if not keys:
197 if not keys:
199 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
198 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
200 return
199 return
201
200
202 # print summary
201 # print summary
203 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
202 ui.write(_("%s is signed by:\n") % hgnode.short(rev))
204 for key in keys:
203 for key in keys:
205 ui.write(" %s\n" % keystr(ui, key))
204 ui.write(" %s\n" % keystr(ui, key))
206
205
207 def keystr(ui, key):
206 def keystr(ui, key):
208 """associate a string to a key (username, comment)"""
207 """associate a string to a key (username, comment)"""
209 keyid, user, fingerprint = key
208 keyid, user, fingerprint = key
210 comment = ui.config("gpg", fingerprint, None)
209 comment = ui.config("gpg", fingerprint, None)
211 if comment:
210 if comment:
212 return "%s (%s)" % (user, comment)
211 return "%s (%s)" % (user, comment)
213 else:
212 else:
214 return user
213 return user
215
214
216 @command("sign",
215 @command("sign",
217 [('l', 'local', None, _('make the signature local')),
216 [('l', 'local', None, _('make the signature local')),
218 ('f', 'force', None, _('sign even if the sigfile is modified')),
217 ('f', 'force', None, _('sign even if the sigfile is modified')),
219 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
218 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
220 ('k', 'key', '',
219 ('k', 'key', '',
221 _('the key id to sign with'), _('ID')),
220 _('the key id to sign with'), _('ID')),
222 ('m', 'message', '',
221 ('m', 'message', '',
223 _('use text as commit message'), _('TEXT')),
222 _('use text as commit message'), _('TEXT')),
224 ('e', 'edit', False, _('invoke editor on commit messages')),
223 ('e', 'edit', False, _('invoke editor on commit messages')),
225 ] + commands.commitopts2,
224 ] + cmdutil.commitopts2,
226 _('hg sign [OPTION]... [REV]...'))
225 _('hg sign [OPTION]... [REV]...'))
227 def sign(ui, repo, *revs, **opts):
226 def sign(ui, repo, *revs, **opts):
228 """add a signature for the current or given revision
227 """add a signature for the current or given revision
229
228
230 If no revision is given, the parent of the working directory is used,
229 If no revision is given, the parent of the working directory is used,
231 or tip if no revision is checked out.
230 or tip if no revision is checked out.
232
231
233 The ``gpg.cmd`` config setting can be used to specify the command
232 The ``gpg.cmd`` config setting can be used to specify the command
234 to run. A default key can be specified with ``gpg.key``.
233 to run. A default key can be specified with ``gpg.key``.
235
234
236 See :hg:`help dates` for a list of formats valid for -d/--date.
235 See :hg:`help dates` for a list of formats valid for -d/--date.
237 """
236 """
238 with repo.wlock():
237 with repo.wlock():
239 return _dosign(ui, repo, *revs, **opts)
238 return _dosign(ui, repo, *revs, **opts)
240
239
241 def _dosign(ui, repo, *revs, **opts):
240 def _dosign(ui, repo, *revs, **opts):
242 mygpg = newgpg(ui, **opts)
241 mygpg = newgpg(ui, **opts)
243 sigver = "0"
242 sigver = "0"
244 sigmessage = ""
243 sigmessage = ""
245
244
246 date = opts.get('date')
245 date = opts.get('date')
247 if date:
246 if date:
248 opts['date'] = util.parsedate(date)
247 opts['date'] = util.parsedate(date)
249
248
250 if revs:
249 if revs:
251 nodes = [repo.lookup(n) for n in revs]
250 nodes = [repo.lookup(n) for n in revs]
252 else:
251 else:
253 nodes = [node for node in repo.dirstate.parents()
252 nodes = [node for node in repo.dirstate.parents()
254 if node != hgnode.nullid]
253 if node != hgnode.nullid]
255 if len(nodes) > 1:
254 if len(nodes) > 1:
256 raise error.Abort(_('uncommitted merge - please provide a '
255 raise error.Abort(_('uncommitted merge - please provide a '
257 'specific revision'))
256 'specific revision'))
258 if not nodes:
257 if not nodes:
259 nodes = [repo.changelog.tip()]
258 nodes = [repo.changelog.tip()]
260
259
261 for n in nodes:
260 for n in nodes:
262 hexnode = hgnode.hex(n)
261 hexnode = hgnode.hex(n)
263 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
262 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
264 hgnode.short(n)))
263 hgnode.short(n)))
265 # build data
264 # build data
266 data = node2txt(repo, n, sigver)
265 data = node2txt(repo, n, sigver)
267 sig = mygpg.sign(data)
266 sig = mygpg.sign(data)
268 if not sig:
267 if not sig:
269 raise error.Abort(_("error while signing"))
268 raise error.Abort(_("error while signing"))
270 sig = binascii.b2a_base64(sig)
269 sig = binascii.b2a_base64(sig)
271 sig = sig.replace("\n", "")
270 sig = sig.replace("\n", "")
272 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
271 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
273
272
274 # write it
273 # write it
275 if opts['local']:
274 if opts['local']:
276 repo.vfs.append("localsigs", sigmessage)
275 repo.vfs.append("localsigs", sigmessage)
277 return
276 return
278
277
279 if not opts["force"]:
278 if not opts["force"]:
280 msigs = match.exact(repo.root, '', ['.hgsigs'])
279 msigs = match.exact(repo.root, '', ['.hgsigs'])
281 if any(repo.status(match=msigs, unknown=True, ignored=True)):
280 if any(repo.status(match=msigs, unknown=True, ignored=True)):
282 raise error.Abort(_("working copy of .hgsigs is changed "),
281 raise error.Abort(_("working copy of .hgsigs is changed "),
283 hint=_("please commit .hgsigs manually"))
282 hint=_("please commit .hgsigs manually"))
284
283
285 sigsfile = repo.wvfs(".hgsigs", "ab")
284 sigsfile = repo.wvfs(".hgsigs", "ab")
286 sigsfile.write(sigmessage)
285 sigsfile.write(sigmessage)
287 sigsfile.close()
286 sigsfile.close()
288
287
289 if '.hgsigs' not in repo.dirstate:
288 if '.hgsigs' not in repo.dirstate:
290 repo[None].add([".hgsigs"])
289 repo[None].add([".hgsigs"])
291
290
292 if opts["no_commit"]:
291 if opts["no_commit"]:
293 return
292 return
294
293
295 message = opts['message']
294 message = opts['message']
296 if not message:
295 if not message:
297 # we don't translate commit messages
296 # we don't translate commit messages
298 message = "\n".join(["Added signature for changeset %s"
297 message = "\n".join(["Added signature for changeset %s"
299 % hgnode.short(n)
298 % hgnode.short(n)
300 for n in nodes])
299 for n in nodes])
301 try:
300 try:
302 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
301 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
303 repo.commit(message, opts['user'], opts['date'], match=msigs,
302 repo.commit(message, opts['user'], opts['date'], match=msigs,
304 editor=editor)
303 editor=editor)
305 except ValueError as inst:
304 except ValueError as inst:
306 raise error.Abort(str(inst))
305 raise error.Abort(str(inst))
307
306
308 def shortkey(ui, key):
307 def shortkey(ui, key):
309 if len(key) != 16:
308 if len(key) != 16:
310 ui.debug("key ID \"%s\" format error\n" % key)
309 ui.debug("key ID \"%s\" format error\n" % key)
311 return key
310 return key
312
311
313 return key[-8:]
312 return key[-8:]
314
313
315 def node2txt(repo, node, ver):
314 def node2txt(repo, node, ver):
316 """map a manifest into some text"""
315 """map a manifest into some text"""
317 if ver == "0":
316 if ver == "0":
318 return "%s\n" % hgnode.hex(node)
317 return "%s\n" % hgnode.hex(node)
319 else:
318 else:
320 raise error.Abort(_("unknown signature version"))
319 raise error.Abort(_("unknown signature version"))
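sign() above appends one "&lt;hex node&gt; &lt;version&gt; &lt;base64 signature&gt;" line per signed changeset to .hgsigs (or to localsigs with --local), and sigwalk()/getkeys() later split those lines back apart. A self-contained sketch of that round trip; the node hash and signature payload are made up, only the line layout and the split mirror the code above.

import binascii

fakenode = '1f0dee641bb7258c56bd60e93edfa2405381c41e'   # illustrative hash
fakesig = binascii.b2a_base64(b'not a real detached signature').decode().strip()
line = "%s 0 %s\n" % (fakenode, fakesig)                 # what sign() writes

node, version, sig = line.split(" ", 2)                  # what sigwalk() yields
assert version == "0"                 # the only version node2txt() understands
rawsig = binascii.a2b_base64(sig)     # getkeys() hands this to gpg --verify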
@@ -1,69 +1,70 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell (DEPRECATED)
8 '''command to view revision graphs from a shell (DEPRECATED)
9
9
10 The functionality of this extension has been included in core Mercurial
10 The functionality of this extension has been included in core Mercurial
11 since version 2.3. Please use :hg:`log -G ...` instead.
11 since version 2.3. Please use :hg:`log -G ...` instead.
12
12
13 This extension adds a --graph option to the incoming, outgoing and log
13 This extension adds a --graph option to the incoming, outgoing and log
14 commands. When this option is given, an ASCII representation of the
14 commands. When this option is given, an ASCII representation of the
15 revision graph is also shown.
15 revision graph is also shown.
16 '''
16 '''
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial import (
21 from mercurial import (
22 cmdutil,
22 commands,
23 commands,
23 registrar,
24 registrar,
24 )
25 )
25
26
26 cmdtable = {}
27 cmdtable = {}
27 command = registrar.command(cmdtable)
28 command = registrar.command(cmdtable)
28 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # be specifying the version(s) of Mercurial they are tested with, or
31 # be specifying the version(s) of Mercurial they are tested with, or
31 # leave the attribute unspecified.
32 # leave the attribute unspecified.
32 testedwith = 'ships-with-hg-core'
33 testedwith = 'ships-with-hg-core'
33
34
34 @command('glog',
35 @command('glog',
35 [('f', 'follow', None,
36 [('f', 'follow', None,
36 _('follow changeset history, or file history across copies and renames')),
37 _('follow changeset history, or file history across copies and renames')),
37 ('', 'follow-first', None,
38 ('', 'follow-first', None,
38 _('only follow the first parent of merge changesets (DEPRECATED)')),
39 _('only follow the first parent of merge changesets (DEPRECATED)')),
39 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
40 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
40 ('C', 'copies', None, _('show copied files')),
41 ('C', 'copies', None, _('show copied files')),
41 ('k', 'keyword', [],
42 ('k', 'keyword', [],
42 _('do case-insensitive search for a given text'), _('TEXT')),
43 _('do case-insensitive search for a given text'), _('TEXT')),
43 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
44 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
44 ('', 'removed', None, _('include revisions where files were removed')),
45 ('', 'removed', None, _('include revisions where files were removed')),
45 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
46 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
46 ('u', 'user', [], _('revisions committed by user'), _('USER')),
47 ('u', 'user', [], _('revisions committed by user'), _('USER')),
47 ('', 'only-branch', [],
48 ('', 'only-branch', [],
48 _('show only changesets within the given named branch (DEPRECATED)'),
49 _('show only changesets within the given named branch (DEPRECATED)'),
49 _('BRANCH')),
50 _('BRANCH')),
50 ('b', 'branch', [],
51 ('b', 'branch', [],
51 _('show changesets within the given named branch'), _('BRANCH')),
52 _('show changesets within the given named branch'), _('BRANCH')),
52 ('P', 'prune', [],
53 ('P', 'prune', [],
53 _('do not display revision or any of its ancestors'), _('REV')),
54 _('do not display revision or any of its ancestors'), _('REV')),
54 ] + commands.logopts + commands.walkopts,
55 ] + cmdutil.logopts + cmdutil.walkopts,
55 _('[OPTION]... [FILE]'),
56 _('[OPTION]... [FILE]'),
56 inferrepo=True)
57 inferrepo=True)
57 def glog(ui, repo, *pats, **opts):
58 def glog(ui, repo, *pats, **opts):
58 """show revision history alongside an ASCII revision graph
59 """show revision history alongside an ASCII revision graph
59
60
60 Print a revision history alongside a revision graph drawn with
61 Print a revision history alongside a revision graph drawn with
61 ASCII characters.
62 ASCII characters.
62
63
63 Nodes printed as an @ character are parents of the working
64 Nodes printed as an @ character are parents of the working
64 directory.
65 directory.
65
66
66 This is an alias to :hg:`log -G`.
67 This is an alias to :hg:`log -G`.
67 """
68 """
68 opts['graph'] = True
69 opts['graph'] = True
69 return commands.log(ui, repo, *pats, **opts)
70 return commands.log(ui, repo, *pats, **opts)
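glog is now a thin alias: its option table is assembled from cmdutil.logopts and cmdutil.walkopts, it forces --graph and delegates to commands.log. The same pattern can be reused for other wrapper commands; a hypothetical sketch follows, where the command name, the default limit and the docstring are invented, and only the option composition and delegation mirror the code above.

from mercurial.i18n import _
from mercurial import cmdutil, commands, registrar

cmdtable = {}
command = registrar.command(cmdtable)

@command('tiplog', cmdutil.logopts + cmdutil.walkopts,
         _('hg tiplog [OPTION]... [FILE]'), inferrepo=True)
def tiplog(ui, repo, *pats, **opts):
    """show the most recent changesets as a graph (hypothetical wrapper)"""
    opts['graph'] = True
    opts['limit'] = opts.get('limit') or '5'   # option values arrive as strings
    return commands.log(ui, repo, *pats, **opts)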
@@ -1,504 +1,503 b''
1 # journal.py
1 # journal.py
2 #
2 #
3 # Copyright 2014-2016 Facebook, Inc.
3 # Copyright 2014-2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """track previous positions of bookmarks (EXPERIMENTAL)
7 """track previous positions of bookmarks (EXPERIMENTAL)
8
8
9 This extension adds a new command: `hg journal`, which shows you where
9 This extension adds a new command: `hg journal`, which shows you where
10 bookmarks were previously located.
10 bookmarks were previously located.
11
11
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import collections
16 import collections
17 import errno
17 import errno
18 import os
18 import os
19 import weakref
19 import weakref
20
20
21 from mercurial.i18n import _
21 from mercurial.i18n import _
22
22
23 from mercurial import (
23 from mercurial import (
24 bookmarks,
24 bookmarks,
25 cmdutil,
25 cmdutil,
26 commands,
27 dispatch,
26 dispatch,
28 error,
27 error,
29 extensions,
28 extensions,
30 hg,
29 hg,
31 localrepo,
30 localrepo,
32 lock,
31 lock,
33 node,
32 node,
34 registrar,
33 registrar,
35 util,
34 util,
36 )
35 )
37
36
38 from . import share
37 from . import share
39
38
40 cmdtable = {}
39 cmdtable = {}
41 command = registrar.command(cmdtable)
40 command = registrar.command(cmdtable)
42
41
43 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
42 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
44 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
43 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
45 # be specifying the version(s) of Mercurial they are tested with, or
44 # be specifying the version(s) of Mercurial they are tested with, or
46 # leave the attribute unspecified.
45 # leave the attribute unspecified.
47 testedwith = 'ships-with-hg-core'
46 testedwith = 'ships-with-hg-core'
48
47
49 # storage format version; increment when the format changes
48 # storage format version; increment when the format changes
50 storageversion = 0
49 storageversion = 0
51
50
52 # namespaces
51 # namespaces
53 bookmarktype = 'bookmark'
52 bookmarktype = 'bookmark'
54 wdirparenttype = 'wdirparent'
53 wdirparenttype = 'wdirparent'
55 # In a shared repository, what shared feature name is used
54 # In a shared repository, what shared feature name is used
56 # to indicate this namespace is shared with the source?
55 # to indicate this namespace is shared with the source?
57 sharednamespaces = {
56 sharednamespaces = {
58 bookmarktype: hg.sharedbookmarks,
57 bookmarktype: hg.sharedbookmarks,
59 }
58 }
60
59
61 # Journal recording, register hooks and storage object
60 # Journal recording, register hooks and storage object
62 def extsetup(ui):
61 def extsetup(ui):
63 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
62 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
64 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
63 extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
65 extensions.wrapfunction(
64 extensions.wrapfunction(
66 localrepo.localrepository.dirstate, 'func', wrapdirstate)
65 localrepo.localrepository.dirstate, 'func', wrapdirstate)
67 extensions.wrapfunction(hg, 'postshare', wrappostshare)
66 extensions.wrapfunction(hg, 'postshare', wrappostshare)
68 extensions.wrapfunction(hg, 'copystore', unsharejournal)
67 extensions.wrapfunction(hg, 'copystore', unsharejournal)
69
68
70 def reposetup(ui, repo):
69 def reposetup(ui, repo):
71 if repo.local():
70 if repo.local():
72 repo.journal = journalstorage(repo)
71 repo.journal = journalstorage(repo)
73
72
74 def runcommand(orig, lui, repo, cmd, fullargs, *args):
73 def runcommand(orig, lui, repo, cmd, fullargs, *args):
75 """Track the command line options for recording in the journal"""
74 """Track the command line options for recording in the journal"""
76 journalstorage.recordcommand(*fullargs)
75 journalstorage.recordcommand(*fullargs)
77 return orig(lui, repo, cmd, fullargs, *args)
76 return orig(lui, repo, cmd, fullargs, *args)
78
77
79 # hooks to record dirstate changes
78 # hooks to record dirstate changes
80 def wrapdirstate(orig, repo):
79 def wrapdirstate(orig, repo):
81 """Make journal storage available to the dirstate object"""
80 """Make journal storage available to the dirstate object"""
82 dirstate = orig(repo)
81 dirstate = orig(repo)
83 if util.safehasattr(repo, 'journal'):
82 if util.safehasattr(repo, 'journal'):
84 dirstate.journalstorage = repo.journal
83 dirstate.journalstorage = repo.journal
85 dirstate.addparentchangecallback('journal', recorddirstateparents)
84 dirstate.addparentchangecallback('journal', recorddirstateparents)
86 return dirstate
85 return dirstate
87
86
88 def recorddirstateparents(dirstate, old, new):
87 def recorddirstateparents(dirstate, old, new):
89 """Records all dirstate parent changes in the journal."""
88 """Records all dirstate parent changes in the journal."""
90 old = list(old)
89 old = list(old)
91 new = list(new)
90 new = list(new)
92 if util.safehasattr(dirstate, 'journalstorage'):
91 if util.safehasattr(dirstate, 'journalstorage'):
93 # only record two hashes if there was a merge
92 # only record two hashes if there was a merge
94 oldhashes = old[:1] if old[1] == node.nullid else old
93 oldhashes = old[:1] if old[1] == node.nullid else old
95 newhashes = new[:1] if new[1] == node.nullid else new
94 newhashes = new[:1] if new[1] == node.nullid else new
96 dirstate.journalstorage.record(
95 dirstate.journalstorage.record(
97 wdirparenttype, '.', oldhashes, newhashes)
96 wdirparenttype, '.', oldhashes, newhashes)
98
97
99 # hooks to record bookmark changes (both local and remote)
98 # hooks to record bookmark changes (both local and remote)
100 def recordbookmarks(orig, store, fp):
99 def recordbookmarks(orig, store, fp):
101 """Records all bookmark changes in the journal."""
100 """Records all bookmark changes in the journal."""
102 repo = store._repo
101 repo = store._repo
103 if util.safehasattr(repo, 'journal'):
102 if util.safehasattr(repo, 'journal'):
104 oldmarks = bookmarks.bmstore(repo)
103 oldmarks = bookmarks.bmstore(repo)
105 for mark, value in store.iteritems():
104 for mark, value in store.iteritems():
106 oldvalue = oldmarks.get(mark, node.nullid)
105 oldvalue = oldmarks.get(mark, node.nullid)
107 if value != oldvalue:
106 if value != oldvalue:
108 repo.journal.record(bookmarktype, mark, oldvalue, value)
107 repo.journal.record(bookmarktype, mark, oldvalue, value)
109 return orig(store, fp)
108 return orig(store, fp)
110
109
111 # shared repository support
110 # shared repository support
112 def _readsharedfeatures(repo):
111 def _readsharedfeatures(repo):
113 """A set of shared features for this repository"""
112 """A set of shared features for this repository"""
114 try:
113 try:
115 return set(repo.vfs.read('shared').splitlines())
114 return set(repo.vfs.read('shared').splitlines())
116 except IOError as inst:
115 except IOError as inst:
117 if inst.errno != errno.ENOENT:
116 if inst.errno != errno.ENOENT:
118 raise
117 raise
119 return set()
118 return set()
120
119
121 def _mergeentriesiter(*iterables, **kwargs):
120 def _mergeentriesiter(*iterables, **kwargs):
122 """Given a set of sorted iterables, yield the next entry in merged order
121 """Given a set of sorted iterables, yield the next entry in merged order
123
122
124 Note that by default entries go from most recent to oldest.
123 Note that by default entries go from most recent to oldest.
125 """
124 """
126 order = kwargs.pop('order', max)
125 order = kwargs.pop('order', max)
127 iterables = [iter(it) for it in iterables]
126 iterables = [iter(it) for it in iterables]
128 # this tracks still active iterables; iterables are deleted as they are
127 # this tracks still active iterables; iterables are deleted as they are
129 # exhausted, which is why this is a dictionary and why each entry also
128 # exhausted, which is why this is a dictionary and why each entry also
130 # stores the key. Entries are mutable so we can store the next value each
129 # stores the key. Entries are mutable so we can store the next value each
131 # time.
130 # time.
132 iterable_map = {}
131 iterable_map = {}
133 for key, it in enumerate(iterables):
132 for key, it in enumerate(iterables):
134 try:
133 try:
135 iterable_map[key] = [next(it), key, it]
134 iterable_map[key] = [next(it), key, it]
136 except StopIteration:
135 except StopIteration:
137 # empty entry, can be ignored
136 # empty entry, can be ignored
138 pass
137 pass
139
138
140 while iterable_map:
139 while iterable_map:
141 value, key, it = order(iterable_map.itervalues())
140 value, key, it = order(iterable_map.itervalues())
142 yield value
141 yield value
143 try:
142 try:
144 iterable_map[key][0] = next(it)
143 iterable_map[key][0] = next(it)
145 except StopIteration:
144 except StopIteration:
146 # this iterable is empty, remove it from consideration
145 # this iterable is empty, remove it from consideration
147 del iterable_map[key]
146 del iterable_map[key]
148
147
149 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
148 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
150 """Mark this shared working copy as sharing journal information"""
149 """Mark this shared working copy as sharing journal information"""
151 with destrepo.wlock():
150 with destrepo.wlock():
152 orig(sourcerepo, destrepo, **kwargs)
151 orig(sourcerepo, destrepo, **kwargs)
153 with destrepo.vfs('shared', 'a') as fp:
152 with destrepo.vfs('shared', 'a') as fp:
154 fp.write('journal\n')
153 fp.write('journal\n')
155
154
156 def unsharejournal(orig, ui, repo, repopath):
155 def unsharejournal(orig, ui, repo, repopath):
157 """Copy shared journal entries into this repo when unsharing"""
156 """Copy shared journal entries into this repo when unsharing"""
158 if (repo.path == repopath and repo.shared() and
157 if (repo.path == repopath and repo.shared() and
159 util.safehasattr(repo, 'journal')):
158 util.safehasattr(repo, 'journal')):
160 sharedrepo = share._getsrcrepo(repo)
159 sharedrepo = share._getsrcrepo(repo)
161 sharedfeatures = _readsharedfeatures(repo)
160 sharedfeatures = _readsharedfeatures(repo)
162 if sharedrepo and sharedfeatures > {'journal'}:
161 if sharedrepo and sharedfeatures > {'journal'}:
163 # there is a shared repository and there are shared journal entries
162 # there is a shared repository and there are shared journal entries
164 # to copy. move shared data over from source to destination but
163 # to copy. move shared data over from source to destination but
165 # move the local file first
164 # move the local file first
166 if repo.vfs.exists('namejournal'):
165 if repo.vfs.exists('namejournal'):
167 journalpath = repo.vfs.join('namejournal')
166 journalpath = repo.vfs.join('namejournal')
168 util.rename(journalpath, journalpath + '.bak')
167 util.rename(journalpath, journalpath + '.bak')
169 storage = repo.journal
168 storage = repo.journal
170 local = storage._open(
169 local = storage._open(
171 repo.vfs, filename='namejournal.bak', _newestfirst=False)
170 repo.vfs, filename='namejournal.bak', _newestfirst=False)
172 shared = (
171 shared = (
173 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
172 e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
174 if sharednamespaces.get(e.namespace) in sharedfeatures)
173 if sharednamespaces.get(e.namespace) in sharedfeatures)
175 for entry in _mergeentriesiter(local, shared, order=min):
174 for entry in _mergeentriesiter(local, shared, order=min):
176 storage._write(repo.vfs, entry)
175 storage._write(repo.vfs, entry)
177
176
178 return orig(ui, repo, repopath)
177 return orig(ui, repo, repopath)
179
178
180 class journalentry(collections.namedtuple(
179 class journalentry(collections.namedtuple(
181 u'journalentry',
180 u'journalentry',
182 u'timestamp user command namespace name oldhashes newhashes')):
181 u'timestamp user command namespace name oldhashes newhashes')):
183 """Individual journal entry
182 """Individual journal entry
184
183
185 * timestamp: a mercurial (time, timezone) tuple
184 * timestamp: a mercurial (time, timezone) tuple
186 * user: the username that ran the command
185 * user: the username that ran the command
187 * namespace: the entry namespace, an opaque string
186 * namespace: the entry namespace, an opaque string
188 * name: the name of the changed item, opaque string with meaning in the
187 * name: the name of the changed item, opaque string with meaning in the
189 namespace
188 namespace
190 * command: the hg command that triggered this record
189 * command: the hg command that triggered this record
191 * oldhashes: a tuple of one or more binary hashes for the old location
190 * oldhashes: a tuple of one or more binary hashes for the old location
192 * newhashes: a tuple of one or more binary hashes for the new location
191 * newhashes: a tuple of one or more binary hashes for the new location
193
192
194 Handles serialisation from and to the storage format. Fields are
193 Handles serialisation from and to the storage format. Fields are
195 separated by newlines, hashes are written out in hex separated by commas,
194 separated by newlines, hashes are written out in hex separated by commas,
196 timestamp and timezone are separated by a space.
195 timestamp and timezone are separated by a space.
197
196
198 """
197 """
199 @classmethod
198 @classmethod
200 def fromstorage(cls, line):
199 def fromstorage(cls, line):
201 (time, user, command, namespace, name,
200 (time, user, command, namespace, name,
202 oldhashes, newhashes) = line.split('\n')
201 oldhashes, newhashes) = line.split('\n')
203 timestamp, tz = time.split()
202 timestamp, tz = time.split()
204 timestamp, tz = float(timestamp), int(tz)
203 timestamp, tz = float(timestamp), int(tz)
205 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
204 oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
206 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
205 newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
207 return cls(
206 return cls(
208 (timestamp, tz), user, command, namespace, name,
207 (timestamp, tz), user, command, namespace, name,
209 oldhashes, newhashes)
208 oldhashes, newhashes)
210
209
211 def __str__(self):
210 def __str__(self):
212 """String representation for storage"""
211 """String representation for storage"""
213 time = ' '.join(map(str, self.timestamp))
212 time = ' '.join(map(str, self.timestamp))
214 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
213 oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
215 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
214 newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
216 return '\n'.join((
215 return '\n'.join((
217 time, self.user, self.command, self.namespace, self.name,
216 time, self.user, self.command, self.namespace, self.name,
218 oldhashes, newhashes))
217 oldhashes, newhashes))
219
218
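# Editorial sketch: a hypothetical helper (not part of this module) showing the
# serialisation round trip described in the journalentry docstring above:
# fields joined by newlines, hashes hex-encoded and comma-separated, timestamp
# and timezone separated by a space. All values are illustrative.
def _demo_journalentry_roundtrip():
    # illustration only
    entry = journalentry((1500000000.0, 0), 'alice', 'hg up default',
                         'bookmarks', 'feature', (node.nullid,), (node.nullid,))
    line = str(entry)
    assert journalentry.fromstorage(line) == entry
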
220 class journalstorage(object):
219 class journalstorage(object):
221 """Storage for journal entries
220 """Storage for journal entries
222
221
223 Entries are divided over two files; one with entries that pertain to the
222 Entries are divided over two files; one with entries that pertain to the
224 local working copy *only*, and one with entries that are shared across
223 local working copy *only*, and one with entries that are shared across
225 multiple working copies when shared using the share extension.
224 multiple working copies when shared using the share extension.
226
225
227 Entries are stored with NUL bytes as separators. See the journalentry
226 Entries are stored with NUL bytes as separators. See the journalentry
228 class for the per-entry structure.
227 class for the per-entry structure.
229
228
230 The file format starts with an integer version, delimited by a NUL.
229 The file format starts with an integer version, delimited by a NUL.
231
230
232 This storage uses a dedicated lock; this makes it easier to avoid issues
231 This storage uses a dedicated lock; this makes it easier to avoid issues
233 with entries that are added when the regular wlock is unlocked (e.g.
232 with entries that are added when the regular wlock is unlocked (e.g.
234 the dirstate).
233 the dirstate).
235
234
236 """
235 """
237 _currentcommand = ()
236 _currentcommand = ()
238 _lockref = None
237 _lockref = None
239
238
240 def __init__(self, repo):
239 def __init__(self, repo):
241 self.user = util.getuser()
240 self.user = util.getuser()
242 self.ui = repo.ui
241 self.ui = repo.ui
243 self.vfs = repo.vfs
242 self.vfs = repo.vfs
244
243
245 # is this working copy using a shared storage?
244 # is this working copy using a shared storage?
246 self.sharedfeatures = self.sharedvfs = None
245 self.sharedfeatures = self.sharedvfs = None
247 if repo.shared():
246 if repo.shared():
248 features = _readsharedfeatures(repo)
247 features = _readsharedfeatures(repo)
249 sharedrepo = share._getsrcrepo(repo)
248 sharedrepo = share._getsrcrepo(repo)
250 if sharedrepo is not None and 'journal' in features:
249 if sharedrepo is not None and 'journal' in features:
251 self.sharedvfs = sharedrepo.vfs
250 self.sharedvfs = sharedrepo.vfs
252 self.sharedfeatures = features
251 self.sharedfeatures = features
253
252
254 # track the current command for recording in journal entries
253 # track the current command for recording in journal entries
255 @property
254 @property
256 def command(self):
255 def command(self):
257 commandstr = ' '.join(
256 commandstr = ' '.join(
258 map(util.shellquote, journalstorage._currentcommand))
257 map(util.shellquote, journalstorage._currentcommand))
259 if '\n' in commandstr:
258 if '\n' in commandstr:
260 # truncate multi-line commands
259 # truncate multi-line commands
261 commandstr = commandstr.partition('\n')[0] + ' ...'
260 commandstr = commandstr.partition('\n')[0] + ' ...'
262 return commandstr
261 return commandstr
263
262
264 @classmethod
263 @classmethod
265 def recordcommand(cls, *fullargs):
264 def recordcommand(cls, *fullargs):
266 """Set the current hg arguments, stored with recorded entries"""
265 """Set the current hg arguments, stored with recorded entries"""
267 # Set the current command on the class because we may have started
266 # Set the current command on the class because we may have started
268 # with a non-local repo (cloning for example).
267 # with a non-local repo (cloning for example).
269 cls._currentcommand = fullargs
268 cls._currentcommand = fullargs
270
269
271 def _currentlock(self, lockref):
270 def _currentlock(self, lockref):
272 """Returns the lock if it's held, or None if it's not.
271 """Returns the lock if it's held, or None if it's not.
273
272
274 (This is copied from the localrepo class)
273 (This is copied from the localrepo class)
275 """
274 """
276 if lockref is None:
275 if lockref is None:
277 return None
276 return None
278 l = lockref()
277 l = lockref()
279 if l is None or not l.held:
278 if l is None or not l.held:
280 return None
279 return None
281 return l
280 return l
282
281
283 def jlock(self, vfs):
282 def jlock(self, vfs):
284 """Create a lock for the journal file"""
283 """Create a lock for the journal file"""
285 if self._currentlock(self._lockref) is not None:
284 if self._currentlock(self._lockref) is not None:
286 raise error.Abort(_('journal lock does not support nesting'))
285 raise error.Abort(_('journal lock does not support nesting'))
287 desc = _('journal of %s') % vfs.base
286 desc = _('journal of %s') % vfs.base
288 try:
287 try:
289 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
288 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
290 except error.LockHeld as inst:
289 except error.LockHeld as inst:
291 self.ui.warn(
290 self.ui.warn(
292 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
291 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
293 # default to 600 seconds timeout
292 # default to 600 seconds timeout
294 l = lock.lock(
293 l = lock.lock(
295 vfs, 'namejournal.lock',
294 vfs, 'namejournal.lock',
296 int(self.ui.config("ui", "timeout", "600")), desc=desc)
295 int(self.ui.config("ui", "timeout", "600")), desc=desc)
297 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
296 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
298 self._lockref = weakref.ref(l)
297 self._lockref = weakref.ref(l)
299 return l
298 return l
300
299
301 def record(self, namespace, name, oldhashes, newhashes):
300 def record(self, namespace, name, oldhashes, newhashes):
302 """Record a new journal entry
301 """Record a new journal entry
303
302
304 * namespace: an opaque string; this can be used to filter on the type
303 * namespace: an opaque string; this can be used to filter on the type
305 of recorded entries.
304 of recorded entries.
306 * name: the name defining this entry; for bookmarks, this is the
305 * name: the name defining this entry; for bookmarks, this is the
307 bookmark name. Can be filtered on when retrieving entries.
306 bookmark name. Can be filtered on when retrieving entries.
308 * oldhashes and newhashes: each a single binary hash, or a list of
307 * oldhashes and newhashes: each a single binary hash, or a list of
309 binary hashes. These represent the old and new position of the named
308 binary hashes. These represent the old and new position of the named
310 item.
309 item.
311
310
312 """
311 """
313 if not isinstance(oldhashes, list):
312 if not isinstance(oldhashes, list):
314 oldhashes = [oldhashes]
313 oldhashes = [oldhashes]
315 if not isinstance(newhashes, list):
314 if not isinstance(newhashes, list):
316 newhashes = [newhashes]
315 newhashes = [newhashes]
317
316
318 entry = journalentry(
317 entry = journalentry(
319 util.makedate(), self.user, self.command, namespace, name,
318 util.makedate(), self.user, self.command, namespace, name,
320 oldhashes, newhashes)
319 oldhashes, newhashes)
321
320
322 vfs = self.vfs
321 vfs = self.vfs
323 if self.sharedvfs is not None:
322 if self.sharedvfs is not None:
324 # write to the shared repository if this feature is being
323 # write to the shared repository if this feature is being
325 # shared between working copies.
324 # shared between working copies.
326 if sharednamespaces.get(namespace) in self.sharedfeatures:
325 if sharednamespaces.get(namespace) in self.sharedfeatures:
327 vfs = self.sharedvfs
326 vfs = self.sharedvfs
328
327
329 self._write(vfs, entry)
328 self._write(vfs, entry)
330
329
331 def _write(self, vfs, entry):
330 def _write(self, vfs, entry):
332 with self.jlock(vfs):
331 with self.jlock(vfs):
333 version = None
332 version = None
334 # open file in append mode to ensure it is created if missing
333 # open file in append mode to ensure it is created if missing
335 with vfs('namejournal', mode='a+b', atomictemp=True) as f:
334 with vfs('namejournal', mode='a+b', atomictemp=True) as f:
336 f.seek(0, os.SEEK_SET)
335 f.seek(0, os.SEEK_SET)
337 # Read just enough bytes to get a version number (up to 2
336 # Read just enough bytes to get a version number (up to 2
338 # digits plus separator)
337 # digits plus separator)
339 version = f.read(3).partition('\0')[0]
338 version = f.read(3).partition('\0')[0]
340 if version and version != str(storageversion):
339 if version and version != str(storageversion):
341 # different version of the storage. Exit early (and not
340 # different version of the storage. Exit early (and not
342 # write anything) if this is not a version we can handle or
341 # write anything) if this is not a version we can handle or
343 # the file is corrupt. In future, perhaps rotate the file
342 # the file is corrupt. In future, perhaps rotate the file
344 # instead?
343 # instead?
345 self.ui.warn(
344 self.ui.warn(
346 _("unsupported journal file version '%s'\n") % version)
345 _("unsupported journal file version '%s'\n") % version)
347 return
346 return
348 if not version:
347 if not version:
349 # empty file, write version first
348 # empty file, write version first
350 f.write(str(storageversion) + '\0')
349 f.write(str(storageversion) + '\0')
351 f.seek(0, os.SEEK_END)
350 f.seek(0, os.SEEK_END)
352 f.write(str(entry) + '\0')
351 f.write(str(entry) + '\0')
353
352
354 def filtered(self, namespace=None, name=None):
353 def filtered(self, namespace=None, name=None):
355 """Yield all journal entries with the given namespace or name
354 """Yield all journal entries with the given namespace or name
356
355
357 Both the namespace and the name are optional; if neither is given all
356 Both the namespace and the name are optional; if neither is given all
358 entries in the journal are produced.
357 entries in the journal are produced.
359
358
360 Matching supports regular expressions by using the `re:` prefix
359 Matching supports regular expressions by using the `re:` prefix
361 (use `literal:` to match names or namespaces that start with `re:`)
360 (use `literal:` to match names or namespaces that start with `re:`)
362
361
363 """
362 """
364 if namespace is not None:
363 if namespace is not None:
365 namespace = util.stringmatcher(namespace)[-1]
364 namespace = util.stringmatcher(namespace)[-1]
366 if name is not None:
365 if name is not None:
367 name = util.stringmatcher(name)[-1]
366 name = util.stringmatcher(name)[-1]
368 for entry in self:
367 for entry in self:
369 if namespace is not None and not namespace(entry.namespace):
368 if namespace is not None and not namespace(entry.namespace):
370 continue
369 continue
371 if name is not None and not name(entry.name):
370 if name is not None and not name(entry.name):
372 continue
371 continue
373 yield entry
372 yield entry
374
373
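# Editorial sketch (hypothetical usage): from code holding a ui and a
# journal-enabled repo, the documented `re:` prefix lets you list, say, all
# bookmark moves whose name starts with "release-":
#
#     for entry in repo.journal.filtered(namespace=bookmarktype,
#                                        name='re:release-.*'):
#         ui.write('%s -> %s\n' % (entry.name, node.hex(entry.newhashes[0])))
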
375 def __iter__(self):
374 def __iter__(self):
376 """Iterate over the storage
375 """Iterate over the storage
377
376
378 Yields journalentry instances for each contained journal record.
377 Yields journalentry instances for each contained journal record.
379
378
380 """
379 """
381 local = self._open(self.vfs)
380 local = self._open(self.vfs)
382
381
383 if self.sharedvfs is None:
382 if self.sharedvfs is None:
384 return local
383 return local
385
384
386 # iterate over both local and shared entries, but only those
385 # iterate over both local and shared entries, but only those
387 # shared entries that are among the currently shared features
386 # shared entries that are among the currently shared features
388 shared = (
387 shared = (
389 e for e in self._open(self.sharedvfs)
388 e for e in self._open(self.sharedvfs)
390 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
389 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
391 return _mergeentriesiter(local, shared)
390 return _mergeentriesiter(local, shared)
392
391
393 def _open(self, vfs, filename='namejournal', _newestfirst=True):
392 def _open(self, vfs, filename='namejournal', _newestfirst=True):
394 if not vfs.exists(filename):
393 if not vfs.exists(filename):
395 return
394 return
396
395
397 with vfs(filename) as f:
396 with vfs(filename) as f:
398 raw = f.read()
397 raw = f.read()
399
398
400 lines = raw.split('\0')
399 lines = raw.split('\0')
401 version = lines and lines[0]
400 version = lines and lines[0]
402 if version != str(storageversion):
401 if version != str(storageversion):
403 version = version or _('not available')
402 version = version or _('not available')
404 raise error.Abort(_("unknown journal file version '%s'") % version)
403 raise error.Abort(_("unknown journal file version '%s'") % version)
405
404
406 # Skip the first line, it's a version number. Normally we iterate over
405 # Skip the first line, it's a version number. Normally we iterate over
407 # these in reverse order to list newest first; only when copying across
406 # these in reverse order to list newest first; only when copying across
408 # a shared storage do we forgo reversing.
407 # a shared storage do we forgo reversing.
409 lines = lines[1:]
408 lines = lines[1:]
410 if _newestfirst:
409 if _newestfirst:
411 lines = reversed(lines)
410 lines = reversed(lines)
412 for line in lines:
411 for line in lines:
413 if not line:
412 if not line:
414 continue
413 continue
415 yield journalentry.fromstorage(line)
414 yield journalentry.fromstorage(line)
416
415
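# Editorial sketch (illustrative; storageversion is assumed to be the integer
# version constant defined earlier in this module): the raw layout that
# _write() produces and _open() parses is simply
#
#     str(storageversion) + '\0' + str(entry1) + '\0' + str(entry2) + '\0' ...
#
# i.e. a version number followed by one serialised journalentry per record,
# all NUL-separated.
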
417 # journal reading
416 # journal reading
418 # log options that don't make sense for journal
417 # log options that don't make sense for journal
419 _ignoreopts = ('no-merges', 'graph')
418 _ignoreopts = ('no-merges', 'graph')
420 @command(
419 @command(
421 'journal', [
420 'journal', [
422 ('', 'all', None, 'show history for all names'),
421 ('', 'all', None, 'show history for all names'),
423 ('c', 'commits', None, 'show commit metadata'),
422 ('c', 'commits', None, 'show commit metadata'),
424 ] + [opt for opt in commands.logopts if opt[1] not in _ignoreopts],
423 ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
425 '[OPTION]... [BOOKMARKNAME]')
424 '[OPTION]... [BOOKMARKNAME]')
426 def journal(ui, repo, *args, **opts):
425 def journal(ui, repo, *args, **opts):
427 """show the previous position of bookmarks and the working copy
426 """show the previous position of bookmarks and the working copy
428
427
429 The journal is used to see the previous commits that bookmarks and the
428 The journal is used to see the previous commits that bookmarks and the
430 working copy pointed to. By default the previous locations of the working
429 working copy pointed to. By default the previous locations of the working
431 copy are shown. Passing a bookmark name will show all the previous positions of
430 copy are shown. Passing a bookmark name will show all the previous positions of
432 that bookmark. Use the --all switch to show previous locations for all
431 that bookmark. Use the --all switch to show previous locations for all
433 bookmarks and the working copy; each line will then include the bookmark
432 bookmarks and the working copy; each line will then include the bookmark
434 name, or '.' for the working copy, as well.
433 name, or '.' for the working copy, as well.
435
434
436 If `name` starts with `re:`, the remainder of the name is treated as
435 If `name` starts with `re:`, the remainder of the name is treated as
437 a regular expression. To match a name that actually starts with `re:`,
436 a regular expression. To match a name that actually starts with `re:`,
438 use the prefix `literal:`.
437 use the prefix `literal:`.
439
438
440 By default hg journal only shows the commit hash and the command that was
439 By default hg journal only shows the commit hash and the command that was
441 running at that time. -v/--verbose will show the prior hash, the user, and
440 running at that time. -v/--verbose will show the prior hash, the user, and
442 the time at which it happened.
441 the time at which it happened.
443
442
444 Use -c/--commits to output log information on each commit hash; at this
443 Use -c/--commits to output log information on each commit hash; at this
445 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
444 point you can use the usual `--patch`, `--git`, `--stat` and `--template`
446 switches to alter the log output for these.
445 switches to alter the log output for these.
447
446
448 `hg journal -T json` can be used to produce machine readable output.
447 `hg journal -T json` can be used to produce machine readable output.
449
448
450 """
449 """
451 name = '.'
450 name = '.'
452 if opts.get('all'):
451 if opts.get('all'):
453 if args:
452 if args:
454 raise error.Abort(
453 raise error.Abort(
455 _("You can't combine --all and filtering on a name"))
454 _("You can't combine --all and filtering on a name"))
456 name = None
455 name = None
457 if args:
456 if args:
458 name = args[0]
457 name = args[0]
459
458
460 fm = ui.formatter('journal', opts)
459 fm = ui.formatter('journal', opts)
461
460
462 if opts.get("template") != "json":
461 if opts.get("template") != "json":
463 if name is None:
462 if name is None:
464 displayname = _('the working copy and bookmarks')
463 displayname = _('the working copy and bookmarks')
465 else:
464 else:
466 displayname = "'%s'" % name
465 displayname = "'%s'" % name
467 ui.status(_("previous locations of %s:\n") % displayname)
466 ui.status(_("previous locations of %s:\n") % displayname)
468
467
469 limit = cmdutil.loglimit(opts)
468 limit = cmdutil.loglimit(opts)
470 entry = None
469 entry = None
471 for count, entry in enumerate(repo.journal.filtered(name=name)):
470 for count, entry in enumerate(repo.journal.filtered(name=name)):
472 if count == limit:
471 if count == limit:
473 break
472 break
474 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
473 newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
475 name='node', sep=',')
474 name='node', sep=',')
476 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
475 oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
477 name='node', sep=',')
476 name='node', sep=',')
478
477
479 fm.startitem()
478 fm.startitem()
480 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
479 fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
481 fm.write('newhashes', '%s', newhashesstr)
480 fm.write('newhashes', '%s', newhashesstr)
482 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
481 fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
483 fm.condwrite(
482 fm.condwrite(
484 opts.get('all') or name.startswith('re:'),
483 opts.get('all') or name.startswith('re:'),
485 'name', ' %-8s', entry.name)
484 'name', ' %-8s', entry.name)
486
485
487 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
486 timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
488 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
487 fm.condwrite(ui.verbose, 'date', ' %s', timestring)
489 fm.write('command', ' %s\n', entry.command)
488 fm.write('command', ' %s\n', entry.command)
490
489
491 if opts.get("commits"):
490 if opts.get("commits"):
492 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
491 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
493 for hash in entry.newhashes:
492 for hash in entry.newhashes:
494 try:
493 try:
495 ctx = repo[hash]
494 ctx = repo[hash]
496 displayer.show(ctx)
495 displayer.show(ctx)
497 except error.RepoLookupError as e:
496 except error.RepoLookupError as e:
498 fm.write('repolookuperror', "%s\n\n", str(e))
497 fm.write('repolookuperror', "%s\n\n", str(e))
499 displayer.close()
498 displayer.close()
500
499
501 fm.end()
500 fm.end()
502
501
503 if entry is None:
502 if entry is None:
504 ui.status(_("no recorded locations\n"))
503 ui.status(_("no recorded locations\n"))
@@ -1,760 +1,759 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific you are in your filename patterns, the less
57 The more specific you are in your filename patterns, the less
58 speed you lose in huge repositories.
58 speed you lose in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -0700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -0700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
85
85
86 from __future__ import absolute_import
86 from __future__ import absolute_import
87
87
88 import os
88 import os
89 import re
89 import re
90 import tempfile
90 import tempfile
91
91
92 from mercurial.i18n import _
92 from mercurial.i18n import _
93 from mercurial.hgweb import webcommands
93 from mercurial.hgweb import webcommands
94
94
95 from mercurial import (
95 from mercurial import (
96 cmdutil,
96 cmdutil,
97 commands,
98 context,
97 context,
99 dispatch,
98 dispatch,
100 error,
99 error,
101 extensions,
100 extensions,
102 filelog,
101 filelog,
103 localrepo,
102 localrepo,
104 match,
103 match,
105 patch,
104 patch,
106 pathutil,
105 pathutil,
107 registrar,
106 registrar,
108 scmutil,
107 scmutil,
109 templatefilters,
108 templatefilters,
110 util,
109 util,
111 )
110 )
112
111
113 cmdtable = {}
112 cmdtable = {}
114 command = registrar.command(cmdtable)
113 command = registrar.command(cmdtable)
115 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
114 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
116 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
115 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
117 # be specifying the version(s) of Mercurial they are tested with, or
116 # be specifying the version(s) of Mercurial they are tested with, or
118 # leave the attribute unspecified.
117 # leave the attribute unspecified.
119 testedwith = 'ships-with-hg-core'
118 testedwith = 'ships-with-hg-core'
120
119
121 # hg commands that do not act on keywords
120 # hg commands that do not act on keywords
122 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
121 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
123 ' outgoing push tip verify convert email glog')
122 ' outgoing push tip verify convert email glog')
124
123
125 # hg commands that trigger expansion only when writing to working dir,
124 # hg commands that trigger expansion only when writing to working dir,
126 # not when reading filelog, and unexpand when reading from working dir
125 # not when reading filelog, and unexpand when reading from working dir
127 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
126 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
128 ' unshelve rebase graft backout histedit fetch')
127 ' unshelve rebase graft backout histedit fetch')
129
128
130 # names of extensions using dorecord
129 # names of extensions using dorecord
131 recordextensions = 'record'
130 recordextensions = 'record'
132
131
133 colortable = {
132 colortable = {
134 'kwfiles.enabled': 'green bold',
133 'kwfiles.enabled': 'green bold',
135 'kwfiles.deleted': 'cyan bold underline',
134 'kwfiles.deleted': 'cyan bold underline',
136 'kwfiles.enabledunknown': 'green',
135 'kwfiles.enabledunknown': 'green',
137 'kwfiles.ignored': 'bold',
136 'kwfiles.ignored': 'bold',
138 'kwfiles.ignoredunknown': 'none'
137 'kwfiles.ignoredunknown': 'none'
139 }
138 }
140
139
141 templatefilter = registrar.templatefilter()
140 templatefilter = registrar.templatefilter()
142
141
143 # date like in cvs' $Date
142 # date like in cvs' $Date
144 @templatefilter('utcdate')
143 @templatefilter('utcdate')
145 def utcdate(text):
144 def utcdate(text):
146 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
145 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
147 '''
146 '''
148 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
147 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
149 # date like in svn's $Date
148 # date like in svn's $Date
150 @templatefilter('svnisodate')
149 @templatefilter('svnisodate')
151 def svnisodate(text):
150 def svnisodate(text):
152 '''Date. Returns a date in this format: "2009-08-18 13:00:13
151 '''Date. Returns a date in this format: "2009-08-18 13:00:13
153 +0200 (Tue, 18 Aug 2009)".
152 +0200 (Tue, 18 Aug 2009)".
154 '''
153 '''
155 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
154 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
156 # date like in svn's $Id
155 # date like in svn's $Id
157 @templatefilter('svnutcdate')
156 @templatefilter('svnutcdate')
158 def svnutcdate(text):
157 def svnutcdate(text):
159 '''Date. Returns a UTC-date in this format: "2009-08-18
158 '''Date. Returns a UTC-date in this format: "2009-08-18
160 11:00:13Z".
159 11:00:13Z".
161 '''
160 '''
162 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
161 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
163
162
164 # make keyword tools accessible
163 # make keyword tools accessible
165 kwtools = {'templater': None, 'hgcmd': ''}
164 kwtools = {'templater': None, 'hgcmd': ''}
166
165
167 def _defaultkwmaps(ui):
166 def _defaultkwmaps(ui):
168 '''Returns default keywordmaps according to keywordset configuration.'''
167 '''Returns default keywordmaps according to keywordset configuration.'''
169 templates = {
168 templates = {
170 'Revision': '{node|short}',
169 'Revision': '{node|short}',
171 'Author': '{author|user}',
170 'Author': '{author|user}',
172 }
171 }
173 kwsets = ({
172 kwsets = ({
174 'Date': '{date|utcdate}',
173 'Date': '{date|utcdate}',
175 'RCSfile': '{file|basename},v',
174 'RCSfile': '{file|basename},v',
176 'RCSFile': '{file|basename},v', # kept for backwards compatibility
175 'RCSFile': '{file|basename},v', # kept for backwards compatibility
177 # with hg-keyword
176 # with hg-keyword
178 'Source': '{root}/{file},v',
177 'Source': '{root}/{file},v',
179 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
178 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
180 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
179 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
181 }, {
180 }, {
182 'Date': '{date|svnisodate}',
181 'Date': '{date|svnisodate}',
183 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
182 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
184 'LastChangedRevision': '{node|short}',
183 'LastChangedRevision': '{node|short}',
185 'LastChangedBy': '{author|user}',
184 'LastChangedBy': '{author|user}',
186 'LastChangedDate': '{date|svnisodate}',
185 'LastChangedDate': '{date|svnisodate}',
187 })
186 })
188 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
187 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
189 return templates
188 return templates
190
189
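# Editorial sketch (illustrative values): with the cvs-style 'Id' template
# above ('{file|basename},v {node|short} {date|utcdate} {author|user}'), a
# tracked file containing "$Id$" would expand to something like
#
#     $Id: demo.txt,v 1a2b3c4d5e6f 2009/08/18 11:00:13 alice $
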
191 def _shrinktext(text, subfunc):
190 def _shrinktext(text, subfunc):
192 '''Helper for keyword expansion removal in text.
191 '''Helper for keyword expansion removal in text.
193 Depending on subfunc also returns number of substitutions.'''
192 Depending on subfunc also returns number of substitutions.'''
194 return subfunc(r'$\1$', text)
193 return subfunc(r'$\1$', text)
195
194
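# Editorial sketch: a hypothetical helper (not part of this module) showing
# _shrinktext() above undoing an expansion, using a pattern of the same shape
# as kwtemplater.rekwexp defined below.
def _demo_shrinktext():
    # illustration only
    re_kwexp = re.compile(r'\$(Id|Revision): [^$\n\r]*? \$')
    text = 'released as $Revision: 1a2b3c4d5e6f $\n'
    assert _shrinktext(text, re_kwexp.sub) == 'released as $Revision$\n'
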
196 def _preselect(wstatus, changed):
195 def _preselect(wstatus, changed):
197 '''Retrieves modified and added files from a working directory state
196 '''Retrieves modified and added files from a working directory state
198 and returns the subset of each contained in given changed files
197 and returns the subset of each contained in given changed files
199 retrieved from a change context.'''
198 retrieved from a change context.'''
200 modified = [f for f in wstatus.modified if f in changed]
199 modified = [f for f in wstatus.modified if f in changed]
201 added = [f for f in wstatus.added if f in changed]
200 added = [f for f in wstatus.added if f in changed]
202 return modified, added
201 return modified, added
203
202
204
203
205 class kwtemplater(object):
204 class kwtemplater(object):
206 '''
205 '''
207 Sets up keyword templates, corresponding keyword regex, and
206 Sets up keyword templates, corresponding keyword regex, and
208 provides keyword substitution functions.
207 provides keyword substitution functions.
209 '''
208 '''
210
209
211 def __init__(self, ui, repo, inc, exc):
210 def __init__(self, ui, repo, inc, exc):
212 self.ui = ui
211 self.ui = ui
213 self.repo = repo
212 self.repo = repo
214 self.match = match.match(repo.root, '', [], inc, exc)
213 self.match = match.match(repo.root, '', [], inc, exc)
215 self.restrict = kwtools['hgcmd'] in restricted.split()
214 self.restrict = kwtools['hgcmd'] in restricted.split()
216 self.postcommit = False
215 self.postcommit = False
217
216
218 kwmaps = self.ui.configitems('keywordmaps')
217 kwmaps = self.ui.configitems('keywordmaps')
219 if kwmaps: # override default templates
218 if kwmaps: # override default templates
220 self.templates = dict(kwmaps)
219 self.templates = dict(kwmaps)
221 else:
220 else:
222 self.templates = _defaultkwmaps(self.ui)
221 self.templates = _defaultkwmaps(self.ui)
223
222
224 @util.propertycache
223 @util.propertycache
225 def escape(self):
224 def escape(self):
226 '''Returns bar-separated and escaped keywords.'''
225 '''Returns bar-separated and escaped keywords.'''
227 return '|'.join(map(re.escape, self.templates.keys()))
226 return '|'.join(map(re.escape, self.templates.keys()))
228
227
229 @util.propertycache
228 @util.propertycache
230 def rekw(self):
229 def rekw(self):
231 '''Returns regex for unexpanded keywords.'''
230 '''Returns regex for unexpanded keywords.'''
232 return re.compile(r'\$(%s)\$' % self.escape)
231 return re.compile(r'\$(%s)\$' % self.escape)
233
232
234 @util.propertycache
233 @util.propertycache
235 def rekwexp(self):
234 def rekwexp(self):
236 '''Returns regex for expanded keywords.'''
235 '''Returns regex for expanded keywords.'''
237 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
236 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
238
237
239 def substitute(self, data, path, ctx, subfunc):
238 def substitute(self, data, path, ctx, subfunc):
240 '''Replaces keywords in data with expanded template.'''
239 '''Replaces keywords in data with expanded template.'''
241 def kwsub(mobj):
240 def kwsub(mobj):
242 kw = mobj.group(1)
241 kw = mobj.group(1)
243 ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
242 ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
244 self.templates[kw], '', False)
243 self.templates[kw], '', False)
245 self.ui.pushbuffer()
244 self.ui.pushbuffer()
246 ct.show(ctx, root=self.repo.root, file=path)
245 ct.show(ctx, root=self.repo.root, file=path)
247 ekw = templatefilters.firstline(self.ui.popbuffer())
246 ekw = templatefilters.firstline(self.ui.popbuffer())
248 return '$%s: %s $' % (kw, ekw)
247 return '$%s: %s $' % (kw, ekw)
249 return subfunc(kwsub, data)
248 return subfunc(kwsub, data)
250
249
251 def linkctx(self, path, fileid):
250 def linkctx(self, path, fileid):
252 '''Similar to filelog.linkrev, but returns a changectx.'''
251 '''Similar to filelog.linkrev, but returns a changectx.'''
253 return self.repo.filectx(path, fileid=fileid).changectx()
252 return self.repo.filectx(path, fileid=fileid).changectx()
254
253
255 def expand(self, path, node, data):
254 def expand(self, path, node, data):
256 '''Returns data with keywords expanded.'''
255 '''Returns data with keywords expanded.'''
257 if not self.restrict and self.match(path) and not util.binary(data):
256 if not self.restrict and self.match(path) and not util.binary(data):
258 ctx = self.linkctx(path, node)
257 ctx = self.linkctx(path, node)
259 return self.substitute(data, path, ctx, self.rekw.sub)
258 return self.substitute(data, path, ctx, self.rekw.sub)
260 return data
259 return data
261
260
262 def iskwfile(self, cand, ctx):
261 def iskwfile(self, cand, ctx):
263 '''Returns subset of candidates which are configured for keyword
262 '''Returns subset of candidates which are configured for keyword
264 expansion but are not symbolic links.'''
263 expansion but are not symbolic links.'''
265 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
264 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
266
265
267 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
266 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
268 '''Overwrites selected files expanding/shrinking keywords.'''
267 '''Overwrites selected files expanding/shrinking keywords.'''
269 if self.restrict or lookup or self.postcommit: # exclude kw_copy
268 if self.restrict or lookup or self.postcommit: # exclude kw_copy
270 candidates = self.iskwfile(candidates, ctx)
269 candidates = self.iskwfile(candidates, ctx)
271 if not candidates:
270 if not candidates:
272 return
271 return
273 kwcmd = self.restrict and lookup # kwexpand/kwshrink
272 kwcmd = self.restrict and lookup # kwexpand/kwshrink
274 if self.restrict or expand and lookup:
273 if self.restrict or expand and lookup:
275 mf = ctx.manifest()
274 mf = ctx.manifest()
276 if self.restrict or rekw:
275 if self.restrict or rekw:
277 re_kw = self.rekw
276 re_kw = self.rekw
278 else:
277 else:
279 re_kw = self.rekwexp
278 re_kw = self.rekwexp
280 if expand:
279 if expand:
281 msg = _('overwriting %s expanding keywords\n')
280 msg = _('overwriting %s expanding keywords\n')
282 else:
281 else:
283 msg = _('overwriting %s shrinking keywords\n')
282 msg = _('overwriting %s shrinking keywords\n')
284 for f in candidates:
283 for f in candidates:
285 if self.restrict:
284 if self.restrict:
286 data = self.repo.file(f).read(mf[f])
285 data = self.repo.file(f).read(mf[f])
287 else:
286 else:
288 data = self.repo.wread(f)
287 data = self.repo.wread(f)
289 if util.binary(data):
288 if util.binary(data):
290 continue
289 continue
291 if expand:
290 if expand:
292 parents = ctx.parents()
291 parents = ctx.parents()
293 if lookup:
292 if lookup:
294 ctx = self.linkctx(f, mf[f])
293 ctx = self.linkctx(f, mf[f])
295 elif self.restrict and len(parents) > 1:
294 elif self.restrict and len(parents) > 1:
296 # merge commit
295 # merge commit
297 # in case of conflict f is in modified state during
296 # in case of conflict f is in modified state during
298 # merge, even if f does not differ from f in parent
297 # merge, even if f does not differ from f in parent
299 for p in parents:
298 for p in parents:
300 if f in p and not p[f].cmp(ctx[f]):
299 if f in p and not p[f].cmp(ctx[f]):
301 ctx = p[f].changectx()
300 ctx = p[f].changectx()
302 break
301 break
303 data, found = self.substitute(data, f, ctx, re_kw.subn)
302 data, found = self.substitute(data, f, ctx, re_kw.subn)
304 elif self.restrict:
303 elif self.restrict:
305 found = re_kw.search(data)
304 found = re_kw.search(data)
306 else:
305 else:
307 data, found = _shrinktext(data, re_kw.subn)
306 data, found = _shrinktext(data, re_kw.subn)
308 if found:
307 if found:
309 self.ui.note(msg % f)
308 self.ui.note(msg % f)
310 fp = self.repo.wvfs(f, "wb", atomictemp=True)
309 fp = self.repo.wvfs(f, "wb", atomictemp=True)
311 fp.write(data)
310 fp.write(data)
312 fp.close()
311 fp.close()
313 if kwcmd:
312 if kwcmd:
314 self.repo.dirstate.normal(f)
313 self.repo.dirstate.normal(f)
315 elif self.postcommit:
314 elif self.postcommit:
316 self.repo.dirstate.normallookup(f)
315 self.repo.dirstate.normallookup(f)
317
316
318 def shrink(self, fname, text):
317 def shrink(self, fname, text):
319 '''Returns text with all keyword substitutions removed.'''
318 '''Returns text with all keyword substitutions removed.'''
320 if self.match(fname) and not util.binary(text):
319 if self.match(fname) and not util.binary(text):
321 return _shrinktext(text, self.rekwexp.sub)
320 return _shrinktext(text, self.rekwexp.sub)
322 return text
321 return text
323
322
324 def shrinklines(self, fname, lines):
323 def shrinklines(self, fname, lines):
325 '''Returns lines with keyword substitutions removed.'''
324 '''Returns lines with keyword substitutions removed.'''
326 if self.match(fname):
325 if self.match(fname):
327 text = ''.join(lines)
326 text = ''.join(lines)
328 if not util.binary(text):
327 if not util.binary(text):
329 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
328 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
330 return lines
329 return lines
331
330
332 def wread(self, fname, data):
331 def wread(self, fname, data):
333 '''If in restricted mode returns data read from wdir with
332 '''If in restricted mode returns data read from wdir with
334 keyword substitutions removed.'''
333 keyword substitutions removed.'''
335 if self.restrict:
334 if self.restrict:
336 return self.shrink(fname, data)
335 return self.shrink(fname, data)
337 return data
336 return data
338
337
339 class kwfilelog(filelog.filelog):
338 class kwfilelog(filelog.filelog):
340 '''
339 '''
341 Subclass of filelog to hook into its read, add, cmp methods.
340 Subclass of filelog to hook into its read, add, cmp methods.
342 Keywords are "stored" unexpanded, and processed on reading.
341 Keywords are "stored" unexpanded, and processed on reading.
343 '''
342 '''
344 def __init__(self, opener, kwt, path):
343 def __init__(self, opener, kwt, path):
345 super(kwfilelog, self).__init__(opener, path)
344 super(kwfilelog, self).__init__(opener, path)
346 self.kwt = kwt
345 self.kwt = kwt
347 self.path = path
346 self.path = path
348
347
349 def read(self, node):
348 def read(self, node):
350 '''Expands keywords when reading filelog.'''
349 '''Expands keywords when reading filelog.'''
351 data = super(kwfilelog, self).read(node)
350 data = super(kwfilelog, self).read(node)
352 if self.renamed(node):
351 if self.renamed(node):
353 return data
352 return data
354 return self.kwt.expand(self.path, node, data)
353 return self.kwt.expand(self.path, node, data)
355
354
356 def add(self, text, meta, tr, link, p1=None, p2=None):
355 def add(self, text, meta, tr, link, p1=None, p2=None):
357 '''Removes keyword substitutions when adding to filelog.'''
356 '''Removes keyword substitutions when adding to filelog.'''
358 text = self.kwt.shrink(self.path, text)
357 text = self.kwt.shrink(self.path, text)
359 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
358 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
360
359
361 def cmp(self, node, text):
360 def cmp(self, node, text):
362 '''Removes keyword substitutions for comparison.'''
361 '''Removes keyword substitutions for comparison.'''
363 text = self.kwt.shrink(self.path, text)
362 text = self.kwt.shrink(self.path, text)
364 return super(kwfilelog, self).cmp(node, text)
363 return super(kwfilelog, self).cmp(node, text)
365
364
366 def _status(ui, repo, wctx, kwt, *pats, **opts):
365 def _status(ui, repo, wctx, kwt, *pats, **opts):
367 '''Bails out if [keyword] configuration is not active.
366 '''Bails out if [keyword] configuration is not active.
368 Returns status of working directory.'''
367 Returns status of working directory.'''
369 if kwt:
368 if kwt:
370 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
369 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
371 unknown=opts.get('unknown') or opts.get('all'))
370 unknown=opts.get('unknown') or opts.get('all'))
372 if ui.configitems('keyword'):
371 if ui.configitems('keyword'):
373 raise error.Abort(_('[keyword] patterns cannot match'))
372 raise error.Abort(_('[keyword] patterns cannot match'))
374 raise error.Abort(_('no [keyword] patterns configured'))
373 raise error.Abort(_('no [keyword] patterns configured'))
375
374
376 def _kwfwrite(ui, repo, expand, *pats, **opts):
375 def _kwfwrite(ui, repo, expand, *pats, **opts):
377 '''Selects files and passes them to kwtemplater.overwrite.'''
376 '''Selects files and passes them to kwtemplater.overwrite.'''
378 wctx = repo[None]
377 wctx = repo[None]
379 if len(wctx.parents()) > 1:
378 if len(wctx.parents()) > 1:
380 raise error.Abort(_('outstanding uncommitted merge'))
379 raise error.Abort(_('outstanding uncommitted merge'))
381 kwt = kwtools['templater']
380 kwt = kwtools['templater']
382 with repo.wlock():
381 with repo.wlock():
383 status = _status(ui, repo, wctx, kwt, *pats, **opts)
382 status = _status(ui, repo, wctx, kwt, *pats, **opts)
384 if status.modified or status.added or status.removed or status.deleted:
383 if status.modified or status.added or status.removed or status.deleted:
385 raise error.Abort(_('outstanding uncommitted changes'))
384 raise error.Abort(_('outstanding uncommitted changes'))
386 kwt.overwrite(wctx, status.clean, True, expand)
385 kwt.overwrite(wctx, status.clean, True, expand)
387
386
388 @command('kwdemo',
387 @command('kwdemo',
389 [('d', 'default', None, _('show default keyword template maps')),
388 [('d', 'default', None, _('show default keyword template maps')),
390 ('f', 'rcfile', '',
389 ('f', 'rcfile', '',
391 _('read maps from rcfile'), _('FILE'))],
390 _('read maps from rcfile'), _('FILE'))],
392 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
391 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
393 optionalrepo=True)
392 optionalrepo=True)
394 def demo(ui, repo, *args, **opts):
393 def demo(ui, repo, *args, **opts):
395 '''print [keywordmaps] configuration and an expansion example
394 '''print [keywordmaps] configuration and an expansion example
396
395
397 Show current, custom, or default keyword template maps and their
396 Show current, custom, or default keyword template maps and their
398 expansions.
397 expansions.
399
398
400 Extend the current configuration by specifying maps as arguments
399 Extend the current configuration by specifying maps as arguments
401 and using -f/--rcfile to source an external hgrc file.
400 and using -f/--rcfile to source an external hgrc file.
402
401
403 Use -d/--default to disable current configuration.
402 Use -d/--default to disable current configuration.
404
403
405 See :hg:`help templates` for information on templates and filters.
404 See :hg:`help templates` for information on templates and filters.
406 '''
405 '''
407 def demoitems(section, items):
406 def demoitems(section, items):
408 ui.write('[%s]\n' % section)
407 ui.write('[%s]\n' % section)
409 for k, v in sorted(items):
408 for k, v in sorted(items):
410 ui.write('%s = %s\n' % (k, v))
409 ui.write('%s = %s\n' % (k, v))
411
410
412 fn = 'demo.txt'
411 fn = 'demo.txt'
413 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
412 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
414 ui.note(_('creating temporary repository at %s\n') % tmpdir)
413 ui.note(_('creating temporary repository at %s\n') % tmpdir)
415 if repo is None:
414 if repo is None:
416 baseui = ui
415 baseui = ui
417 else:
416 else:
418 baseui = repo.baseui
417 baseui = repo.baseui
419 repo = localrepo.localrepository(baseui, tmpdir, True)
418 repo = localrepo.localrepository(baseui, tmpdir, True)
420 ui.setconfig('keyword', fn, '', 'keyword')
419 ui.setconfig('keyword', fn, '', 'keyword')
421 svn = ui.configbool('keywordset', 'svn')
420 svn = ui.configbool('keywordset', 'svn')
422 # explicitly set keywordset for demo output
421 # explicitly set keywordset for demo output
423 ui.setconfig('keywordset', 'svn', svn, 'keyword')
422 ui.setconfig('keywordset', 'svn', svn, 'keyword')
424
423
425 uikwmaps = ui.configitems('keywordmaps')
424 uikwmaps = ui.configitems('keywordmaps')
426 if args or opts.get('rcfile'):
425 if args or opts.get('rcfile'):
427 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
426 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
428 if uikwmaps:
427 if uikwmaps:
429 ui.status(_('\textending current template maps\n'))
428 ui.status(_('\textending current template maps\n'))
430 if opts.get('default') or not uikwmaps:
429 if opts.get('default') or not uikwmaps:
431 if svn:
430 if svn:
432 ui.status(_('\toverriding default svn keywordset\n'))
431 ui.status(_('\toverriding default svn keywordset\n'))
433 else:
432 else:
434 ui.status(_('\toverriding default cvs keywordset\n'))
433 ui.status(_('\toverriding default cvs keywordset\n'))
435 if opts.get('rcfile'):
434 if opts.get('rcfile'):
436 ui.readconfig(opts.get('rcfile'))
435 ui.readconfig(opts.get('rcfile'))
437 if args:
436 if args:
438 # simulate hgrc parsing
437 # simulate hgrc parsing
439 rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
438 rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
440 repo.vfs.write('hgrc', rcmaps)
439 repo.vfs.write('hgrc', rcmaps)
441 ui.readconfig(repo.vfs.join('hgrc'))
440 ui.readconfig(repo.vfs.join('hgrc'))
442 kwmaps = dict(ui.configitems('keywordmaps'))
441 kwmaps = dict(ui.configitems('keywordmaps'))
443 elif opts.get('default'):
442 elif opts.get('default'):
444 if svn:
443 if svn:
445 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
444 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
446 else:
445 else:
447 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
446 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
448 kwmaps = _defaultkwmaps(ui)
447 kwmaps = _defaultkwmaps(ui)
449 if uikwmaps:
448 if uikwmaps:
450 ui.status(_('\tdisabling current template maps\n'))
449 ui.status(_('\tdisabling current template maps\n'))
451 for k, v in kwmaps.iteritems():
450 for k, v in kwmaps.iteritems():
452 ui.setconfig('keywordmaps', k, v, 'keyword')
451 ui.setconfig('keywordmaps', k, v, 'keyword')
453 else:
452 else:
454 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
453 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
455 if uikwmaps:
454 if uikwmaps:
456 kwmaps = dict(uikwmaps)
455 kwmaps = dict(uikwmaps)
457 else:
456 else:
458 kwmaps = _defaultkwmaps(ui)
457 kwmaps = _defaultkwmaps(ui)
459
458
460 uisetup(ui)
459 uisetup(ui)
461 reposetup(ui, repo)
460 reposetup(ui, repo)
462 ui.write(('[extensions]\nkeyword =\n'))
461 ui.write(('[extensions]\nkeyword =\n'))
463 demoitems('keyword', ui.configitems('keyword'))
462 demoitems('keyword', ui.configitems('keyword'))
464 demoitems('keywordset', ui.configitems('keywordset'))
463 demoitems('keywordset', ui.configitems('keywordset'))
465 demoitems('keywordmaps', kwmaps.iteritems())
464 demoitems('keywordmaps', kwmaps.iteritems())
466 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
465 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
467 repo.wvfs.write(fn, keywords)
466 repo.wvfs.write(fn, keywords)
468 repo[None].add([fn])
467 repo[None].add([fn])
469 ui.note(_('\nkeywords written to %s:\n') % fn)
468 ui.note(_('\nkeywords written to %s:\n') % fn)
470 ui.note(keywords)
469 ui.note(keywords)
471 with repo.wlock():
470 with repo.wlock():
472 repo.dirstate.setbranch('demobranch')
471 repo.dirstate.setbranch('demobranch')
473 for name, cmd in ui.configitems('hooks'):
472 for name, cmd in ui.configitems('hooks'):
474 if name.split('.', 1)[0].find('commit') > -1:
473 if name.split('.', 1)[0].find('commit') > -1:
475 repo.ui.setconfig('hooks', name, '', 'keyword')
474 repo.ui.setconfig('hooks', name, '', 'keyword')
476 msg = _('hg keyword configuration and expansion example')
475 msg = _('hg keyword configuration and expansion example')
477 ui.note(("hg ci -m '%s'\n" % msg))
476 ui.note(("hg ci -m '%s'\n" % msg))
478 repo.commit(text=msg)
477 repo.commit(text=msg)
479 ui.status(_('\n\tkeywords expanded\n'))
478 ui.status(_('\n\tkeywords expanded\n'))
480 ui.write(repo.wread(fn))
479 ui.write(repo.wread(fn))
481 repo.wvfs.rmtree(repo.root)
480 repo.wvfs.rmtree(repo.root)
482
481
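As a rough, self-contained illustration of the substitution the keyword demo above prints (not the extension's real implementation, which drives Mercurial's template engine through kwtemplater), expanding bare $Keyword$ markers against a keyword map can be sketched with a plain regex; the map and expansion values below are made up for the example::

    import re

    def expand_keywords(text, values):
        """Replace bare $Keyword$ markers with '$Keyword: value $'."""
        pattern = re.compile(r'\$(%s)\$' % '|'.join(map(re.escape, values)))
        return pattern.sub(lambda m: '$%s: %s $' % (m.group(1), values[m.group(1)]), text)

    # hypothetical expansion values, shaped like the demo's Id keyword output
    print(expand_keywords('$Id$\n', {'Id': 'demo.txt,v a1b2c3d4e5f6 2017/05/01 12:00:00 alice'}))

The real templater additionally honours the [keywordmaps] templates shown in the demo output.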
483 @command('kwexpand',
482 @command('kwexpand',
484 commands.walkopts,
483 cmdutil.walkopts,
485 _('hg kwexpand [OPTION]... [FILE]...'),
484 _('hg kwexpand [OPTION]... [FILE]...'),
486 inferrepo=True)
485 inferrepo=True)
487 def expand(ui, repo, *pats, **opts):
486 def expand(ui, repo, *pats, **opts):
488 '''expand keywords in the working directory
487 '''expand keywords in the working directory
489
488
490 Run after (re)enabling keyword expansion.
489 Run after (re)enabling keyword expansion.
491
490
492 kwexpand refuses to run if given files contain local changes.
491 kwexpand refuses to run if given files contain local changes.
493 '''
492 '''
494 # 3rd argument sets expansion to True
493 # 3rd argument sets expansion to True
495 _kwfwrite(ui, repo, True, *pats, **opts)
494 _kwfwrite(ui, repo, True, *pats, **opts)
496
495
497 @command('kwfiles',
496 @command('kwfiles',
498 [('A', 'all', None, _('show keyword status flags of all files')),
497 [('A', 'all', None, _('show keyword status flags of all files')),
499 ('i', 'ignore', None, _('show files excluded from expansion')),
498 ('i', 'ignore', None, _('show files excluded from expansion')),
500 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
499 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
501 ] + commands.walkopts,
500 ] + cmdutil.walkopts,
502 _('hg kwfiles [OPTION]... [FILE]...'),
501 _('hg kwfiles [OPTION]... [FILE]...'),
503 inferrepo=True)
502 inferrepo=True)
504 def files(ui, repo, *pats, **opts):
503 def files(ui, repo, *pats, **opts):
505 '''show files configured for keyword expansion
504 '''show files configured for keyword expansion
506
505
507 List which files in the working directory are matched by the
506 List which files in the working directory are matched by the
508 [keyword] configuration patterns.
507 [keyword] configuration patterns.
509
508
510 Useful to prevent inadvertent keyword expansion and to speed up
509 Useful to prevent inadvertent keyword expansion and to speed up
511 execution by including only files that are actual candidates for
510 execution by including only files that are actual candidates for
512 expansion.
511 expansion.
513
512
514 See :hg:`help keyword` on how to construct patterns both for
513 See :hg:`help keyword` on how to construct patterns both for
515 inclusion and exclusion of files.
514 inclusion and exclusion of files.
516
515
517 With -A/--all and -v/--verbose the codes used to show the status
516 With -A/--all and -v/--verbose the codes used to show the status
518 of files are::
517 of files are::
519
518
520 K = keyword expansion candidate
519 K = keyword expansion candidate
521 k = keyword expansion candidate (not tracked)
520 k = keyword expansion candidate (not tracked)
522 I = ignored
521 I = ignored
523 i = ignored (not tracked)
522 i = ignored (not tracked)
524 '''
523 '''
525 kwt = kwtools['templater']
524 kwt = kwtools['templater']
526 wctx = repo[None]
525 wctx = repo[None]
527 status = _status(ui, repo, wctx, kwt, *pats, **opts)
526 status = _status(ui, repo, wctx, kwt, *pats, **opts)
528 if pats:
527 if pats:
529 cwd = repo.getcwd()
528 cwd = repo.getcwd()
530 else:
529 else:
531 cwd = ''
530 cwd = ''
532 files = []
531 files = []
533 if not opts.get('unknown') or opts.get('all'):
532 if not opts.get('unknown') or opts.get('all'):
534 files = sorted(status.modified + status.added + status.clean)
533 files = sorted(status.modified + status.added + status.clean)
535 kwfiles = kwt.iskwfile(files, wctx)
534 kwfiles = kwt.iskwfile(files, wctx)
536 kwdeleted = kwt.iskwfile(status.deleted, wctx)
535 kwdeleted = kwt.iskwfile(status.deleted, wctx)
537 kwunknown = kwt.iskwfile(status.unknown, wctx)
536 kwunknown = kwt.iskwfile(status.unknown, wctx)
538 if not opts.get('ignore') or opts.get('all'):
537 if not opts.get('ignore') or opts.get('all'):
539 showfiles = kwfiles, kwdeleted, kwunknown
538 showfiles = kwfiles, kwdeleted, kwunknown
540 else:
539 else:
541 showfiles = [], [], []
540 showfiles = [], [], []
542 if opts.get('all') or opts.get('ignore'):
541 if opts.get('all') or opts.get('ignore'):
543 showfiles += ([f for f in files if f not in kwfiles],
542 showfiles += ([f for f in files if f not in kwfiles],
544 [f for f in status.unknown if f not in kwunknown])
543 [f for f in status.unknown if f not in kwunknown])
545 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
544 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
546 kwstates = zip(kwlabels, 'K!kIi', showfiles)
545 kwstates = zip(kwlabels, 'K!kIi', showfiles)
547 fm = ui.formatter('kwfiles', opts)
546 fm = ui.formatter('kwfiles', opts)
548 fmt = '%.0s%s\n'
547 fmt = '%.0s%s\n'
549 if opts.get('all') or ui.verbose:
548 if opts.get('all') or ui.verbose:
550 fmt = '%s %s\n'
549 fmt = '%s %s\n'
551 for kwstate, char, filenames in kwstates:
550 for kwstate, char, filenames in kwstates:
552 label = 'kwfiles.' + kwstate
551 label = 'kwfiles.' + kwstate
553 for f in filenames:
552 for f in filenames:
554 fm.startitem()
553 fm.startitem()
555 fm.write('kwstatus path', fmt, char,
554 fm.write('kwstatus path', fmt, char,
556 repo.pathto(f, cwd), label=label)
555 repo.pathto(f, cwd), label=label)
557 fm.end()
556 fm.end()
558
557
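The K/k/I/i letters documented above (plus '!' for deleted candidates) are simply paired with the file lists via zip(kwlabels, 'K!kIi', showfiles). A hypothetical, Mercurial-free classification of a single file along the same lines::

    def kwstatus_char(matched, tracked, deleted=False):
        """Map (matched by [keyword], tracked, deleted) to a kwfiles flag."""
        if not matched:
            return 'I' if tracked else 'i'      # excluded from expansion
        if deleted:
            return '!'                          # candidate whose file is gone
        return 'K' if tracked else 'k'          # expansion candidate

    print(kwstatus_char(matched=True, tracked=True))     # K
    print(kwstatus_char(matched=False, tracked=False))   # i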
559 @command('kwshrink',
558 @command('kwshrink',
560 commands.walkopts,
559 cmdutil.walkopts,
561 _('hg kwshrink [OPTION]... [FILE]...'),
560 _('hg kwshrink [OPTION]... [FILE]...'),
562 inferrepo=True)
561 inferrepo=True)
563 def shrink(ui, repo, *pats, **opts):
562 def shrink(ui, repo, *pats, **opts):
564 '''revert expanded keywords in the working directory
563 '''revert expanded keywords in the working directory
565
564
566 Must be run before changing/disabling active keywords.
565 Must be run before changing/disabling active keywords.
567
566
568 kwshrink refuses to run if given files contain local changes.
567 kwshrink refuses to run if given files contain local changes.
569 '''
568 '''
570 # 3rd argument sets expansion to False
569 # 3rd argument sets expansion to False
571 _kwfwrite(ui, repo, False, *pats, **opts)
570 _kwfwrite(ui, repo, False, *pats, **opts)
572
571
573
572
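kwexpand and kwshrink share one code path (_kwfwrite) and differ only in the expansion flag; the shrink direction collapses an expanded field back to its bare form. A standalone regex sketch of that reverse substitution, assuming the simple '$Keyword: ... $' layout::

    import re

    def shrink_keywords(text, keywords):
        """Collapse '$Keyword: anything $' back to '$Keyword$'."""
        pattern = re.compile(r'\$(%s): [^$\n]* \$' % '|'.join(map(re.escape, keywords)))
        return pattern.sub(r'$\1$', text)

    print(shrink_keywords('$Id: demo.txt,v a1b2c3d4e5f6 alice $\n', ['Id']))   # -> $Id$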
574 def uisetup(ui):
573 def uisetup(ui):
575 ''' Monkeypatches dispatch._parse to retrieve user command.'''
574 ''' Monkeypatches dispatch._parse to retrieve user command.'''
576
575
577 def kwdispatch_parse(orig, ui, args):
576 def kwdispatch_parse(orig, ui, args):
578 '''Monkeypatch dispatch._parse to obtain running hg command.'''
577 '''Monkeypatch dispatch._parse to obtain running hg command.'''
579 cmd, func, args, options, cmdoptions = orig(ui, args)
578 cmd, func, args, options, cmdoptions = orig(ui, args)
580 kwtools['hgcmd'] = cmd
579 kwtools['hgcmd'] = cmd
581 return cmd, func, args, options, cmdoptions
580 return cmd, func, args, options, cmdoptions
582
581
583 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
582 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
584
583
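extensions.wrapfunction installs a wrapper that receives the original callable as its first argument, which is how kwdispatch_parse records the running command without altering dispatch behaviour. A dependency-free sketch of the same pattern, using a made-up Dispatch stand-in rather than mercurial.dispatch::

    def wrapfunction(container, name, wrapper):
        """Replace container.name so wrapper(orig, *args, **kwargs) runs instead."""
        orig = getattr(container, name)
        def wrapped(*args, **kwargs):
            return wrapper(orig, *args, **kwargs)
        setattr(container, name, wrapped)
        return orig

    class Dispatch(object):
        """Toy stand-in for the dispatcher being wrapped."""
        def _parse(self, args):
            return args[0], args[1:]

    dispatch, state = Dispatch(), {}

    def record_parse(orig, args):
        cmd, rest = orig(args)
        state['hgcmd'] = cmd          # analogous to kwtools['hgcmd'] = cmd
        return cmd, rest

    wrapfunction(dispatch, '_parse', record_parse)
    print(dispatch._parse(['kwfiles', '-A']), state)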
585 def reposetup(ui, repo):
584 def reposetup(ui, repo):
586 '''Sets up repo as kwrepo for keyword substitution.
585 '''Sets up repo as kwrepo for keyword substitution.
587 Overrides file method to return kwfilelog instead of filelog
586 Overrides file method to return kwfilelog instead of filelog
588 if file matches user configuration.
587 if file matches user configuration.
589 Wraps commit to overwrite configured files with updated
588 Wraps commit to overwrite configured files with updated
590 keyword substitutions.
589 keyword substitutions.
591 Monkeypatches patch and webcommands.'''
590 Monkeypatches patch and webcommands.'''
592
591
593 try:
592 try:
594 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
593 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
595 or '.hg' in util.splitpath(repo.root)
594 or '.hg' in util.splitpath(repo.root)
596 or repo._url.startswith('bundle:')):
595 or repo._url.startswith('bundle:')):
597 return
596 return
598 except AttributeError:
597 except AttributeError:
599 pass
598 pass
600
599
601 inc, exc = [], ['.hg*']
600 inc, exc = [], ['.hg*']
602 for pat, opt in ui.configitems('keyword'):
601 for pat, opt in ui.configitems('keyword'):
603 if opt != 'ignore':
602 if opt != 'ignore':
604 inc.append(pat)
603 inc.append(pat)
605 else:
604 else:
606 exc.append(pat)
605 exc.append(pat)
607 if not inc:
606 if not inc:
608 return
607 return
609
608
610 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
609 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
611
610
612 class kwrepo(repo.__class__):
611 class kwrepo(repo.__class__):
613 def file(self, f):
612 def file(self, f):
614 if f[0] == '/':
613 if f[0] == '/':
615 f = f[1:]
614 f = f[1:]
616 return kwfilelog(self.svfs, kwt, f)
615 return kwfilelog(self.svfs, kwt, f)
617
616
618 def wread(self, filename):
617 def wread(self, filename):
619 data = super(kwrepo, self).wread(filename)
618 data = super(kwrepo, self).wread(filename)
620 return kwt.wread(filename, data)
619 return kwt.wread(filename, data)
621
620
622 def commit(self, *args, **opts):
621 def commit(self, *args, **opts):
623 # use custom commitctx for user commands
622 # use custom commitctx for user commands
624 # other extensions can still wrap repo.commitctx directly
623 # other extensions can still wrap repo.commitctx directly
625 self.commitctx = self.kwcommitctx
624 self.commitctx = self.kwcommitctx
626 try:
625 try:
627 return super(kwrepo, self).commit(*args, **opts)
626 return super(kwrepo, self).commit(*args, **opts)
628 finally:
627 finally:
629 del self.commitctx
628 del self.commitctx
630
629
631 def kwcommitctx(self, ctx, error=False):
630 def kwcommitctx(self, ctx, error=False):
632 n = super(kwrepo, self).commitctx(ctx, error)
631 n = super(kwrepo, self).commitctx(ctx, error)
633 # no lock needed, only called from repo.commit() which already locks
632 # no lock needed, only called from repo.commit() which already locks
634 if not kwt.postcommit:
633 if not kwt.postcommit:
635 restrict = kwt.restrict
634 restrict = kwt.restrict
636 kwt.restrict = True
635 kwt.restrict = True
637 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
636 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
638 False, True)
637 False, True)
639 kwt.restrict = restrict
638 kwt.restrict = restrict
640 return n
639 return n
641
640
642 def rollback(self, dryrun=False, force=False):
641 def rollback(self, dryrun=False, force=False):
643 wlock = self.wlock()
642 wlock = self.wlock()
644 origrestrict = kwt.restrict
643 origrestrict = kwt.restrict
645 try:
644 try:
646 if not dryrun:
645 if not dryrun:
647 changed = self['.'].files()
646 changed = self['.'].files()
648 ret = super(kwrepo, self).rollback(dryrun, force)
647 ret = super(kwrepo, self).rollback(dryrun, force)
649 if not dryrun:
648 if not dryrun:
650 ctx = self['.']
649 ctx = self['.']
651 modified, added = _preselect(ctx.status(), changed)
650 modified, added = _preselect(ctx.status(), changed)
652 kwt.restrict = False
651 kwt.restrict = False
653 kwt.overwrite(ctx, modified, True, True)
652 kwt.overwrite(ctx, modified, True, True)
654 kwt.overwrite(ctx, added, True, False)
653 kwt.overwrite(ctx, added, True, False)
655 return ret
654 return ret
656 finally:
655 finally:
657 kwt.restrict = origrestrict
656 kwt.restrict = origrestrict
658 wlock.release()
657 wlock.release()
659
658
660 # monkeypatches
659 # monkeypatches
661 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
660 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
662 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
661 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
663 rejects or conflicts due to expanded keywords in working dir.'''
662 rejects or conflicts due to expanded keywords in working dir.'''
664 orig(self, ui, gp, backend, store, eolmode)
663 orig(self, ui, gp, backend, store, eolmode)
665 # shrink keywords read from working dir
664 # shrink keywords read from working dir
666 self.lines = kwt.shrinklines(self.fname, self.lines)
665 self.lines = kwt.shrinklines(self.fname, self.lines)
667
666
668 def kwdiff(orig, *args, **kwargs):
667 def kwdiff(orig, *args, **kwargs):
669 '''Monkeypatch patch.diff to avoid expansion.'''
668 '''Monkeypatch patch.diff to avoid expansion.'''
670 kwt.restrict = True
669 kwt.restrict = True
671 return orig(*args, **kwargs)
670 return orig(*args, **kwargs)
672
671
673 def kwweb_skip(orig, web, req, tmpl):
672 def kwweb_skip(orig, web, req, tmpl):
674 '''Wraps webcommands.x turning off keyword expansion.'''
673 '''Wraps webcommands.x turning off keyword expansion.'''
675 kwt.match = util.never
674 kwt.match = util.never
676 return orig(web, req, tmpl)
675 return orig(web, req, tmpl)
677
676
678 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
677 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
679 '''Wraps cmdutil.amend expanding keywords after amend.'''
678 '''Wraps cmdutil.amend expanding keywords after amend.'''
680 with repo.wlock():
679 with repo.wlock():
681 kwt.postcommit = True
680 kwt.postcommit = True
682 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
681 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
683 if newid != old.node():
682 if newid != old.node():
684 ctx = repo[newid]
683 ctx = repo[newid]
685 kwt.restrict = True
684 kwt.restrict = True
686 kwt.overwrite(ctx, ctx.files(), False, True)
685 kwt.overwrite(ctx, ctx.files(), False, True)
687 kwt.restrict = False
686 kwt.restrict = False
688 return newid
687 return newid
689
688
690 def kw_copy(orig, ui, repo, pats, opts, rename=False):
689 def kw_copy(orig, ui, repo, pats, opts, rename=False):
691 '''Wraps cmdutil.copy so that copy/rename destinations do not
690 '''Wraps cmdutil.copy so that copy/rename destinations do not
692 contain expanded keywords.
691 contain expanded keywords.
693 Note that the source of a regular file destination may also be a
692 Note that the source of a regular file destination may also be a
694 symlink:
693 symlink:
695 hg cp sym x -> x is symlink
694 hg cp sym x -> x is symlink
696 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
695 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
697 For the latter we have to follow the symlink to find out whether its
696 For the latter we have to follow the symlink to find out whether its
698 target is configured for expansion and we therefore must unexpand the
697 target is configured for expansion and we therefore must unexpand the
699 keywords in the destination.'''
698 keywords in the destination.'''
700 with repo.wlock():
699 with repo.wlock():
701 orig(ui, repo, pats, opts, rename)
700 orig(ui, repo, pats, opts, rename)
702 if opts.get('dry_run'):
701 if opts.get('dry_run'):
703 return
702 return
704 wctx = repo[None]
703 wctx = repo[None]
705 cwd = repo.getcwd()
704 cwd = repo.getcwd()
706
705
707 def haskwsource(dest):
706 def haskwsource(dest):
708 '''Returns true if dest is a regular file and configured for
707 '''Returns true if dest is a regular file and configured for
709 expansion or a symlink which points to a file configured for
708 expansion or a symlink which points to a file configured for
710 expansion. '''
709 expansion. '''
711 source = repo.dirstate.copied(dest)
710 source = repo.dirstate.copied(dest)
712 if 'l' in wctx.flags(source):
711 if 'l' in wctx.flags(source):
713 source = pathutil.canonpath(repo.root, cwd,
712 source = pathutil.canonpath(repo.root, cwd,
714 os.path.realpath(source))
713 os.path.realpath(source))
715 return kwt.match(source)
714 return kwt.match(source)
716
715
717 candidates = [f for f in repo.dirstate.copies() if
716 candidates = [f for f in repo.dirstate.copies() if
718 'l' not in wctx.flags(f) and haskwsource(f)]
717 'l' not in wctx.flags(f) and haskwsource(f)]
719 kwt.overwrite(wctx, candidates, False, False)
718 kwt.overwrite(wctx, candidates, False, False)
720
719
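The symlink case described in the kw_copy docstring comes down to: resolve the recorded copy source through the link before asking whether it is configured for expansion. A simplified, hypothetical version of that check, using plain dicts in place of the dirstate and working context::

    import os

    def haskwsource(dest, copied, flags, kwmatch, root='.'):
        """True if dest's copy source (following a symlink) matches [keyword]."""
        source = copied[dest]
        if 'l' in flags.get(source, ''):
            # follow the link and re-express the target relative to the repo root
            source = os.path.relpath(os.path.realpath(source), os.path.realpath(root))
        return kwmatch(source)

    copied = {'x': 'sym'}            # hg cp -A sym x recorded sym -> x
    flags = {'sym': 'l'}             # sym is a symlink in the working copy
    print(haskwsource('x', copied, flags, kwmatch=lambda f: f.endswith('.c')))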
721 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
720 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
722 '''Wraps record.dorecord expanding keywords after recording.'''
721 '''Wraps record.dorecord expanding keywords after recording.'''
723 with repo.wlock():
722 with repo.wlock():
724 # record returns 0 even when nothing has changed
723 # record returns 0 even when nothing has changed
725 # therefore compare nodes before and after
724 # therefore compare nodes before and after
726 kwt.postcommit = True
725 kwt.postcommit = True
727 ctx = repo['.']
726 ctx = repo['.']
728 wstatus = ctx.status()
727 wstatus = ctx.status()
729 ret = orig(ui, repo, commitfunc, *pats, **opts)
728 ret = orig(ui, repo, commitfunc, *pats, **opts)
730 recctx = repo['.']
729 recctx = repo['.']
731 if ctx != recctx:
730 if ctx != recctx:
732 modified, added = _preselect(wstatus, recctx.files())
731 modified, added = _preselect(wstatus, recctx.files())
733 kwt.restrict = False
732 kwt.restrict = False
734 kwt.overwrite(recctx, modified, False, True)
733 kwt.overwrite(recctx, modified, False, True)
735 kwt.overwrite(recctx, added, False, True, True)
734 kwt.overwrite(recctx, added, False, True, True)
736 kwt.restrict = True
735 kwt.restrict = True
737 return ret
736 return ret
738
737
739 def kwfilectx_cmp(orig, self, fctx):
738 def kwfilectx_cmp(orig, self, fctx):
740 if fctx._customcmp:
739 if fctx._customcmp:
741 return fctx.cmp(self)
740 return fctx.cmp(self)
742 # keyword affects data size, comparing wdir and filelog size does
741 # keyword affects data size, comparing wdir and filelog size does
743 # not make sense
742 # not make sense
744 if (fctx._filenode is None and
743 if (fctx._filenode is None and
745 (self._repo._encodefilterpats or
744 (self._repo._encodefilterpats or
746 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
745 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
747 self.size() - 4 == fctx.size()) or
746 self.size() - 4 == fctx.size()) or
748 self.size() == fctx.size()):
747 self.size() == fctx.size()):
749 return self._filelog.cmp(self._filenode, fctx.data())
748 return self._filelog.cmp(self._filenode, fctx.data())
750 return True
749 return True
751
750
752 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
751 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
753 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
752 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
754 extensions.wrapfunction(patch, 'diff', kwdiff)
753 extensions.wrapfunction(patch, 'diff', kwdiff)
755 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
754 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
756 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
755 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
757 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
756 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
758 for c in 'annotate changeset rev filediff diff'.split():
757 for c in 'annotate changeset rev filediff diff'.split():
759 extensions.wrapfunction(webcommands, c, kwweb_skip)
758 extensions.wrapfunction(webcommands, c, kwweb_skip)
760 repo.__class__ = kwrepo
759 repo.__class__ = kwrepo
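reposetup changes repository behaviour by assigning a dynamically created subclass to repo.__class__, so the already-constructed instance picks up the overridden file()/wread()/commit() methods. The same instance-reclassing trick on a toy object, with no Mercurial types involved::

    class Repo(object):
        def wread(self, name):
            return 'raw contents of %s' % name

    r = Repo()

    class KwRepo(r.__class__):
        def wread(self, name):
            data = super(KwRepo, self).wread(name)
            return data + ' [keywords expanded]'   # post-process like kwt.wread()

    r.__class__ = KwRepo        # existing instance now routes through the subclass
    print(r.wread('demo.txt'))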
@@ -1,580 +1,579 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import errno
12 import errno
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import shutil
15 import shutil
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18
18
19 from mercurial import (
19 from mercurial import (
20 cmdutil,
20 cmdutil,
21 commands,
22 context,
21 context,
23 error,
22 error,
24 hg,
23 hg,
25 lock,
24 lock,
26 match as matchmod,
25 match as matchmod,
27 node,
26 node,
28 registrar,
27 registrar,
29 scmutil,
28 scmutil,
30 util,
29 util,
31 )
30 )
32
31
33 from ..convert import (
32 from ..convert import (
34 convcmd,
33 convcmd,
35 filemap,
34 filemap,
36 )
35 )
37
36
38 from . import (
37 from . import (
39 lfutil,
38 lfutil,
40 storefactory
39 storefactory
41 )
40 )
42
41
43 release = lock.release
42 release = lock.release
44
43
45 # -- Commands ----------------------------------------------------------
44 # -- Commands ----------------------------------------------------------
46
45
47 cmdtable = {}
46 cmdtable = {}
48 command = registrar.command(cmdtable)
47 command = registrar.command(cmdtable)
49
48
50 @command('lfconvert',
49 @command('lfconvert',
51 [('s', 'size', '',
50 [('s', 'size', '',
52 _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
51 _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
53 ('', 'to-normal', False,
52 ('', 'to-normal', False,
54 _('convert from a largefiles repo to a normal repo')),
53 _('convert from a largefiles repo to a normal repo')),
55 ],
54 ],
56 _('hg lfconvert SOURCE DEST [FILE ...]'),
55 _('hg lfconvert SOURCE DEST [FILE ...]'),
57 norepo=True,
56 norepo=True,
58 inferrepo=True)
57 inferrepo=True)
59 def lfconvert(ui, src, dest, *pats, **opts):
58 def lfconvert(ui, src, dest, *pats, **opts):
60 '''convert a normal repository to a largefiles repository
59 '''convert a normal repository to a largefiles repository
61
60
62 Convert repository SOURCE to a new repository DEST, identical to
61 Convert repository SOURCE to a new repository DEST, identical to
63 SOURCE except that certain files will be converted as largefiles:
62 SOURCE except that certain files will be converted as largefiles:
64 specifically, any file that matches any PATTERN *or* whose size is
63 specifically, any file that matches any PATTERN *or* whose size is
65 above the minimum size threshold is converted as a largefile. The
64 above the minimum size threshold is converted as a largefile. The
66 size used to determine whether or not to track a file as a
65 size used to determine whether or not to track a file as a
67 largefile is the size of the first version of the file. The
66 largefile is the size of the first version of the file. The
68 minimum size can be specified either with --size or in
67 minimum size can be specified either with --size or in
69 configuration as ``largefiles.size``.
68 configuration as ``largefiles.size``.
70
69
71 After running this command you will need to make sure that
70 After running this command you will need to make sure that
72 largefiles is enabled anywhere you intend to push the new
71 largefiles is enabled anywhere you intend to push the new
73 repository.
72 repository.
74
73
75 Use --to-normal to convert largefiles back to normal files; after
74 Use --to-normal to convert largefiles back to normal files; after
76 this, the DEST repository can be used without largefiles at all.'''
75 this, the DEST repository can be used without largefiles at all.'''
77
76
78 if opts['to_normal']:
77 if opts['to_normal']:
79 tolfile = False
78 tolfile = False
80 else:
79 else:
81 tolfile = True
80 tolfile = True
82 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
81 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
83
82
84 if not hg.islocal(src):
83 if not hg.islocal(src):
85 raise error.Abort(_('%s is not a local Mercurial repo') % src)
84 raise error.Abort(_('%s is not a local Mercurial repo') % src)
86 if not hg.islocal(dest):
85 if not hg.islocal(dest):
87 raise error.Abort(_('%s is not a local Mercurial repo') % dest)
86 raise error.Abort(_('%s is not a local Mercurial repo') % dest)
88
87
89 rsrc = hg.repository(ui, src)
88 rsrc = hg.repository(ui, src)
90 ui.status(_('initializing destination %s\n') % dest)
89 ui.status(_('initializing destination %s\n') % dest)
91 rdst = hg.repository(ui, dest, create=True)
90 rdst = hg.repository(ui, dest, create=True)
92
91
93 success = False
92 success = False
94 dstwlock = dstlock = None
93 dstwlock = dstlock = None
95 try:
94 try:
96 # Get a list of all changesets in the source. The easy way to do this
95 # Get a list of all changesets in the source. The easy way to do this
97 # is to simply walk the changelog, using changelog.nodesbetween().
96 # is to simply walk the changelog, using changelog.nodesbetween().
98 # Take a look at mercurial/revlog.py:639 for more details.
97 # Take a look at mercurial/revlog.py:639 for more details.
99 # Use a generator instead of a list to decrease memory usage
98 # Use a generator instead of a list to decrease memory usage
100 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
99 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
101 rsrc.heads())[0])
100 rsrc.heads())[0])
102 revmap = {node.nullid: node.nullid}
101 revmap = {node.nullid: node.nullid}
103 if tolfile:
102 if tolfile:
104 # Lock the destination to prevent modification while it is being converted to.
103 # Lock the destination to prevent modification while it is being converted to.
105 # Don't need to lock src because we are just reading from its
104 # Don't need to lock src because we are just reading from its
106 # history which can't change.
105 # history which can't change.
107 dstwlock = rdst.wlock()
106 dstwlock = rdst.wlock()
108 dstlock = rdst.lock()
107 dstlock = rdst.lock()
109
108
110 lfiles = set()
109 lfiles = set()
111 normalfiles = set()
110 normalfiles = set()
112 if not pats:
111 if not pats:
113 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
112 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
114 if pats:
113 if pats:
115 matcher = matchmod.match(rsrc.root, '', list(pats))
114 matcher = matchmod.match(rsrc.root, '', list(pats))
116 else:
115 else:
117 matcher = None
116 matcher = None
118
117
119 lfiletohash = {}
118 lfiletohash = {}
120 for ctx in ctxs:
119 for ctx in ctxs:
121 ui.progress(_('converting revisions'), ctx.rev(),
120 ui.progress(_('converting revisions'), ctx.rev(),
122 unit=_('revisions'), total=rsrc['tip'].rev())
121 unit=_('revisions'), total=rsrc['tip'].rev())
123 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
122 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
124 lfiles, normalfiles, matcher, size, lfiletohash)
123 lfiles, normalfiles, matcher, size, lfiletohash)
125 ui.progress(_('converting revisions'), None)
124 ui.progress(_('converting revisions'), None)
126
125
127 if rdst.wvfs.exists(lfutil.shortname):
126 if rdst.wvfs.exists(lfutil.shortname):
128 rdst.wvfs.rmtree(lfutil.shortname)
127 rdst.wvfs.rmtree(lfutil.shortname)
129
128
130 for f in lfiletohash.keys():
129 for f in lfiletohash.keys():
131 if rdst.wvfs.isfile(f):
130 if rdst.wvfs.isfile(f):
132 rdst.wvfs.unlink(f)
131 rdst.wvfs.unlink(f)
133 try:
132 try:
134 rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
133 rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
135 except OSError:
134 except OSError:
136 pass
135 pass
137
136
138 # If there were any files converted to largefiles, add largefiles
137 # If there were any files converted to largefiles, add largefiles
139 # to the destination repository's requirements.
138 # to the destination repository's requirements.
140 if lfiles:
139 if lfiles:
141 rdst.requirements.add('largefiles')
140 rdst.requirements.add('largefiles')
142 rdst._writerequirements()
141 rdst._writerequirements()
143 else:
142 else:
144 class lfsource(filemap.filemap_source):
143 class lfsource(filemap.filemap_source):
145 def __init__(self, ui, source):
144 def __init__(self, ui, source):
146 super(lfsource, self).__init__(ui, source, None)
145 super(lfsource, self).__init__(ui, source, None)
147 self.filemapper.rename[lfutil.shortname] = '.'
146 self.filemapper.rename[lfutil.shortname] = '.'
148
147
149 def getfile(self, name, rev):
148 def getfile(self, name, rev):
150 realname, realrev = rev
149 realname, realrev = rev
151 f = super(lfsource, self).getfile(name, rev)
150 f = super(lfsource, self).getfile(name, rev)
152
151
153 if (not realname.startswith(lfutil.shortnameslash)
152 if (not realname.startswith(lfutil.shortnameslash)
154 or f[0] is None):
153 or f[0] is None):
155 return f
154 return f
156
155
157 # Substitute in the largefile data for the hash
156 # Substitute in the largefile data for the hash
158 hash = f[0].strip()
157 hash = f[0].strip()
159 path = lfutil.findfile(rsrc, hash)
158 path = lfutil.findfile(rsrc, hash)
160
159
161 if path is None:
160 if path is None:
162 raise error.Abort(_("missing largefile for '%s' in %s")
161 raise error.Abort(_("missing largefile for '%s' in %s")
163 % (realname, realrev))
162 % (realname, realrev))
164 return util.readfile(path), f[1]
163 return util.readfile(path), f[1]
165
164
166 class converter(convcmd.converter):
165 class converter(convcmd.converter):
167 def __init__(self, ui, source, dest, revmapfile, opts):
166 def __init__(self, ui, source, dest, revmapfile, opts):
168 src = lfsource(ui, source)
167 src = lfsource(ui, source)
169
168
170 super(converter, self).__init__(ui, src, dest, revmapfile,
169 super(converter, self).__init__(ui, src, dest, revmapfile,
171 opts)
170 opts)
172
171
173 found, missing = downloadlfiles(ui, rsrc)
172 found, missing = downloadlfiles(ui, rsrc)
174 if missing != 0:
173 if missing != 0:
175 raise error.Abort(_("all largefiles must be present locally"))
174 raise error.Abort(_("all largefiles must be present locally"))
176
175
177 orig = convcmd.converter
176 orig = convcmd.converter
178 convcmd.converter = converter
177 convcmd.converter = converter
179
178
180 try:
179 try:
181 convcmd.convert(ui, src, dest)
180 convcmd.convert(ui, src, dest)
182 finally:
181 finally:
183 convcmd.converter = orig
182 convcmd.converter = orig
184 success = True
183 success = True
185 finally:
184 finally:
186 if tolfile:
185 if tolfile:
187 rdst.dirstate.clear()
186 rdst.dirstate.clear()
188 release(dstlock, dstwlock)
187 release(dstlock, dstwlock)
189 if not success:
188 if not success:
190 # we failed, remove the new directory
189 # we failed, remove the new directory
191 shutil.rmtree(rdst.root)
190 shutil.rmtree(rdst.root)
192
191
193 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
192 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
194 matcher, size, lfiletohash):
193 matcher, size, lfiletohash):
195 # Convert src parents to dst parents
194 # Convert src parents to dst parents
196 parents = _convertparents(ctx, revmap)
195 parents = _convertparents(ctx, revmap)
197
196
198 # Generate list of changed files
197 # Generate list of changed files
199 files = _getchangedfiles(ctx, parents)
198 files = _getchangedfiles(ctx, parents)
200
199
201 dstfiles = []
200 dstfiles = []
202 for f in files:
201 for f in files:
203 if f not in lfiles and f not in normalfiles:
202 if f not in lfiles and f not in normalfiles:
204 islfile = _islfile(f, ctx, matcher, size)
203 islfile = _islfile(f, ctx, matcher, size)
205 # If this file was renamed or copied then copy
204 # If this file was renamed or copied then copy
206 # the largefile-ness of its predecessor
205 # the largefile-ness of its predecessor
207 if f in ctx.manifest():
206 if f in ctx.manifest():
208 fctx = ctx.filectx(f)
207 fctx = ctx.filectx(f)
209 renamed = fctx.renamed()
208 renamed = fctx.renamed()
210 renamedlfile = renamed and renamed[0] in lfiles
209 renamedlfile = renamed and renamed[0] in lfiles
211 islfile |= renamedlfile
210 islfile |= renamedlfile
212 if 'l' in fctx.flags():
211 if 'l' in fctx.flags():
213 if renamedlfile:
212 if renamedlfile:
214 raise error.Abort(
213 raise error.Abort(
215 _('renamed/copied largefile %s becomes symlink')
214 _('renamed/copied largefile %s becomes symlink')
216 % f)
215 % f)
217 islfile = False
216 islfile = False
218 if islfile:
217 if islfile:
219 lfiles.add(f)
218 lfiles.add(f)
220 else:
219 else:
221 normalfiles.add(f)
220 normalfiles.add(f)
222
221
223 if f in lfiles:
222 if f in lfiles:
224 fstandin = lfutil.standin(f)
223 fstandin = lfutil.standin(f)
225 dstfiles.append(fstandin)
224 dstfiles.append(fstandin)
226 # largefile in manifest if it has not been removed/renamed
225 # largefile in manifest if it has not been removed/renamed
227 if f in ctx.manifest():
226 if f in ctx.manifest():
228 fctx = ctx.filectx(f)
227 fctx = ctx.filectx(f)
229 if 'l' in fctx.flags():
228 if 'l' in fctx.flags():
230 renamed = fctx.renamed()
229 renamed = fctx.renamed()
231 if renamed and renamed[0] in lfiles:
230 if renamed and renamed[0] in lfiles:
232 raise error.Abort(_('largefile %s becomes symlink') % f)
231 raise error.Abort(_('largefile %s becomes symlink') % f)
233
232
234 # largefile was modified, update standins
233 # largefile was modified, update standins
235 m = hashlib.sha1('')
234 m = hashlib.sha1('')
236 m.update(ctx[f].data())
235 m.update(ctx[f].data())
237 hash = m.hexdigest()
236 hash = m.hexdigest()
238 if f not in lfiletohash or lfiletohash[f] != hash:
237 if f not in lfiletohash or lfiletohash[f] != hash:
239 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
238 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
240 executable = 'x' in ctx[f].flags()
239 executable = 'x' in ctx[f].flags()
241 lfutil.writestandin(rdst, fstandin, hash,
240 lfutil.writestandin(rdst, fstandin, hash,
242 executable)
241 executable)
243 lfiletohash[f] = hash
242 lfiletohash[f] = hash
244 else:
243 else:
245 # normal file
244 # normal file
246 dstfiles.append(f)
245 dstfiles.append(f)
247
246
248 def getfilectx(repo, memctx, f):
247 def getfilectx(repo, memctx, f):
249 srcfname = lfutil.splitstandin(f)
248 srcfname = lfutil.splitstandin(f)
250 if srcfname is not None:
249 if srcfname is not None:
251 # if the file isn't in the manifest then it was removed
250 # if the file isn't in the manifest then it was removed
252 # or renamed, return None to indicate this
251 # or renamed, return None to indicate this
253 try:
252 try:
254 fctx = ctx.filectx(srcfname)
253 fctx = ctx.filectx(srcfname)
255 except error.LookupError:
254 except error.LookupError:
256 return None
255 return None
257 renamed = fctx.renamed()
256 renamed = fctx.renamed()
258 if renamed:
257 if renamed:
259 # standin is always a largefile because largefile-ness
258 # standin is always a largefile because largefile-ness
260 # doesn't change after rename or copy
259 # doesn't change after rename or copy
261 renamed = lfutil.standin(renamed[0])
260 renamed = lfutil.standin(renamed[0])
262
261
263 return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
262 return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
264 'l' in fctx.flags(), 'x' in fctx.flags(),
263 'l' in fctx.flags(), 'x' in fctx.flags(),
265 renamed)
264 renamed)
266 else:
265 else:
267 return _getnormalcontext(repo, ctx, f, revmap)
266 return _getnormalcontext(repo, ctx, f, revmap)
268
267
269 # Commit
268 # Commit
270 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
269 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
271
270
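The standin committed above is just the SHA-1 hex digest of the largefile's contents (getfilectx later appends a newline when it builds the memfilectx). Computed in isolation::

    import hashlib

    def standin_hash(data):
        """Hex digest written to the standin file for a largefile revision."""
        m = hashlib.sha1(b'')          # same seeding as the code above
        m.update(data)
        return m.hexdigest()

    blob = b'\x00' * (11 * 1024 * 1024)        # an 11 MB payload, for illustration
    print(standin_hash(blob))                  # 40-character hex string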
272 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
271 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
273 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
272 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
274 getfilectx, ctx.user(), ctx.date(), ctx.extra())
273 getfilectx, ctx.user(), ctx.date(), ctx.extra())
275 ret = rdst.commitctx(mctx)
274 ret = rdst.commitctx(mctx)
276 lfutil.copyalltostore(rdst, ret)
275 lfutil.copyalltostore(rdst, ret)
277 rdst.setparents(ret)
276 rdst.setparents(ret)
278 revmap[ctx.node()] = rdst.changelog.tip()
277 revmap[ctx.node()] = rdst.changelog.tip()
279
278
280 # Generate list of changed files
279 # Generate list of changed files
281 def _getchangedfiles(ctx, parents):
280 def _getchangedfiles(ctx, parents):
282 files = set(ctx.files())
281 files = set(ctx.files())
283 if node.nullid not in parents:
282 if node.nullid not in parents:
284 mc = ctx.manifest()
283 mc = ctx.manifest()
285 mp1 = ctx.parents()[0].manifest()
284 mp1 = ctx.parents()[0].manifest()
286 mp2 = ctx.parents()[1].manifest()
285 mp2 = ctx.parents()[1].manifest()
287 files |= (set(mp1) | set(mp2)) - set(mc)
286 files |= (set(mp1) | set(mp2)) - set(mc)
288 for f in mc:
287 for f in mc:
289 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
288 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
290 files.add(f)
289 files.add(f)
291 return files
290 return files
292
291
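_getchangedfiles above has to widen ctx.files() for merges: anything present in either parent manifest but missing from the merge result, plus anything whose node differs from either parent, also counts as changed. The same set arithmetic on tiny stand-in manifests (plain dicts mapping name to a fake node)::

    def changed_files(ctx_files, mc, mp1, mp2):
        files = set(ctx_files)
        files |= (set(mp1) | set(mp2)) - set(mc)     # dropped by the merge
        files |= set(f for f in mc
                     if mc[f] != mp1.get(f) or mc[f] != mp2.get(f))
        return files

    print(sorted(changed_files(['a'],
                               mc={'a': 1, 'b': 2},
                               mp1={'a': 1, 'c': 3},
                               mp2={'b': 2})))        # ['a', 'b', 'c']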
293 # Convert src parents to dst parents
292 # Convert src parents to dst parents
294 def _convertparents(ctx, revmap):
293 def _convertparents(ctx, revmap):
295 parents = []
294 parents = []
296 for p in ctx.parents():
295 for p in ctx.parents():
297 parents.append(revmap[p.node()])
296 parents.append(revmap[p.node()])
298 while len(parents) < 2:
297 while len(parents) < 2:
299 parents.append(node.nullid)
298 parents.append(node.nullid)
300 return parents
299 return parents
301
300
302 # Get memfilectx for a normal file
301 # Get memfilectx for a normal file
303 def _getnormalcontext(repo, ctx, f, revmap):
302 def _getnormalcontext(repo, ctx, f, revmap):
304 try:
303 try:
305 fctx = ctx.filectx(f)
304 fctx = ctx.filectx(f)
306 except error.LookupError:
305 except error.LookupError:
307 return None
306 return None
308 renamed = fctx.renamed()
307 renamed = fctx.renamed()
309 if renamed:
308 if renamed:
310 renamed = renamed[0]
309 renamed = renamed[0]
311
310
312 data = fctx.data()
311 data = fctx.data()
313 if f == '.hgtags':
312 if f == '.hgtags':
314 data = _converttags (repo.ui, revmap, data)
313 data = _converttags (repo.ui, revmap, data)
315 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
314 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
316 'x' in fctx.flags(), renamed)
315 'x' in fctx.flags(), renamed)
317
316
318 # Remap tag data using a revision map
317 # Remap tag data using a revision map
319 def _converttags(ui, revmap, data):
318 def _converttags(ui, revmap, data):
320 newdata = []
319 newdata = []
321 for line in data.splitlines():
320 for line in data.splitlines():
322 try:
321 try:
323 id, name = line.split(' ', 1)
322 id, name = line.split(' ', 1)
324 except ValueError:
323 except ValueError:
325 ui.warn(_('skipping incorrectly formatted tag %s\n')
324 ui.warn(_('skipping incorrectly formatted tag %s\n')
326 % line)
325 % line)
327 continue
326 continue
328 try:
327 try:
329 newid = node.bin(id)
328 newid = node.bin(id)
330 except TypeError:
329 except TypeError:
331 ui.warn(_('skipping incorrectly formatted id %s\n')
330 ui.warn(_('skipping incorrectly formatted id %s\n')
332 % id)
331 % id)
333 continue
332 continue
334 try:
333 try:
335 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
334 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
336 name))
335 name))
337 except KeyError:
336 except KeyError:
338 ui.warn(_('no mapping for id %s\n') % id)
337 ui.warn(_('no mapping for id %s\n') % id)
339 continue
338 continue
340 return ''.join(newdata)
339 return ''.join(newdata)
341
340
342 def _islfile(file, ctx, matcher, size):
341 def _islfile(file, ctx, matcher, size):
343 '''Return true if file should be considered a largefile, i.e.
342 '''Return true if file should be considered a largefile, i.e.
344 matcher matches it or it is larger than size.'''
343 matcher matches it or it is larger than size.'''
345 # never store special .hg* files as largefiles
344 # never store special .hg* files as largefiles
346 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
345 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
347 return False
346 return False
348 if matcher and matcher(file):
347 if matcher and matcher(file):
349 return True
348 return True
350 try:
349 try:
351 return ctx.filectx(file).size() >= size * 1024 * 1024
350 return ctx.filectx(file).size() >= size * 1024 * 1024
352 except error.LookupError:
351 except error.LookupError:
353 return False
352 return False
354
353
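The rule _islfile applies is: never the special .hg* metadata files, otherwise a pattern match or a first-revision size at or above the threshold (given in megabytes on the command line). The same decision on plain values, without a changectx::

    def is_largefile(path, size_bytes, min_size_mb, matcher=None):
        if path in ('.hgtags', '.hgignore', '.hgsigs'):
            return False                              # metadata is never a largefile
        if matcher is not None and matcher(path):
            return True
        return size_bytes >= min_size_mb * 1024 * 1024

    print(is_largefile('assets/video.bin', 12 * 1024 * 1024, min_size_mb=10))  # True
    print(is_largefile('.hgtags', 64 * 1024 * 1024, min_size_mb=10))           # False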
355 def uploadlfiles(ui, rsrc, rdst, files):
354 def uploadlfiles(ui, rsrc, rdst, files):
356 '''upload largefiles to the central store'''
355 '''upload largefiles to the central store'''
357
356
358 if not files:
357 if not files:
359 return
358 return
360
359
361 store = storefactory.openstore(rsrc, rdst, put=True)
360 store = storefactory.openstore(rsrc, rdst, put=True)
362
361
363 at = 0
362 at = 0
364 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
363 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
365 retval = store.exists(files)
364 retval = store.exists(files)
366 files = filter(lambda h: not retval[h], files)
365 files = filter(lambda h: not retval[h], files)
367 ui.debug("%d largefiles need to be uploaded\n" % len(files))
366 ui.debug("%d largefiles need to be uploaded\n" % len(files))
368
367
369 for hash in files:
368 for hash in files:
370 ui.progress(_('uploading largefiles'), at, unit=_('files'),
369 ui.progress(_('uploading largefiles'), at, unit=_('files'),
371 total=len(files))
370 total=len(files))
372 source = lfutil.findfile(rsrc, hash)
371 source = lfutil.findfile(rsrc, hash)
373 if not source:
372 if not source:
374 raise error.Abort(_('largefile %s missing from store'
373 raise error.Abort(_('largefile %s missing from store'
375 ' (needs to be uploaded)') % hash)
374 ' (needs to be uploaded)') % hash)
376 # XXX check for errors here
375 # XXX check for errors here
377 store.put(source, hash)
376 store.put(source, hash)
378 at += 1
377 at += 1
379 ui.progress(_('uploading largefiles'), None)
378 ui.progress(_('uploading largefiles'), None)
380
379
381 def verifylfiles(ui, repo, all=False, contents=False):
380 def verifylfiles(ui, repo, all=False, contents=False):
382 '''Verify that every largefile revision in the current changeset
381 '''Verify that every largefile revision in the current changeset
383 exists in the central store. With --contents, also verify that
382 exists in the central store. With --contents, also verify that
384 the contents of each local largefile revision are correct (SHA-1 hash
383 the contents of each local largefile revision are correct (SHA-1 hash
385 matches the revision ID). With --all, check every changeset in
384 matches the revision ID). With --all, check every changeset in
386 this repository.'''
385 this repository.'''
387 if all:
386 if all:
388 revs = repo.revs('all()')
387 revs = repo.revs('all()')
389 else:
388 else:
390 revs = ['.']
389 revs = ['.']
391
390
392 store = storefactory.openstore(repo)
391 store = storefactory.openstore(repo)
393 return store.verify(revs, contents=contents)
392 return store.verify(revs, contents=contents)
394
393
395 def cachelfiles(ui, repo, node, filelist=None):
394 def cachelfiles(ui, repo, node, filelist=None):
396 '''cachelfiles ensures that all largefiles needed by the specified revision
395 '''cachelfiles ensures that all largefiles needed by the specified revision
397 are present in the repository's largefile cache.
396 are present in the repository's largefile cache.
398
397
399 returns a tuple (cached, missing). cached is the list of files downloaded
398 returns a tuple (cached, missing). cached is the list of files downloaded
400 by this operation; missing is the list of files that were needed but could
399 by this operation; missing is the list of files that were needed but could
401 not be found.'''
400 not be found.'''
402 lfiles = lfutil.listlfiles(repo, node)
401 lfiles = lfutil.listlfiles(repo, node)
403 if filelist:
402 if filelist:
404 lfiles = set(lfiles) & set(filelist)
403 lfiles = set(lfiles) & set(filelist)
405 toget = []
404 toget = []
406
405
407 ctx = repo[node]
406 ctx = repo[node]
408 for lfile in lfiles:
407 for lfile in lfiles:
409 try:
408 try:
410 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
409 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
411 except IOError as err:
410 except IOError as err:
412 if err.errno == errno.ENOENT:
411 if err.errno == errno.ENOENT:
413 continue # node must be None and standin wasn't found in wctx
412 continue # node must be None and standin wasn't found in wctx
414 raise
413 raise
415 if not lfutil.findfile(repo, expectedhash):
414 if not lfutil.findfile(repo, expectedhash):
416 toget.append((lfile, expectedhash))
415 toget.append((lfile, expectedhash))
417
416
418 if toget:
417 if toget:
419 store = storefactory.openstore(repo)
418 store = storefactory.openstore(repo)
420 ret = store.get(toget)
419 ret = store.get(toget)
421 return ret
420 return ret
422
421
423 return ([], [])
422 return ([], [])
424
423
425 def downloadlfiles(ui, repo, rev=None):
424 def downloadlfiles(ui, repo, rev=None):
426 matchfn = scmutil.match(repo[None],
425 matchfn = scmutil.match(repo[None],
427 [repo.wjoin(lfutil.shortname)], {})
426 [repo.wjoin(lfutil.shortname)], {})
428 def prepare(ctx, fns):
427 def prepare(ctx, fns):
429 pass
428 pass
430 totalsuccess = 0
429 totalsuccess = 0
431 totalmissing = 0
430 totalmissing = 0
432 if rev != []: # walkchangerevs on empty list would return all revs
431 if rev != []: # walkchangerevs on empty list would return all revs
433 for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
432 for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
434 prepare):
433 prepare):
435 success, missing = cachelfiles(ui, repo, ctx.node())
434 success, missing = cachelfiles(ui, repo, ctx.node())
436 totalsuccess += len(success)
435 totalsuccess += len(success)
437 totalmissing += len(missing)
436 totalmissing += len(missing)
438 ui.status(_("%d additional largefiles cached\n") % totalsuccess)
437 ui.status(_("%d additional largefiles cached\n") % totalsuccess)
439 if totalmissing > 0:
438 if totalmissing > 0:
440 ui.status(_("%d largefiles failed to download\n") % totalmissing)
439 ui.status(_("%d largefiles failed to download\n") % totalmissing)
441 return totalsuccess, totalmissing
440 return totalsuccess, totalmissing
442
441
443 def updatelfiles(ui, repo, filelist=None, printmessage=None,
442 def updatelfiles(ui, repo, filelist=None, printmessage=None,
444 normallookup=False):
443 normallookup=False):
445 '''Update largefiles according to standins in the working directory
444 '''Update largefiles according to standins in the working directory
446
445
447 If ``printmessage`` is not ``None``, it forces the message to be
446 If ``printmessage`` is not ``None``, it forces the message to be
448 printed (when true) or suppressed (when false).
447 printed (when true) or suppressed (when false).
449 '''
448 '''
450 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
449 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
451 with repo.wlock():
450 with repo.wlock():
452 lfdirstate = lfutil.openlfdirstate(ui, repo)
451 lfdirstate = lfutil.openlfdirstate(ui, repo)
453 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
452 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
454
453
455 if filelist is not None:
454 if filelist is not None:
456 filelist = set(filelist)
455 filelist = set(filelist)
457 lfiles = [f for f in lfiles if f in filelist]
456 lfiles = [f for f in lfiles if f in filelist]
458
457
459 update = {}
458 update = {}
460 updated, removed = 0, 0
459 updated, removed = 0, 0
461 wvfs = repo.wvfs
460 wvfs = repo.wvfs
462 wctx = repo[None]
461 wctx = repo[None]
463 for lfile in lfiles:
462 for lfile in lfiles:
464 rellfile = lfile
463 rellfile = lfile
465 rellfileorig = os.path.relpath(
464 rellfileorig = os.path.relpath(
466 scmutil.origpath(ui, repo, wvfs.join(rellfile)),
465 scmutil.origpath(ui, repo, wvfs.join(rellfile)),
467 start=repo.root)
466 start=repo.root)
468 relstandin = lfutil.standin(lfile)
467 relstandin = lfutil.standin(lfile)
469 relstandinorig = os.path.relpath(
468 relstandinorig = os.path.relpath(
470 scmutil.origpath(ui, repo, wvfs.join(relstandin)),
469 scmutil.origpath(ui, repo, wvfs.join(relstandin)),
471 start=repo.root)
470 start=repo.root)
472 if wvfs.exists(relstandin):
471 if wvfs.exists(relstandin):
473 if (wvfs.exists(relstandinorig) and
472 if (wvfs.exists(relstandinorig) and
474 wvfs.exists(rellfile)):
473 wvfs.exists(rellfile)):
475 shutil.copyfile(wvfs.join(rellfile),
474 shutil.copyfile(wvfs.join(rellfile),
476 wvfs.join(rellfileorig))
475 wvfs.join(rellfileorig))
477 wvfs.unlinkpath(relstandinorig)
476 wvfs.unlinkpath(relstandinorig)
478 expecthash = lfutil.readasstandin(wctx[relstandin])
477 expecthash = lfutil.readasstandin(wctx[relstandin])
479 if expecthash != '':
478 if expecthash != '':
480 if lfile not in wctx: # not switched to normal file
479 if lfile not in wctx: # not switched to normal file
481 wvfs.unlinkpath(rellfile, ignoremissing=True)
480 wvfs.unlinkpath(rellfile, ignoremissing=True)
482 # use normallookup() to allocate an entry in largefiles
481 # use normallookup() to allocate an entry in largefiles
483 # dirstate to prevent lfilesrepo.status() from reporting
482 # dirstate to prevent lfilesrepo.status() from reporting
484 # missing files as removed.
483 # missing files as removed.
485 lfdirstate.normallookup(lfile)
484 lfdirstate.normallookup(lfile)
486 update[lfile] = expecthash
485 update[lfile] = expecthash
487 else:
486 else:
488 # Remove lfiles for which the standin is deleted, unless the
487 # Remove lfiles for which the standin is deleted, unless the
489 # lfile is added to the repository again. This happens when a
488 # lfile is added to the repository again. This happens when a
490 # largefile is converted back to a normal file: the standin
489 # largefile is converted back to a normal file: the standin
491 # disappears, but a new (normal) file appears as the lfile.
490 # disappears, but a new (normal) file appears as the lfile.
492 if (wvfs.exists(rellfile) and
491 if (wvfs.exists(rellfile) and
493 repo.dirstate.normalize(lfile) not in wctx):
492 repo.dirstate.normalize(lfile) not in wctx):
494 wvfs.unlinkpath(rellfile)
493 wvfs.unlinkpath(rellfile)
495 removed += 1
494 removed += 1
496
495
497 # largefile processing might be slow and be interrupted - be prepared
496 # largefile processing might be slow and be interrupted - be prepared
498 lfdirstate.write()
497 lfdirstate.write()
499
498
500 if lfiles:
499 if lfiles:
501 statuswriter(_('getting changed largefiles\n'))
500 statuswriter(_('getting changed largefiles\n'))
502 cachelfiles(ui, repo, None, lfiles)
501 cachelfiles(ui, repo, None, lfiles)
503
502
504 for lfile in lfiles:
503 for lfile in lfiles:
505 update1 = 0
504 update1 = 0
506
505
507 expecthash = update.get(lfile)
506 expecthash = update.get(lfile)
508 if expecthash:
507 if expecthash:
509 if not lfutil.copyfromcache(repo, expecthash, lfile):
508 if not lfutil.copyfromcache(repo, expecthash, lfile):
510 # failed ... but already removed and set to normallookup
509 # failed ... but already removed and set to normallookup
511 continue
510 continue
512 # Synchronize largefile dirstate to the last modified
511 # Synchronize largefile dirstate to the last modified
513 # time of the file
512 # time of the file
514 lfdirstate.normal(lfile)
513 lfdirstate.normal(lfile)
515 update1 = 1
514 update1 = 1
516
515
517 # copy the exec mode of largefile standin from the repository's
516 # copy the exec mode of largefile standin from the repository's
518 # dirstate to its state in the lfdirstate.
517 # dirstate to its state in the lfdirstate.
519 rellfile = lfile
518 rellfile = lfile
520 relstandin = lfutil.standin(lfile)
519 relstandin = lfutil.standin(lfile)
521 if wvfs.exists(relstandin):
520 if wvfs.exists(relstandin):
522 # exec is decided by the users permissions using mask 0o100
521 # exec is decided by the users permissions using mask 0o100
523 standinexec = wvfs.stat(relstandin).st_mode & 0o100
522 standinexec = wvfs.stat(relstandin).st_mode & 0o100
524 st = wvfs.stat(rellfile)
523 st = wvfs.stat(rellfile)
525 mode = st.st_mode
524 mode = st.st_mode
526 if standinexec != mode & 0o100:
525 if standinexec != mode & 0o100:
527 # first remove all X bits, then shift all R bits to X
526 # first remove all X bits, then shift all R bits to X
528 mode &= ~0o111
527 mode &= ~0o111
529 if standinexec:
528 if standinexec:
530 mode |= (mode >> 2) & 0o111 & ~util.umask
529 mode |= (mode >> 2) & 0o111 & ~util.umask
531 wvfs.chmod(rellfile, mode)
530 wvfs.chmod(rellfile, mode)
532 update1 = 1
531 update1 = 1
533
532
534 updated += update1
533 updated += update1
535
534
536 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
535 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
537
536
538 lfdirstate.write()
537 lfdirstate.write()
539 if lfiles:
538 if lfiles:
540 statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
539 statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
541 removed))
540 removed))
542
541
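The permission sync near the end of updatelfiles is pure mode arithmetic: read the 0o100 bit from the standin, clear every execute bit on the largefile, then, if the standin is executable, copy each read bit down to the matching execute bit while honouring the umask. The same computation in isolation (0o022 is an assumed umask)::

    def sync_exec_bit(lfile_mode, standin_mode, umask=0o022):
        standinexec = standin_mode & 0o100
        if standinexec == lfile_mode & 0o100:
            return lfile_mode                         # already consistent
        mode = lfile_mode & ~0o111                    # drop all x bits
        if standinexec:
            mode |= (mode >> 2) & 0o111 & ~umask      # shift r bits onto x bits
        return mode

    print(oct(sync_exec_bit(0o644, 0o755)))   # exec added where readable -> 755
    print(oct(sync_exec_bit(0o755, 0o644)))   # exec bits removed -> 644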
543 @command('lfpull',
542 @command('lfpull',
544 [('r', 'rev', [], _('pull largefiles for these revisions'))
543 [('r', 'rev', [], _('pull largefiles for these revisions'))
545 ] + commands.remoteopts,
544 ] + cmdutil.remoteopts,
546 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
545 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
547 def lfpull(ui, repo, source="default", **opts):
546 def lfpull(ui, repo, source="default", **opts):
548 """pull largefiles for the specified revisions from the specified source
547 """pull largefiles for the specified revisions from the specified source
549
548
550 Pull largefiles that are referenced from local changesets but missing
549 Pull largefiles that are referenced from local changesets but missing
551 locally, pulling from a remote repository to the local cache.
550 locally, pulling from a remote repository to the local cache.
552
551
553 If SOURCE is omitted, the 'default' path will be used.
552 If SOURCE is omitted, the 'default' path will be used.
554 See :hg:`help urls` for more information.
553 See :hg:`help urls` for more information.
555
554
556 .. container:: verbose
555 .. container:: verbose
557
556
558 Some examples:
557 Some examples:
559
558
560 - pull largefiles for all branch heads::
559 - pull largefiles for all branch heads::
561
560
562 hg lfpull -r "head() and not closed()"
561 hg lfpull -r "head() and not closed()"
563
562
564 - pull largefiles on the default branch::
563 - pull largefiles on the default branch::
565
564
566 hg lfpull -r "branch(default)"
565 hg lfpull -r "branch(default)"
567 """
566 """
568 repo.lfpullsource = source
567 repo.lfpullsource = source
569
568
570 revs = opts.get('rev', [])
569 revs = opts.get('rev', [])
571 if not revs:
570 if not revs:
572 raise error.Abort(_('no revisions specified'))
571 raise error.Abort(_('no revisions specified'))
573 revs = scmutil.revrange(repo, revs)
572 revs = scmutil.revrange(repo, revs)
574
573
575 numcached = 0
574 numcached = 0
576 for rev in revs:
575 for rev in revs:
577 ui.note(_('pulling largefiles for revision %s\n') % rev)
576 ui.note(_('pulling largefiles for revision %s\n') % rev)
578 (cached, missing) = cachelfiles(ui, repo, rev)
577 (cached, missing) = cachelfiles(ui, repo, rev)
579 numcached += len(cached)
578 numcached += len(cached)
580 ui.status(_("%d largefiles cached\n") % numcached)
579 ui.status(_("%d largefiles cached\n") % numcached)
@@ -1,3612 +1,3612 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help COMMAND` for more details)::
17 Common tasks (use :hg:`help COMMAND` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import
65 from __future__ import absolute_import
66
66
67 import errno
67 import errno
68 import os
68 import os
69 import re
69 import re
70 import shutil
70 import shutil
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import (
72 from mercurial.node import (
73 bin,
73 bin,
74 hex,
74 hex,
75 nullid,
75 nullid,
76 nullrev,
76 nullrev,
77 short,
77 short,
78 )
78 )
79 from mercurial import (
79 from mercurial import (
80 cmdutil,
80 cmdutil,
81 commands,
81 commands,
82 dirstateguard,
82 dirstateguard,
83 error,
83 error,
84 extensions,
84 extensions,
85 hg,
85 hg,
86 localrepo,
86 localrepo,
87 lock as lockmod,
87 lock as lockmod,
88 patch as patchmod,
88 patch as patchmod,
89 phases,
89 phases,
90 pycompat,
90 pycompat,
91 registrar,
91 registrar,
92 revsetlang,
92 revsetlang,
93 scmutil,
93 scmutil,
94 smartset,
94 smartset,
95 subrepo,
95 subrepo,
96 util,
96 util,
97 vfs as vfsmod,
97 vfs as vfsmod,
98 )
98 )
99
99
100 release = lockmod.release
100 release = lockmod.release
101 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
101 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
102
102
103 cmdtable = {}
103 cmdtable = {}
104 command = registrar.command(cmdtable)
104 command = registrar.command(cmdtable)
105 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
105 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
106 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
106 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
107 # be specifying the version(s) of Mercurial they are tested with, or
107 # be specifying the version(s) of Mercurial they are tested with, or
108 # leave the attribute unspecified.
108 # leave the attribute unspecified.
109 testedwith = 'ships-with-hg-core'
109 testedwith = 'ships-with-hg-core'
110
110
111 # force load strip extension formerly included in mq and import some utility
111 # force load strip extension formerly included in mq and import some utility
112 try:
112 try:
113 stripext = extensions.find('strip')
113 stripext = extensions.find('strip')
114 except KeyError:
114 except KeyError:
115 # note: load is lazy so we could avoid the try-except,
115 # note: load is lazy so we could avoid the try-except,
116 # but I (marmoute) prefer this explicit code.
116 # but I (marmoute) prefer this explicit code.
117 class dummyui(object):
117 class dummyui(object):
118 def debug(self, msg):
118 def debug(self, msg):
119 pass
119 pass
120 stripext = extensions.load(dummyui(), 'strip', '')
120 stripext = extensions.load(dummyui(), 'strip', '')
121
121
122 strip = stripext.strip
122 strip = stripext.strip
123 checksubstate = stripext.checksubstate
123 checksubstate = stripext.checksubstate
124 checklocalchanges = stripext.checklocalchanges
124 checklocalchanges = stripext.checklocalchanges
125
125
126
126
127 # Patch names look like unix file names.
127 # Patch names look like unix file names.
128 # They must be joinable with the queue directory and result in the patch path.
128 # They must be joinable with the queue directory and result in the patch path.
129 normname = util.normpath
129 normname = util.normpath
130
130
131 class statusentry(object):
131 class statusentry(object):
132 def __init__(self, node, name):
132 def __init__(self, node, name):
133 self.node, self.name = node, name
133 self.node, self.name = node, name
134 def __repr__(self):
134 def __repr__(self):
135 return hex(self.node) + ':' + self.name
135 return hex(self.node) + ':' + self.name
136
136
137 # The order of the headers in 'hg export' HG patches:
137 # The order of the headers in 'hg export' HG patches:
138 HGHEADERS = [
138 HGHEADERS = [
139 # '# HG changeset patch',
139 # '# HG changeset patch',
140 '# User ',
140 '# User ',
141 '# Date ',
141 '# Date ',
142 '# ',
142 '# ',
143 '# Branch ',
143 '# Branch ',
144 '# Node ID ',
144 '# Node ID ',
145 '# Parent ', # can occur twice for merges - but that is not relevant for mq
145 '# Parent ', # can occur twice for merges - but that is not relevant for mq
146 ]
146 ]
147 # The order of headers in plain 'mail style' patches:
147 # The order of headers in plain 'mail style' patches:
148 PLAINHEADERS = {
148 PLAINHEADERS = {
149 'from': 0,
149 'from': 0,
150 'date': 1,
150 'date': 1,
151 'subject': 2,
151 'subject': 2,
152 }
152 }
153
153
154 def inserthgheader(lines, header, value):
154 def inserthgheader(lines, header, value):
155 """Assuming lines contains a HG patch header, add a header line with value.
155 """Assuming lines contains a HG patch header, add a header line with value.
156 >>> try: inserthgheader([], '# Date ', 'z')
156 >>> try: inserthgheader([], '# Date ', 'z')
157 ... except ValueError, inst: print "oops"
157 ... except ValueError, inst: print "oops"
158 oops
158 oops
159 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
159 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
160 ['# HG changeset patch', '# Date z']
160 ['# HG changeset patch', '# Date z']
161 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
161 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
162 ['# HG changeset patch', '# Date z', '']
162 ['# HG changeset patch', '# Date z', '']
163 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
163 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
164 ['# HG changeset patch', '# User y', '# Date z']
164 ['# HG changeset patch', '# User y', '# Date z']
165 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
165 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
166 ... '# User ', 'z')
166 ... '# User ', 'z')
167 ['# HG changeset patch', '# Date x', '# User z']
167 ['# HG changeset patch', '# Date x', '# User z']
168 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
168 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
169 ['# HG changeset patch', '# Date z']
169 ['# HG changeset patch', '# Date z']
170 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
170 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
171 ['# HG changeset patch', '# Date z', '', '# Date y']
171 ['# HG changeset patch', '# Date z', '', '# Date y']
172 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
172 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
173 ['# HG changeset patch', '# Date z', '# Parent y']
173 ['# HG changeset patch', '# Date z', '# Parent y']
174 """
174 """
175 start = lines.index('# HG changeset patch') + 1
175 start = lines.index('# HG changeset patch') + 1
176 newindex = HGHEADERS.index(header)
176 newindex = HGHEADERS.index(header)
177 bestpos = len(lines)
177 bestpos = len(lines)
178 for i in range(start, len(lines)):
178 for i in range(start, len(lines)):
179 line = lines[i]
179 line = lines[i]
180 if not line.startswith('# '):
180 if not line.startswith('# '):
181 bestpos = min(bestpos, i)
181 bestpos = min(bestpos, i)
182 break
182 break
183 for lineindex, h in enumerate(HGHEADERS):
183 for lineindex, h in enumerate(HGHEADERS):
184 if line.startswith(h):
184 if line.startswith(h):
185 if lineindex == newindex:
185 if lineindex == newindex:
186 lines[i] = header + value
186 lines[i] = header + value
187 return lines
187 return lines
188 if lineindex > newindex:
188 if lineindex > newindex:
189 bestpos = min(bestpos, i)
189 bestpos = min(bestpos, i)
190 break # next line
190 break # next line
191 lines.insert(bestpos, header + value)
191 lines.insert(bestpos, header + value)
192 return lines
192 return lines
193
193
194 def insertplainheader(lines, header, value):
194 def insertplainheader(lines, header, value):
195 """For lines containing a plain patch header, add a header line with value.
195 """For lines containing a plain patch header, add a header line with value.
196 >>> insertplainheader([], 'Date', 'z')
196 >>> insertplainheader([], 'Date', 'z')
197 ['Date: z']
197 ['Date: z']
198 >>> insertplainheader([''], 'Date', 'z')
198 >>> insertplainheader([''], 'Date', 'z')
199 ['Date: z', '']
199 ['Date: z', '']
200 >>> insertplainheader(['x'], 'Date', 'z')
200 >>> insertplainheader(['x'], 'Date', 'z')
201 ['Date: z', '', 'x']
201 ['Date: z', '', 'x']
202 >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
202 >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
203 ['From: y', 'Date: z', '', 'x']
203 ['From: y', 'Date: z', '', 'x']
204 >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
204 >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
205 [' date : x', 'From: z', '']
205 [' date : x', 'From: z', '']
206 >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
206 >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
207 ['Date: z', '', 'Date: y']
207 ['Date: z', '', 'Date: y']
208 >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
208 >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
209 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
209 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
210 """
210 """
211 newprio = PLAINHEADERS[header.lower()]
211 newprio = PLAINHEADERS[header.lower()]
212 bestpos = len(lines)
212 bestpos = len(lines)
213 for i, line in enumerate(lines):
213 for i, line in enumerate(lines):
214 if ':' in line:
214 if ':' in line:
215 lheader = line.split(':', 1)[0].strip().lower()
215 lheader = line.split(':', 1)[0].strip().lower()
216 lprio = PLAINHEADERS.get(lheader, newprio + 1)
216 lprio = PLAINHEADERS.get(lheader, newprio + 1)
217 if lprio == newprio:
217 if lprio == newprio:
218 lines[i] = '%s: %s' % (header, value)
218 lines[i] = '%s: %s' % (header, value)
219 return lines
219 return lines
220 if lprio > newprio and i < bestpos:
220 if lprio > newprio and i < bestpos:
221 bestpos = i
221 bestpos = i
222 else:
222 else:
223 if line:
223 if line:
224 lines.insert(i, '')
224 lines.insert(i, '')
225 if i < bestpos:
225 if i < bestpos:
226 bestpos = i
226 bestpos = i
227 break
227 break
228 lines.insert(bestpos, '%s: %s' % (header, value))
228 lines.insert(bestpos, '%s: %s' % (header, value))
229 return lines
229 return lines
230
230
231 class patchheader(object):
231 class patchheader(object):
232 def __init__(self, pf, plainmode=False):
232 def __init__(self, pf, plainmode=False):
233 def eatdiff(lines):
233 def eatdiff(lines):
234 while lines:
234 while lines:
235 l = lines[-1]
235 l = lines[-1]
236 if (l.startswith("diff -") or
236 if (l.startswith("diff -") or
237 l.startswith("Index:") or
237 l.startswith("Index:") or
238 l.startswith("===========")):
238 l.startswith("===========")):
239 del lines[-1]
239 del lines[-1]
240 else:
240 else:
241 break
241 break
242 def eatempty(lines):
242 def eatempty(lines):
243 while lines:
243 while lines:
244 if not lines[-1].strip():
244 if not lines[-1].strip():
245 del lines[-1]
245 del lines[-1]
246 else:
246 else:
247 break
247 break
248
248
249 message = []
249 message = []
250 comments = []
250 comments = []
251 user = None
251 user = None
252 date = None
252 date = None
253 parent = None
253 parent = None
254 format = None
254 format = None
255 subject = None
255 subject = None
256 branch = None
256 branch = None
257 nodeid = None
257 nodeid = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if (line.startswith('diff --git')
262 if (line.startswith('diff --git')
263 or (diffstart and line.startswith('+++ '))):
263 or (diffstart and line.startswith('+++ '))):
264 diffstart = 2
264 diffstart = 2
265 break
265 break
266 diffstart = 0 # reset
266 diffstart = 0 # reset
267 if line.startswith("--- "):
267 if line.startswith("--- "):
268 diffstart = 1
268 diffstart = 1
269 continue
269 continue
270 elif format == "hgpatch":
270 elif format == "hgpatch":
271 # parse values when importing the result of an hg export
271 # parse values when importing the result of an hg export
272 if line.startswith("# User "):
272 if line.startswith("# User "):
273 user = line[7:]
273 user = line[7:]
274 elif line.startswith("# Date "):
274 elif line.startswith("# Date "):
275 date = line[7:]
275 date = line[7:]
276 elif line.startswith("# Parent "):
276 elif line.startswith("# Parent "):
277 parent = line[9:].lstrip() # handle double trailing space
277 parent = line[9:].lstrip() # handle double trailing space
278 elif line.startswith("# Branch "):
278 elif line.startswith("# Branch "):
279 branch = line[9:]
279 branch = line[9:]
280 elif line.startswith("# Node ID "):
280 elif line.startswith("# Node ID "):
281 nodeid = line[10:]
281 nodeid = line[10:]
282 elif not line.startswith("# ") and line:
282 elif not line.startswith("# ") and line:
283 message.append(line)
283 message.append(line)
284 format = None
284 format = None
285 elif line == '# HG changeset patch':
285 elif line == '# HG changeset patch':
286 message = []
286 message = []
287 format = "hgpatch"
287 format = "hgpatch"
288 elif (format != "tagdone" and (line.startswith("Subject: ") or
288 elif (format != "tagdone" and (line.startswith("Subject: ") or
289 line.startswith("subject: "))):
289 line.startswith("subject: "))):
290 subject = line[9:]
290 subject = line[9:]
291 format = "tag"
291 format = "tag"
292 elif (format != "tagdone" and (line.startswith("From: ") or
292 elif (format != "tagdone" and (line.startswith("From: ") or
293 line.startswith("from: "))):
293 line.startswith("from: "))):
294 user = line[6:]
294 user = line[6:]
295 format = "tag"
295 format = "tag"
296 elif (format != "tagdone" and (line.startswith("Date: ") or
296 elif (format != "tagdone" and (line.startswith("Date: ") or
297 line.startswith("date: "))):
297 line.startswith("date: "))):
298 date = line[6:]
298 date = line[6:]
299 format = "tag"
299 format = "tag"
300 elif format == "tag" and line == "":
300 elif format == "tag" and line == "":
301 # when looking for tags (subject: from: etc) they
301 # when looking for tags (subject: from: etc) they
302 # end once you find a blank line in the source
302 # end once you find a blank line in the source
303 format = "tagdone"
303 format = "tagdone"
304 elif message or line:
304 elif message or line:
305 message.append(line)
305 message.append(line)
306 comments.append(line)
306 comments.append(line)
307
307
308 eatdiff(message)
308 eatdiff(message)
309 eatdiff(comments)
309 eatdiff(comments)
310 # Remember the exact starting line of the patch diffs before consuming
310 # Remember the exact starting line of the patch diffs before consuming
311 # empty lines, for external use by TortoiseHg and others
311 # empty lines, for external use by TortoiseHg and others
312 self.diffstartline = len(comments)
312 self.diffstartline = len(comments)
313 eatempty(message)
313 eatempty(message)
314 eatempty(comments)
314 eatempty(comments)
315
315
316 # make sure message isn't empty
316 # make sure message isn't empty
317 if format and format.startswith("tag") and subject:
317 if format and format.startswith("tag") and subject:
318 message.insert(0, subject)
318 message.insert(0, subject)
319
319
320 self.message = message
320 self.message = message
321 self.comments = comments
321 self.comments = comments
322 self.user = user
322 self.user = user
323 self.date = date
323 self.date = date
324 self.parent = parent
324 self.parent = parent
325 # nodeid and branch are for external use by TortoiseHg and others
325 # nodeid and branch are for external use by TortoiseHg and others
326 self.nodeid = nodeid
326 self.nodeid = nodeid
327 self.branch = branch
327 self.branch = branch
328 self.haspatch = diffstart > 1
328 self.haspatch = diffstart > 1
329 self.plainmode = (plainmode or
329 self.plainmode = (plainmode or
330 '# HG changeset patch' not in self.comments and
330 '# HG changeset patch' not in self.comments and
331 any(c.startswith('Date: ') or
331 any(c.startswith('Date: ') or
332 c.startswith('From: ')
332 c.startswith('From: ')
333 for c in self.comments))
333 for c in self.comments))
334
334
335 def setuser(self, user):
335 def setuser(self, user):
336 try:
336 try:
337 inserthgheader(self.comments, '# User ', user)
337 inserthgheader(self.comments, '# User ', user)
338 except ValueError:
338 except ValueError:
339 if self.plainmode:
339 if self.plainmode:
340 insertplainheader(self.comments, 'From', user)
340 insertplainheader(self.comments, 'From', user)
341 else:
341 else:
342 tmp = ['# HG changeset patch', '# User ' + user]
342 tmp = ['# HG changeset patch', '# User ' + user]
343 self.comments = tmp + self.comments
343 self.comments = tmp + self.comments
344 self.user = user
344 self.user = user
345
345
346 def setdate(self, date):
346 def setdate(self, date):
347 try:
347 try:
348 inserthgheader(self.comments, '# Date ', date)
348 inserthgheader(self.comments, '# Date ', date)
349 except ValueError:
349 except ValueError:
350 if self.plainmode:
350 if self.plainmode:
351 insertplainheader(self.comments, 'Date', date)
351 insertplainheader(self.comments, 'Date', date)
352 else:
352 else:
353 tmp = ['# HG changeset patch', '# Date ' + date]
353 tmp = ['# HG changeset patch', '# Date ' + date]
354 self.comments = tmp + self.comments
354 self.comments = tmp + self.comments
355 self.date = date
355 self.date = date
356
356
357 def setparent(self, parent):
357 def setparent(self, parent):
358 try:
358 try:
359 inserthgheader(self.comments, '# Parent ', parent)
359 inserthgheader(self.comments, '# Parent ', parent)
360 except ValueError:
360 except ValueError:
361 if not self.plainmode:
361 if not self.plainmode:
362 tmp = ['# HG changeset patch', '# Parent ' + parent]
362 tmp = ['# HG changeset patch', '# Parent ' + parent]
363 self.comments = tmp + self.comments
363 self.comments = tmp + self.comments
364 self.parent = parent
364 self.parent = parent
365
365
366 def setmessage(self, message):
366 def setmessage(self, message):
367 if self.comments:
367 if self.comments:
368 self._delmsg()
368 self._delmsg()
369 self.message = [message]
369 self.message = [message]
370 if message:
370 if message:
371 if self.plainmode and self.comments and self.comments[-1]:
371 if self.plainmode and self.comments and self.comments[-1]:
372 self.comments.append('')
372 self.comments.append('')
373 self.comments.append(message)
373 self.comments.append(message)
374
374
375 def __str__(self):
375 def __str__(self):
376 s = '\n'.join(self.comments).rstrip()
376 s = '\n'.join(self.comments).rstrip()
377 if not s:
377 if not s:
378 return ''
378 return ''
379 return s + '\n\n'
379 return s + '\n\n'
380
380
381 def _delmsg(self):
381 def _delmsg(self):
382 '''Remove existing message, keeping the rest of the comments fields.
382 '''Remove existing message, keeping the rest of the comments fields.
383 If comments contains 'subject: ', message will prepend
383 If comments contains 'subject: ', message will prepend
384 the field and a blank line.'''
384 the field and a blank line.'''
385 if self.message:
385 if self.message:
386 subj = 'subject: ' + self.message[0].lower()
386 subj = 'subject: ' + self.message[0].lower()
387 for i in xrange(len(self.comments)):
387 for i in xrange(len(self.comments)):
388 if subj == self.comments[i].lower():
388 if subj == self.comments[i].lower():
389 del self.comments[i]
389 del self.comments[i]
390 self.message = self.message[2:]
390 self.message = self.message[2:]
391 break
391 break
392 ci = 0
392 ci = 0
393 for mi in self.message:
393 for mi in self.message:
394 while mi != self.comments[ci]:
394 while mi != self.comments[ci]:
395 ci += 1
395 ci += 1
396 del self.comments[ci]
396 del self.comments[ci]
397
397
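patchheader.__init__ above interleaves three concerns: detecting the start of the diff, parsing '# HG changeset patch' export headers, and parsing plain mail-style tags. The sketch below is a reduced, standalone illustration of the export-header branch only; the function name and the returned dict are invented, and branch/tag/comment tracking is left out.

def parse_hg_export_header(text):
    # Reduced sketch of the hg-export branch of patchheader.__init__ above:
    # collect user/date/parent/node from the leading '# ...' lines and treat
    # the remaining non-comment lines before the diff as the message.
    fields = {'user': None, 'date': None, 'parent': None, 'node': None}
    message = []
    in_header = False
    for line in text.splitlines():
        line = line.rstrip()
        if line.startswith('diff --git') or line.startswith('--- '):
            break                         # the diff starts; stop parsing
        if line == '# HG changeset patch':
            in_header = True
            continue
        if in_header and line.startswith('# User '):
            fields['user'] = line[7:]
        elif in_header and line.startswith('# Date '):
            fields['date'] = line[7:]
        elif in_header and line.startswith('# Parent '):
            fields['parent'] = line[9:].lstrip()
        elif in_header and line.startswith('# Node ID '):
            fields['node'] = line[10:]
        elif line and not line.startswith('# '):
            message.append(line)
    return fields, message

hdr, msg = parse_hg_export_header(
    '# HG changeset patch\n'
    '# User alice\n'
    '# Date 0 0\n'
    'fix spelling\n'
    '\n'
    'diff --git a/f b/f\n')
print(hdr['user'], hdr['date'], msg)      # alice 0 0 ['fix spelling']
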
398 def newcommit(repo, phase, *args, **kwargs):
398 def newcommit(repo, phase, *args, **kwargs):
399 """helper dedicated to ensure a commit respect mq.secret setting
399 """helper dedicated to ensure a commit respect mq.secret setting
400
400
401 It should be used instead of repo.commit inside the mq source for operation
401 It should be used instead of repo.commit inside the mq source for operation
402 creating new changeset.
402 creating new changeset.
403 """
403 """
404 repo = repo.unfiltered()
404 repo = repo.unfiltered()
405 if phase is None:
405 if phase is None:
406 if repo.ui.configbool('mq', 'secret', False):
406 if repo.ui.configbool('mq', 'secret', False):
407 phase = phases.secret
407 phase = phases.secret
408 overrides = {('ui', 'allowemptycommit'): True}
408 overrides = {('ui', 'allowemptycommit'): True}
409 if phase is not None:
409 if phase is not None:
410 overrides[('phases', 'new-commit')] = phase
410 overrides[('phases', 'new-commit')] = phase
411 with repo.ui.configoverride(overrides, 'mq'):
411 with repo.ui.configoverride(overrides, 'mq'):
412 repo.ui.setconfig('ui', 'allowemptycommit', True)
412 repo.ui.setconfig('ui', 'allowemptycommit', True)
413 return repo.commit(*args, **kwargs)
413 return repo.commit(*args, **kwargs)
414
414
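The interesting part of newcommit() above is which configuration overrides it installs before committing. A small standalone sketch of that decision follows, with the string 'secret' standing in for the phases.secret constant and a hypothetical function name; it is not Mercurial API.

def mq_commit_overrides(secret_configured, phase=None):
    # Mirror of the override set newcommit() builds above: empty commits are
    # always allowed, and the new-commit phase is forced to secret when the
    # mq.secret config is set and no explicit phase was requested.
    if phase is None and secret_configured:
        phase = 'secret'                      # stand-in for phases.secret
    overrides = {('ui', 'allowemptycommit'): True}
    if phase is not None:
        overrides[('phases', 'new-commit')] = phase
    return overrides

print(mq_commit_overrides(secret_configured=True))
print(mq_commit_overrides(secret_configured=False, phase='draft'))
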
415 class AbortNoCleanup(error.Abort):
415 class AbortNoCleanup(error.Abort):
416 pass
416 pass
417
417
418 class queue(object):
418 class queue(object):
419 def __init__(self, ui, baseui, path, patchdir=None):
419 def __init__(self, ui, baseui, path, patchdir=None):
420 self.basepath = path
420 self.basepath = path
421 try:
421 try:
422 fh = open(os.path.join(path, 'patches.queue'))
422 fh = open(os.path.join(path, 'patches.queue'))
423 cur = fh.read().rstrip()
423 cur = fh.read().rstrip()
424 fh.close()
424 fh.close()
425 if not cur:
425 if not cur:
426 curpath = os.path.join(path, 'patches')
426 curpath = os.path.join(path, 'patches')
427 else:
427 else:
428 curpath = os.path.join(path, 'patches-' + cur)
428 curpath = os.path.join(path, 'patches-' + cur)
429 except IOError:
429 except IOError:
430 curpath = os.path.join(path, 'patches')
430 curpath = os.path.join(path, 'patches')
431 self.path = patchdir or curpath
431 self.path = patchdir or curpath
432 self.opener = vfsmod.vfs(self.path)
432 self.opener = vfsmod.vfs(self.path)
433 self.ui = ui
433 self.ui = ui
434 self.baseui = baseui
434 self.baseui = baseui
435 self.applieddirty = False
435 self.applieddirty = False
436 self.seriesdirty = False
436 self.seriesdirty = False
437 self.added = []
437 self.added = []
438 self.seriespath = "series"
438 self.seriespath = "series"
439 self.statuspath = "status"
439 self.statuspath = "status"
440 self.guardspath = "guards"
440 self.guardspath = "guards"
441 self.activeguards = None
441 self.activeguards = None
442 self.guardsdirty = False
442 self.guardsdirty = False
443 # Handle mq.git as a bool with extended values
443 # Handle mq.git as a bool with extended values
444 try:
444 try:
445 gitmode = ui.configbool('mq', 'git', None)
445 gitmode = ui.configbool('mq', 'git', None)
446 if gitmode is None:
446 if gitmode is None:
447 raise error.ConfigError
447 raise error.ConfigError
448 if gitmode:
448 if gitmode:
449 self.gitmode = 'yes'
449 self.gitmode = 'yes'
450 else:
450 else:
451 self.gitmode = 'no'
451 self.gitmode = 'no'
452 except error.ConfigError:
452 except error.ConfigError:
453 # let's have check-config ignore the type mismatch
453 # let's have check-config ignore the type mismatch
454 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
454 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
455 # deprecated config: mq.plain
455 # deprecated config: mq.plain
456 self.plainmode = ui.configbool('mq', 'plain', False)
456 self.plainmode = ui.configbool('mq', 'plain', False)
457 self.checkapplied = True
457 self.checkapplied = True
458
458
459 @util.propertycache
459 @util.propertycache
460 def applied(self):
460 def applied(self):
461 def parselines(lines):
461 def parselines(lines):
462 for l in lines:
462 for l in lines:
463 entry = l.split(':', 1)
463 entry = l.split(':', 1)
464 if len(entry) > 1:
464 if len(entry) > 1:
465 n, name = entry
465 n, name = entry
466 yield statusentry(bin(n), name)
466 yield statusentry(bin(n), name)
467 elif l.strip():
467 elif l.strip():
468 self.ui.warn(_('malformated mq status line: %s\n') % entry)
468 self.ui.warn(_('malformated mq status line: %s\n') % entry)
469 # else we ignore empty lines
469 # else we ignore empty lines
470 try:
470 try:
471 lines = self.opener.read(self.statuspath).splitlines()
471 lines = self.opener.read(self.statuspath).splitlines()
472 return list(parselines(lines))
472 return list(parselines(lines))
473 except IOError as e:
473 except IOError as e:
474 if e.errno == errno.ENOENT:
474 if e.errno == errno.ENOENT:
475 return []
475 return []
476 raise
476 raise
477
477
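The parselines helper inside the applied property above expects status-file lines of the form '<hexnode>:<patchname>'. A self-contained sketch of that parsing follows, with binascii.unhexlify standing in for mercurial.node.bin and a plain print in place of ui.warn; the function name is invented.

from binascii import unhexlify

def parse_status_lines(lines):
    # Each well-formed line is '<40-char hex node>:<patch name>'; any other
    # non-blank line is reported as malformed, as in the code above.
    entries = []
    for line in lines:
        parts = line.split(':', 1)
        if len(parts) > 1:
            node_hex, name = parts
            entries.append((unhexlify(node_hex), name))
        elif line.strip():
            print('malformed mq status line: %r' % line)
    return entries

print(parse_status_lines(['1' * 40 + ':fix-encoding.patch', '', 'bogus']))
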
478 @util.propertycache
478 @util.propertycache
479 def fullseries(self):
479 def fullseries(self):
480 try:
480 try:
481 return self.opener.read(self.seriespath).splitlines()
481 return self.opener.read(self.seriespath).splitlines()
482 except IOError as e:
482 except IOError as e:
483 if e.errno == errno.ENOENT:
483 if e.errno == errno.ENOENT:
484 return []
484 return []
485 raise
485 raise
486
486
487 @util.propertycache
487 @util.propertycache
488 def series(self):
488 def series(self):
489 self.parseseries()
489 self.parseseries()
490 return self.series
490 return self.series
491
491
492 @util.propertycache
492 @util.propertycache
493 def seriesguards(self):
493 def seriesguards(self):
494 self.parseseries()
494 self.parseseries()
495 return self.seriesguards
495 return self.seriesguards
496
496
497 def invalidate(self):
497 def invalidate(self):
498 for a in 'applied fullseries series seriesguards'.split():
498 for a in 'applied fullseries series seriesguards'.split():
499 if a in self.__dict__:
499 if a in self.__dict__:
500 delattr(self, a)
500 delattr(self, a)
501 self.applieddirty = False
501 self.applieddirty = False
502 self.seriesdirty = False
502 self.seriesdirty = False
503 self.guardsdirty = False
503 self.guardsdirty = False
504 self.activeguards = None
504 self.activeguards = None
505
505
506 def diffopts(self, opts=None, patchfn=None):
506 def diffopts(self, opts=None, patchfn=None):
507 diffopts = patchmod.diffopts(self.ui, opts)
507 diffopts = patchmod.diffopts(self.ui, opts)
508 if self.gitmode == 'auto':
508 if self.gitmode == 'auto':
509 diffopts.upgrade = True
509 diffopts.upgrade = True
510 elif self.gitmode == 'keep':
510 elif self.gitmode == 'keep':
511 pass
511 pass
512 elif self.gitmode in ('yes', 'no'):
512 elif self.gitmode in ('yes', 'no'):
513 diffopts.git = self.gitmode == 'yes'
513 diffopts.git = self.gitmode == 'yes'
514 else:
514 else:
515 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
515 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
516 ' got %s') % self.gitmode)
516 ' got %s') % self.gitmode)
517 if patchfn:
517 if patchfn:
518 diffopts = self.patchopts(diffopts, patchfn)
518 diffopts = self.patchopts(diffopts, patchfn)
519 return diffopts
519 return diffopts
520
520
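queue.diffopts above is where the mq.git setting described in the module docstring actually takes effect. A compact standalone sketch of that mapping follows, returning an (upgrade, git) pair instead of mutating a diffopts object; the function name is invented.

def resolve_git_mode(gitmode):
    # auto: let the diff machinery upgrade to git patches when needed
    # keep: leave the [diff] configuration alone (both flags untouched)
    # yes/no: force git patches on or off
    if gitmode == 'auto':
        return True, None
    if gitmode == 'keep':
        return None, None
    if gitmode in ('yes', 'no'):
        return None, gitmode == 'yes'
    raise ValueError('mq.git option can be auto/keep/yes/no, got %s' % gitmode)

print(resolve_git_mode('auto'))   # (True, None)
print(resolve_git_mode('no'))     # (None, False)
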
521 def patchopts(self, diffopts, *patches):
521 def patchopts(self, diffopts, *patches):
522 """Return a copy of input diff options with git set to true if
522 """Return a copy of input diff options with git set to true if
523 referenced patch is a git patch and should be preserved as such.
523 referenced patch is a git patch and should be preserved as such.
524 """
524 """
525 diffopts = diffopts.copy()
525 diffopts = diffopts.copy()
526 if not diffopts.git and self.gitmode == 'keep':
526 if not diffopts.git and self.gitmode == 'keep':
527 for patchfn in patches:
527 for patchfn in patches:
528 patchf = self.opener(patchfn, 'r')
528 patchf = self.opener(patchfn, 'r')
529 # if the patch was a git patch, refresh it as a git patch
529 # if the patch was a git patch, refresh it as a git patch
530 for line in patchf:
530 for line in patchf:
531 if line.startswith('diff --git'):
531 if line.startswith('diff --git'):
532 diffopts.git = True
532 diffopts.git = True
533 break
533 break
534 patchf.close()
534 patchf.close()
535 return diffopts
535 return diffopts
536
536
537 def join(self, *p):
537 def join(self, *p):
538 return os.path.join(self.path, *p)
538 return os.path.join(self.path, *p)
539
539
540 def findseries(self, patch):
540 def findseries(self, patch):
541 def matchpatch(l):
541 def matchpatch(l):
542 l = l.split('#', 1)[0]
542 l = l.split('#', 1)[0]
543 return l.strip() == patch
543 return l.strip() == patch
544 for index, l in enumerate(self.fullseries):
544 for index, l in enumerate(self.fullseries):
545 if matchpatch(l):
545 if matchpatch(l):
546 return index
546 return index
547 return None
547 return None
548
548
549 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
549 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
550
550
551 def parseseries(self):
551 def parseseries(self):
552 self.series = []
552 self.series = []
553 self.seriesguards = []
553 self.seriesguards = []
554 for l in self.fullseries:
554 for l in self.fullseries:
555 h = l.find('#')
555 h = l.find('#')
556 if h == -1:
556 if h == -1:
557 patch = l
557 patch = l
558 comment = ''
558 comment = ''
559 elif h == 0:
559 elif h == 0:
560 continue
560 continue
561 else:
561 else:
562 patch = l[:h]
562 patch = l[:h]
563 comment = l[h:]
563 comment = l[h:]
564 patch = patch.strip()
564 patch = patch.strip()
565 if patch:
565 if patch:
566 if patch in self.series:
566 if patch in self.series:
567 raise error.Abort(_('%s appears more than once in %s') %
567 raise error.Abort(_('%s appears more than once in %s') %
568 (patch, self.join(self.seriespath)))
568 (patch, self.join(self.seriespath)))
569 self.series.append(patch)
569 self.series.append(patch)
570 self.seriesguards.append(self.guard_re.findall(comment))
570 self.seriesguards.append(self.guard_re.findall(comment))
571
571
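parseseries above, together with guard_re, turns each series-file line into a patch name plus its guard list. The following self-contained sketch performs the same split on plain lists; the names are invented and the duplicate-patch check from the original is omitted.

import re

GUARD_RE = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

def parse_series(lines):
    series, guards = [], []
    for line in lines:
        h = line.find('#')
        if h == -1:
            patch, comment = line, ''
        elif h == 0:
            continue                          # whole line is a comment
        else:
            patch, comment = line[:h], line[h:]
        patch = patch.strip()
        if patch:
            series.append(patch)
            guards.append(GUARD_RE.findall(comment))
    return series, guards

print(parse_series(['# queue for the stable branch',
                    'fix-a.patch',
                    'fix-b.patch #+stable #-broken']))
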
572 def checkguard(self, guard):
572 def checkguard(self, guard):
573 if not guard:
573 if not guard:
574 return _('guard cannot be an empty string')
574 return _('guard cannot be an empty string')
575 bad_chars = '# \t\r\n\f'
575 bad_chars = '# \t\r\n\f'
576 first = guard[0]
576 first = guard[0]
577 if first in '-+':
577 if first in '-+':
578 return (_('guard %r starts with invalid character: %r') %
578 return (_('guard %r starts with invalid character: %r') %
579 (guard, first))
579 (guard, first))
580 for c in bad_chars:
580 for c in bad_chars:
581 if c in guard:
581 if c in guard:
582 return _('invalid character in guard %r: %r') % (guard, c)
582 return _('invalid character in guard %r: %r') % (guard, c)
583
583
584 def setactive(self, guards):
584 def setactive(self, guards):
585 for guard in guards:
585 for guard in guards:
586 bad = self.checkguard(guard)
586 bad = self.checkguard(guard)
587 if bad:
587 if bad:
588 raise error.Abort(bad)
588 raise error.Abort(bad)
589 guards = sorted(set(guards))
589 guards = sorted(set(guards))
590 self.ui.debug('active guards: %s\n' % ' '.join(guards))
590 self.ui.debug('active guards: %s\n' % ' '.join(guards))
591 self.activeguards = guards
591 self.activeguards = guards
592 self.guardsdirty = True
592 self.guardsdirty = True
593
593
594 def active(self):
594 def active(self):
595 if self.activeguards is None:
595 if self.activeguards is None:
596 self.activeguards = []
596 self.activeguards = []
597 try:
597 try:
598 guards = self.opener.read(self.guardspath).split()
598 guards = self.opener.read(self.guardspath).split()
599 except IOError as err:
599 except IOError as err:
600 if err.errno != errno.ENOENT:
600 if err.errno != errno.ENOENT:
601 raise
601 raise
602 guards = []
602 guards = []
603 for i, guard in enumerate(guards):
603 for i, guard in enumerate(guards):
604 bad = self.checkguard(guard)
604 bad = self.checkguard(guard)
605 if bad:
605 if bad:
606 self.ui.warn('%s:%d: %s\n' %
606 self.ui.warn('%s:%d: %s\n' %
607 (self.join(self.guardspath), i + 1, bad))
607 (self.join(self.guardspath), i + 1, bad))
608 else:
608 else:
609 self.activeguards.append(guard)
609 self.activeguards.append(guard)
610 return self.activeguards
610 return self.activeguards
611
611
612 def setguards(self, idx, guards):
612 def setguards(self, idx, guards):
613 for g in guards:
613 for g in guards:
614 if len(g) < 2:
614 if len(g) < 2:
615 raise error.Abort(_('guard %r too short') % g)
615 raise error.Abort(_('guard %r too short') % g)
616 if g[0] not in '-+':
616 if g[0] not in '-+':
617 raise error.Abort(_('guard %r starts with invalid char') % g)
617 raise error.Abort(_('guard %r starts with invalid char') % g)
618 bad = self.checkguard(g[1:])
618 bad = self.checkguard(g[1:])
619 if bad:
619 if bad:
620 raise error.Abort(bad)
620 raise error.Abort(bad)
621 drop = self.guard_re.sub('', self.fullseries[idx])
621 drop = self.guard_re.sub('', self.fullseries[idx])
622 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
622 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
623 self.parseseries()
623 self.parseseries()
624 self.seriesdirty = True
624 self.seriesdirty = True
625
625
626 def pushable(self, idx):
626 def pushable(self, idx):
627 if isinstance(idx, str):
627 if isinstance(idx, str):
628 idx = self.series.index(idx)
628 idx = self.series.index(idx)
629 patchguards = self.seriesguards[idx]
629 patchguards = self.seriesguards[idx]
630 if not patchguards:
630 if not patchguards:
631 return True, None
631 return True, None
632 guards = self.active()
632 guards = self.active()
633 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
633 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
634 if exactneg:
634 if exactneg:
635 return False, repr(exactneg[0])
635 return False, repr(exactneg[0])
636 pos = [g for g in patchguards if g[0] == '+']
636 pos = [g for g in patchguards if g[0] == '+']
637 exactpos = [g for g in pos if g[1:] in guards]
637 exactpos = [g for g in pos if g[1:] in guards]
638 if pos:
638 if pos:
639 if exactpos:
639 if exactpos:
640 return True, repr(exactpos[0])
640 return True, repr(exactpos[0])
641 return False, ' '.join(map(repr, pos))
641 return False, ' '.join(map(repr, pos))
642 return True, ''
642 return True, ''
643
643
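The guard semantics of queue.pushable above reduce to: any matching negative guard blocks the patch, and if positive guards exist at least one must match the active set. A self-contained sketch with the same return convention follows, written as a free function rather than a method.

def pushable(patch_guards, active_guards):
    # patch_guards: e.g. ['+stable', '-broken']; active_guards: e.g. ['stable']
    if not patch_guards:
        return True, None
    active = set(active_guards)
    exactneg = [g for g in patch_guards if g[0] == '-' and g[1:] in active]
    if exactneg:
        return False, repr(exactneg[0])       # blocked by a negative guard
    pos = [g for g in patch_guards if g[0] == '+']
    exactpos = [g for g in pos if g[1:] in active]
    if pos:
        if exactpos:
            return True, repr(exactpos[0])    # enabled by a positive guard
        return False, ' '.join(map(repr, pos))
    return True, ''

print(pushable(['+stable'], ['stable']))      # (True, "'+stable'")
print(pushable(['-broken'], ['broken']))      # (False, "'-broken'")
print(pushable(['+stable'], []))              # (False, "'+stable'")
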
644 def explainpushable(self, idx, all_patches=False):
644 def explainpushable(self, idx, all_patches=False):
645 if all_patches:
645 if all_patches:
646 write = self.ui.write
646 write = self.ui.write
647 else:
647 else:
648 write = self.ui.warn
648 write = self.ui.warn
649
649
650 if all_patches or self.ui.verbose:
650 if all_patches or self.ui.verbose:
651 if isinstance(idx, str):
651 if isinstance(idx, str):
652 idx = self.series.index(idx)
652 idx = self.series.index(idx)
653 pushable, why = self.pushable(idx)
653 pushable, why = self.pushable(idx)
654 if all_patches and pushable:
654 if all_patches and pushable:
655 if why is None:
655 if why is None:
656 write(_('allowing %s - no guards in effect\n') %
656 write(_('allowing %s - no guards in effect\n') %
657 self.series[idx])
657 self.series[idx])
658 else:
658 else:
659 if not why:
659 if not why:
660 write(_('allowing %s - no matching negative guards\n') %
660 write(_('allowing %s - no matching negative guards\n') %
661 self.series[idx])
661 self.series[idx])
662 else:
662 else:
663 write(_('allowing %s - guarded by %s\n') %
663 write(_('allowing %s - guarded by %s\n') %
664 (self.series[idx], why))
664 (self.series[idx], why))
665 if not pushable:
665 if not pushable:
666 if why:
666 if why:
667 write(_('skipping %s - guarded by %s\n') %
667 write(_('skipping %s - guarded by %s\n') %
668 (self.series[idx], why))
668 (self.series[idx], why))
669 else:
669 else:
670 write(_('skipping %s - no matching guards\n') %
670 write(_('skipping %s - no matching guards\n') %
671 self.series[idx])
671 self.series[idx])
672
672
673 def savedirty(self):
673 def savedirty(self):
674 def writelist(items, path):
674 def writelist(items, path):
675 fp = self.opener(path, 'w')
675 fp = self.opener(path, 'w')
676 for i in items:
676 for i in items:
677 fp.write("%s\n" % i)
677 fp.write("%s\n" % i)
678 fp.close()
678 fp.close()
679 if self.applieddirty:
679 if self.applieddirty:
680 writelist(map(str, self.applied), self.statuspath)
680 writelist(map(str, self.applied), self.statuspath)
681 self.applieddirty = False
681 self.applieddirty = False
682 if self.seriesdirty:
682 if self.seriesdirty:
683 writelist(self.fullseries, self.seriespath)
683 writelist(self.fullseries, self.seriespath)
684 self.seriesdirty = False
684 self.seriesdirty = False
685 if self.guardsdirty:
685 if self.guardsdirty:
686 writelist(self.activeguards, self.guardspath)
686 writelist(self.activeguards, self.guardspath)
687 self.guardsdirty = False
687 self.guardsdirty = False
688 if self.added:
688 if self.added:
689 qrepo = self.qrepo()
689 qrepo = self.qrepo()
690 if qrepo:
690 if qrepo:
691 qrepo[None].add(f for f in self.added if f not in qrepo[None])
691 qrepo[None].add(f for f in self.added if f not in qrepo[None])
692 self.added = []
692 self.added = []
693
693
694 def removeundo(self, repo):
694 def removeundo(self, repo):
695 undo = repo.sjoin('undo')
695 undo = repo.sjoin('undo')
696 if not os.path.exists(undo):
696 if not os.path.exists(undo):
697 return
697 return
698 try:
698 try:
699 os.unlink(undo)
699 os.unlink(undo)
700 except OSError as inst:
700 except OSError as inst:
701 self.ui.warn(_('error removing undo: %s\n') % str(inst))
701 self.ui.warn(_('error removing undo: %s\n') % str(inst))
702
702
703 def backup(self, repo, files, copy=False):
703 def backup(self, repo, files, copy=False):
704 # backup local changes in --force case
704 # backup local changes in --force case
705 for f in sorted(files):
705 for f in sorted(files):
706 absf = repo.wjoin(f)
706 absf = repo.wjoin(f)
707 if os.path.lexists(absf):
707 if os.path.lexists(absf):
708 self.ui.note(_('saving current version of %s as %s\n') %
708 self.ui.note(_('saving current version of %s as %s\n') %
709 (f, scmutil.origpath(self.ui, repo, f)))
709 (f, scmutil.origpath(self.ui, repo, f)))
710
710
711 absorig = scmutil.origpath(self.ui, repo, absf)
711 absorig = scmutil.origpath(self.ui, repo, absf)
712 if copy:
712 if copy:
713 util.copyfile(absf, absorig)
713 util.copyfile(absf, absorig)
714 else:
714 else:
715 util.rename(absf, absorig)
715 util.rename(absf, absorig)
716
716
717 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
717 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
718 fp=None, changes=None, opts=None):
718 fp=None, changes=None, opts=None):
719 if opts is None:
719 if opts is None:
720 opts = {}
720 opts = {}
721 stat = opts.get('stat')
721 stat = opts.get('stat')
722 m = scmutil.match(repo[node1], files, opts)
722 m = scmutil.match(repo[node1], files, opts)
723 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
723 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
724 changes, stat, fp)
724 changes, stat, fp)
725
725
726 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
726 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
727 # first try just applying the patch
727 # first try just applying the patch
728 (err, n) = self.apply(repo, [patch], update_status=False,
728 (err, n) = self.apply(repo, [patch], update_status=False,
729 strict=True, merge=rev)
729 strict=True, merge=rev)
730
730
731 if err == 0:
731 if err == 0:
732 return (err, n)
732 return (err, n)
733
733
734 if n is None:
734 if n is None:
735 raise error.Abort(_("apply failed for patch %s") % patch)
735 raise error.Abort(_("apply failed for patch %s") % patch)
736
736
737 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
737 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
738
738
739 # apply failed, strip away that rev and merge.
739 # apply failed, strip away that rev and merge.
740 hg.clean(repo, head)
740 hg.clean(repo, head)
741 strip(self.ui, repo, [n], update=False, backup=False)
741 strip(self.ui, repo, [n], update=False, backup=False)
742
742
743 ctx = repo[rev]
743 ctx = repo[rev]
744 ret = hg.merge(repo, rev)
744 ret = hg.merge(repo, rev)
745 if ret:
745 if ret:
746 raise error.Abort(_("update returned %d") % ret)
746 raise error.Abort(_("update returned %d") % ret)
747 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
747 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
748 if n is None:
748 if n is None:
749 raise error.Abort(_("repo commit failed"))
749 raise error.Abort(_("repo commit failed"))
750 try:
750 try:
751 ph = patchheader(mergeq.join(patch), self.plainmode)
751 ph = patchheader(mergeq.join(patch), self.plainmode)
752 except Exception:
752 except Exception:
753 raise error.Abort(_("unable to read %s") % patch)
753 raise error.Abort(_("unable to read %s") % patch)
754
754
755 diffopts = self.patchopts(diffopts, patch)
755 diffopts = self.patchopts(diffopts, patch)
756 patchf = self.opener(patch, "w")
756 patchf = self.opener(patch, "w")
757 comments = str(ph)
757 comments = str(ph)
758 if comments:
758 if comments:
759 patchf.write(comments)
759 patchf.write(comments)
760 self.printdiff(repo, diffopts, head, n, fp=patchf)
760 self.printdiff(repo, diffopts, head, n, fp=patchf)
761 patchf.close()
761 patchf.close()
762 self.removeundo(repo)
762 self.removeundo(repo)
763 return (0, n)
763 return (0, n)
764
764
765 def qparents(self, repo, rev=None):
765 def qparents(self, repo, rev=None):
766 """return the mq handled parent or p1
766 """return the mq handled parent or p1
767
767
768 In some cases where mq finds itself being the parent of a merge, the
768 In some cases where mq finds itself being the parent of a merge, the
769 appropriate parent may be p2
769 appropriate parent may be p2
770 (e.g. an in-progress merge started with mq disabled).
770 (e.g. an in-progress merge started with mq disabled).
771
771
772 If no parents are managed by mq, p1 is returned.
772 If no parents are managed by mq, p1 is returned.
773 """
773 """
774 if rev is None:
774 if rev is None:
775 (p1, p2) = repo.dirstate.parents()
775 (p1, p2) = repo.dirstate.parents()
776 if p2 == nullid:
776 if p2 == nullid:
777 return p1
777 return p1
778 if not self.applied:
778 if not self.applied:
779 return None
779 return None
780 return self.applied[-1].node
780 return self.applied[-1].node
781 p1, p2 = repo.changelog.parents(rev)
781 p1, p2 = repo.changelog.parents(rev)
782 if p2 != nullid and p2 in [x.node for x in self.applied]:
782 if p2 != nullid and p2 in [x.node for x in self.applied]:
783 return p2
783 return p2
784 return p1
784 return p1
785
785
786 def mergepatch(self, repo, mergeq, series, diffopts):
786 def mergepatch(self, repo, mergeq, series, diffopts):
787 if not self.applied:
787 if not self.applied:
788 # each of the patches merged in will have two parents. This
788 # each of the patches merged in will have two parents. This
789 # can confuse the qrefresh, qdiff, and strip code because it
789 # can confuse the qrefresh, qdiff, and strip code because it
790 # needs to know which parent is actually in the patch queue.
790 # needs to know which parent is actually in the patch queue.
791 # so, we insert a merge marker with only one parent. This way
791 # so, we insert a merge marker with only one parent. This way
792 # the first patch in the queue is never a merge patch
792 # the first patch in the queue is never a merge patch
793 #
793 #
794 pname = ".hg.patches.merge.marker"
794 pname = ".hg.patches.merge.marker"
795 n = newcommit(repo, None, '[mq]: merge marker', force=True)
795 n = newcommit(repo, None, '[mq]: merge marker', force=True)
796 self.removeundo(repo)
796 self.removeundo(repo)
797 self.applied.append(statusentry(n, pname))
797 self.applied.append(statusentry(n, pname))
798 self.applieddirty = True
798 self.applieddirty = True
799
799
800 head = self.qparents(repo)
800 head = self.qparents(repo)
801
801
802 for patch in series:
802 for patch in series:
803 patch = mergeq.lookup(patch, strict=True)
803 patch = mergeq.lookup(patch, strict=True)
804 if not patch:
804 if not patch:
805 self.ui.warn(_("patch %s does not exist\n") % patch)
805 self.ui.warn(_("patch %s does not exist\n") % patch)
806 return (1, None)
806 return (1, None)
807 pushable, reason = self.pushable(patch)
807 pushable, reason = self.pushable(patch)
808 if not pushable:
808 if not pushable:
809 self.explainpushable(patch, all_patches=True)
809 self.explainpushable(patch, all_patches=True)
810 continue
810 continue
811 info = mergeq.isapplied(patch)
811 info = mergeq.isapplied(patch)
812 if not info:
812 if not info:
813 self.ui.warn(_("patch %s is not applied\n") % patch)
813 self.ui.warn(_("patch %s is not applied\n") % patch)
814 return (1, None)
814 return (1, None)
815 rev = info[1]
815 rev = info[1]
816 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
816 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
817 if head:
817 if head:
818 self.applied.append(statusentry(head, patch))
818 self.applied.append(statusentry(head, patch))
819 self.applieddirty = True
819 self.applieddirty = True
820 if err:
820 if err:
821 return (err, head)
821 return (err, head)
822 self.savedirty()
822 self.savedirty()
823 return (0, head)
823 return (0, head)
824
824
825 def patch(self, repo, patchfile):
825 def patch(self, repo, patchfile):
826 '''Apply patchfile to the working directory.
826 '''Apply patchfile to the working directory.
827 patchfile: name of patch file'''
827 patchfile: name of patch file'''
828 files = set()
828 files = set()
829 try:
829 try:
830 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
830 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
831 files=files, eolmode=None)
831 files=files, eolmode=None)
832 return (True, list(files), fuzz)
832 return (True, list(files), fuzz)
833 except Exception as inst:
833 except Exception as inst:
834 self.ui.note(str(inst) + '\n')
834 self.ui.note(str(inst) + '\n')
835 if not self.ui.verbose:
835 if not self.ui.verbose:
836 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
836 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
837 self.ui.traceback()
837 self.ui.traceback()
838 return (False, list(files), False)
838 return (False, list(files), False)
839
839
840 def apply(self, repo, series, list=False, update_status=True,
840 def apply(self, repo, series, list=False, update_status=True,
841 strict=False, patchdir=None, merge=None, all_files=None,
841 strict=False, patchdir=None, merge=None, all_files=None,
842 tobackup=None, keepchanges=False):
842 tobackup=None, keepchanges=False):
843 wlock = lock = tr = None
843 wlock = lock = tr = None
844 try:
844 try:
845 wlock = repo.wlock()
845 wlock = repo.wlock()
846 lock = repo.lock()
846 lock = repo.lock()
847 tr = repo.transaction("qpush")
847 tr = repo.transaction("qpush")
848 try:
848 try:
849 ret = self._apply(repo, series, list, update_status,
849 ret = self._apply(repo, series, list, update_status,
850 strict, patchdir, merge, all_files=all_files,
850 strict, patchdir, merge, all_files=all_files,
851 tobackup=tobackup, keepchanges=keepchanges)
851 tobackup=tobackup, keepchanges=keepchanges)
852 tr.close()
852 tr.close()
853 self.savedirty()
853 self.savedirty()
854 return ret
854 return ret
855 except AbortNoCleanup:
855 except AbortNoCleanup:
856 tr.close()
856 tr.close()
857 self.savedirty()
857 self.savedirty()
858 raise
858 raise
859 except: # re-raises
859 except: # re-raises
860 try:
860 try:
861 tr.abort()
861 tr.abort()
862 finally:
862 finally:
863 self.invalidate()
            self.invalidate()
            raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)

    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("conflicting local changes found"),
                            hint=_("did you forget to qrefresh?"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                with repo.dirstate.parentchange():
                    for f in removed:
                        repo.dirstate.remove(f)
                    for f in merged:
                        repo.dirstate.merge(f)
                    p1, p2 = repo.dirstate.parents()
                    repo.setparents(p1, merge)

            if all_files and '.hgsubstate' in all_files:
                wctx = repo[None]
                pctx = repo['.']
                overwrite = False
                mergedsubstate = subrepo.submerge(repo, pctx, wctx, wctx,
                                                  overwrite)
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise error.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise error.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working "
                               "directory\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)

    def _cleanup(self, patches, numrevs, keep=False):
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError as inst:
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise error.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]

    def _revpatches(self, repo, revs):
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                raise error.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _('cannot delete revision %d above applied patches')
                raise error.Abort(msg % rev)

            patch = self.applied[i].name
            for fmt in ('[mq]: %s', 'imported patch %s'):
                if ctx.description() == fmt % patch:
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches

    def finish(self, repo, revs):
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool('mq', 'secret', False):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = repo.ui.config('phases', 'new-commit', phases.draft)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                with repo.transaction('qfinish') as tr:
                    phases.advanceboundary(repo, tr, tphase, qfinished)

    def delete(self, repo, patches, opts):
        if not patches and not opts.get('rev'):
            raise error.Abort(_('qdelete requires at least one revision or '
                                'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise error.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise error.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise error.Abort(_('no patches applied'))
            revs = scmutil.revrange(repo, opts.get('rev'))
            revs.sort()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))

    def checktoppatch(self, repo):
        '''check that working directory is at qtip'''
        if self.applied:
            top = self.applied[-1].node
            patch = self.applied[-1].name
            if repo.dirstate.p1() != top:
                raise error.Abort(_("working directory revision is not qtip"))
            return top, patch
        return None, None

    def putsubstate2changes(self, substatestate, changes):
        for files in changes[:3]:
            if '.hgsubstate' in files:
                return # already listed up
        # not yet listed up
        if substatestate in 'a?':
            changes[1].append('.hgsubstate')
        elif substatestate in 'r':
            changes[2].append('.hgsubstate')
        else: # modified
            changes[0].append('.hgsubstate')

    def checklocalchanges(self, repo, force=False, refresh=True):
        excsuffix = ''
        if refresh:
            excsuffix = ', qrefresh first'
            # plain versions for i18n tool to detect them
            _("local changes found, qrefresh first")
            _("local changed subrepos found, qrefresh first")
        return checklocalchanges(repo, force, excsuffix)

    _reserved = ('series', 'status', 'guards', '.', '..')
    def checkreservedname(self, name):
        if name in self._reserved:
            raise error.Abort(_('"%s" cannot be used as the name of a patch')
                              % name)
        if name != name.strip():
            # whitespace is stripped by parseseries()
            raise error.Abort(_('patch name cannot begin or end with '
                                'whitespace'))
        for prefix in ('.hg', '.mq'):
            if name.startswith(prefix):
                raise error.Abort(_('patch name cannot begin with "%s"')
                                  % prefix)
        for c in ('#', ':', '\r', '\n'):
            if c in name:
                raise error.Abort(_('%r cannot be used in the name of a patch')
                                  % c)

    def checkpatchname(self, name, force=False):
        self.checkreservedname(name)
        if not force and os.path.exists(self.join(name)):
            if os.path.isdir(self.join(name)):
                raise error.Abort(_('"%s" already exists as a directory')
                                  % name)
            else:
                raise error.Abort(_('patch "%s" already exists') % name)

    def makepatchname(self, title, fallbackname):
        """Return a suitable filename for title, adding a suffix to make
        it unique in the existing list"""
        namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
        namebase = namebase[:75] # avoid too long name (issue5117)
        if namebase:
            try:
                self.checkreservedname(namebase)
            except error.Abort:
                namebase = fallbackname
        else:
            namebase = fallbackname
        name = namebase
        i = 0
        while True:
            if name not in self.fullseries:
                try:
                    self.checkpatchname(name)
                    break
                except error.Abort:
                    pass
            i += 1
            name = '%s__%s' % (namebase, i)
        return name

    def checkkeepchanges(self, keepchanges, force):
        if force and keepchanges:
            raise error.Abort(_('cannot use both --force and --keep-changes'))

    def new(self, repo, patchfn, *pats, **opts):
        """options:
        msg: a string or a no-argument function returning a string
        """
        msg = opts.get('msg')
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qnew')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = checksubstate(repo)
        if inclsubs:
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise error.Abort('%s: %s' % (f, msg))
            match = scmutil.match(repo[None], pats, opts, badfn=badfn)
            changes = repo.status(match=match)
        else:
            changes = self.checklocalchanges(repo, force=True)
        commitfiles = list(inclsubs)
        for files in changes[:3]:
            commitfiles.extend(files)
        match = scmutil.matchfiles(repo, commitfiles)
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot manage merge changesets'))
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        with repo.wlock():
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError as e:
                raise error.Abort(_('cannot write patch "%s": %s')
                                  % (patchfn, e.strerror))
            try:
                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        if desc.rstrip():
                            return desc
                        else:
                            return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    commitmsg = msg
                else:
                    commitmsg = msg or defaultmsg

                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True, editor=editor)
                if n is None:
                    raise error.Abort(_("repo commit failed"))
                try:
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    nctx = repo[n]
                    ph = patchheader(self.join(patchfn), self.plainmode)
                    if user:
                        ph.setuser(user)
                    if date:
                        ph.setdate('%s %s' % date)
                    ph.setparent(hex(nctx.p1().node()))
                    msg = nctx.description().strip()
                    if msg == defaultmsg.strip():
                        msg = ''
                    ph.setmessage(msg)
                    p.write(str(ph))
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        if inclsubs:
                            self.putsubstate2changes(substatestate, changes)
                        chunks = patchmod.diff(repo, node1=parent, node2=n,
                                               changes=changes, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except: # re-raises
                    repo.rollback()
                    raise
            except Exception:
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except OSError:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
        self.removeundo(repo)

    def isapplied(self, patch):
        """returns (index, rev, patch)"""
        for i, a in enumerate(self.applied):
            if a.name == patch:
                return (i, a.node, a.name)
        return None

    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number (as string) to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        def partialname(s):
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.seriesend(True) - 1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch in self.series:
            return patch

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

            if not strict:
                res = partialname(patch)
                if res:
                    return res
                minus = patch.rfind('-')
                if minus >= 0:
                    res = partialname(patch[:minus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[minus + 1:] or 1)
                        except (ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                plus = patch.rfind('+')
                if plus >= 0:
                    res = partialname(patch[:plus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[plus + 1:] or 1)
                        except (ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise error.Abort(_("patch %s not in series") % patch)

    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
             all=False, move=False, exact=False, nobackup=False,
             keepchanges=False):
        self.checkkeepchanges(keepchanges, force)
        diffopts = self.diffopts()
        with repo.wlock():
            heads = []
            for hs in repo.branchmap().itervalues():
                heads.extend(hs)
            if not heads:
                heads = [nullid]
            if repo.dirstate.p1() not in heads and not exact:
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                patch = self.lookup(patch)
                info = self.isapplied(patch)
                if info and info[0] >= len(self.applied) - 1:
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return 0

                pushable, reason = self.pushable(patch)
                if pushable:
                    if self.series.index(patch) < self.seriesend():
                        raise error.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                else:
                    if reason:
                        reason = _('guarded by %s') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.seriesend()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force and not keepchanges:
                self.checklocalchanges(repo, refresh=self.applied)

            if exact:
                if keepchanges:
                    raise error.Abort(
                        _("cannot use --exact and --keep-changes together"))
                if move:
                    raise error.Abort(_('cannot use --exact and --move '
                                        'together'))
                if self.applied:
                    raise error.Abort(_('cannot push --exact with applied '
                                        'patches'))
                root = self.series[start]
                target = patchheader(self.join(root), self.plainmode).parent
                if not target:
                    raise error.Abort(
                        _("%s does not have a parent recorded") % root)
                if not repo[target] == repo['.']:
                    hg.update(repo, target)

            if move:
                if not patch:
                    raise error.Abort(_("please specify the patch to move"))
                for fullstart, rpn in enumerate(self.fullseries):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                        break
                for i, rpn in enumerate(self.fullseries[fullstart:]):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == patch:
                        break
                index = fullstart + i
                assert index < len(self.fullseries)
                fullpatch = self.fullseries[index]
                del self.fullseries[index]
                self.fullseries.insert(fullstart, fullpatch)
                self.parseseries()
                self.seriesdirty = True

            self.applieddirty = True
            if start > 0:
                self.checktoppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            tobackup = set()
            if (not nobackup and force) or keepchanges:
                status = self.checklocalchanges(repo, force=True)
                if keepchanges:
                    tobackup.update(status.modified + status.added +
                                    status.removed + status.deleted)
                else:
                    tobackup.update(status.modified + status.added)

            s = self.series[start:end]
            all_files = set()
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s, diffopts)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files,
                                     tobackup=tobackup, keepchanges=keepchanges)
            except AbortNoCleanup:
                raise
            except: # re-raises
                self.ui.warn(_('cleaning up working directory...\n'))
                cmdutil.revert(self.ui, repo, repo['.'],
                               repo.dirstate.parents(), no_backup=True)
                # only remove unknown files that we know we touched or
                # created while patching
                for f in all_files:
                    if f not in repo.dirstate:
                        repo.wvfs.unlinkpath(f, ignoremissing=True)
                self.ui.warn(_('done\n'))
                raise

            if not self.applied:
                return ret[0]
            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                msg = _("errors during apply, please fix and qrefresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]

    def pop(self, repo, patch=None, force=False, update=True, all=False,
            nobackup=False, keepchanges=False):
        self.checkkeepchanges(keepchanges, force)
        with repo.wlock():
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                if not info:
                    raise error.Abort(_("patch %s is not applied") % patch)

            if not self.applied:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if all:
                start = 0
            elif patch:
                start = info[0] + 1
            else:
                start = len(self.applied) - 1

            if start >= len(self.applied):
                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                return

            if not update:
                parents = repo.dirstate.parents()
                rr = [x.node for x in self.applied]
                for p in parents:
                    if p in rr:
                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                        update = True
            else:
                parents = [p.node() for p in repo[None].parents()]
                needupdate = False
                for entry in self.applied[start:]:
                    if entry.node in parents:
                        needupdate = True
                        break
                update = needupdate

            tobackup = set()
            if update:
                s = self.checklocalchanges(repo, force=force or keepchanges)
                if force:
                    if not nobackup:
                        tobackup.update(s.modified + s.added)
                elif keepchanges:
                    tobackup.update(s.modified + s.added +
                                    s.removed + s.deleted)

            self.applieddirty = True
            end = len(self.applied)
            rev = self.applied[start].node

            try:
                heads = repo.changelog.heads(rev)
            except error.LookupError:
                node = short(rev)
                raise error.Abort(_('trying to pop unknown node %s') % node)

            if heads != [self.applied[-1].node]:
                raise error.Abort(_("popping would remove a revision not "
                                    "managed by this patch queue"))
            if not repo[self.applied[-1].node].mutable():
                raise error.Abort(
                    _("popping would remove a public revision"),
                    hint=_("see 'hg help phases' for details"))

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                ctx = repo[qp]
                m, a, r, d = repo.status(qp, '.')[:4]
                if d:
                    raise error.Abort(_("deletions found between repo revs"))

                tobackup = set(a + m + r) & tobackup
                if keepchanges and tobackup:
                    raise error.Abort(_("local changes found, qrefresh first"))
                self.backup(repo, tobackup)
                with repo.dirstate.parentchange():
                    for f in a:
                        repo.wvfs.unlinkpath(f, ignoremissing=True)
                        repo.dirstate.drop(f)
                    for f in m + r:
                        fctx = ctx[f]
                        repo.wwrite(f, fctx.data(), fctx.flags())
                        repo.dirstate.normal(f)
                    repo.setparents(qp, nullid)
            for patch in reversed(self.applied[start:end]):
                self.ui.status(_("popping %s\n") % patch.name)
            del self.applied[start:end]
            strip(self.ui, repo, [rev], update=False, backup=False)
            for s, state in repo['.'].substate.items():
                repo['.'].sub(s).get(state)
            if self.applied:
                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
            else:
                self.ui.write(_("patch queue now empty\n"))

    def diff(self, repo, pats, opts):
        top, patch = self.checktoppatch(repo)
        if not top:
            self.ui.write(_("no patches applied\n"))
            return
        qp = self.qparents(repo, top)
        if opts.get('reverse'):
            node1, node2 = None, qp
        else:
            node1, node2 = qp, None
        diffopts = self.diffopts(opts, patch)
        self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)

    def refresh(self, repo, pats=None, **opts):
        if not self.applied:
            self.ui.write(_("no patches applied\n"))
            return 1
        msg = opts.get('msg', '').rstrip()
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qrefresh')
        newuser = opts.get('user')
        newdate = opts.get('date')
        if newdate:
            newdate = '%d %d' % util.parsedate(newdate)
        wlock = repo.wlock()

        try:
            self.checktoppatch(repo)
            (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
            if repo.changelog.heads(top) != [top]:
                raise error.Abort(_("cannot qrefresh a revision with children"))
            if not repo[top].mutable():
                raise error.Abort(_("cannot qrefresh public revision"),
                                  hint=_("see 'hg help phases' for details"))

            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)

            inclsubs = checksubstate(repo, hex(patchparent))
            if inclsubs:
                substatestate = repo.dirstate['.hgsubstate']

            ph = patchheader(self.join(patchfn), self.plainmode)
            diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
            if newuser:
                ph.setuser(newuser)
            if newdate:
                ph.setdate(newdate)
            ph.setparent(hex(patchparent))

            # only commit new patch when write is complete
            patchf = self.opener(patchfn, 'w', atomictemp=True)

            # update the dirstate in place, strip off the qtip commit
            # and then commit.
            #
            # this should really read:
            #   mm, dd, aa = repo.status(top, patchparent)[:3]
            # but we do it backwards to take advantage of manifest/changelog
            # caching against the next repo.status call
            mm, aa, dd = repo.status(patchparent, top)[:3]
            changes = repo.changelog.read(top)
            man = repo.manifestlog[changes[0]].read()
            aaa = aa[:]
            matchfn = scmutil.match(repo[None], pats, opts)
            # in short mode, we only diff the files included in the
            # patch already plus specified files
            if opts.get('short'):
                # if amending a patch, we start with existing
                # files plus specified files - unfiltered
                match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
                # filter with include/exclude options
                matchfn = scmutil.match(repo[None], opts=opts)
            else:
                match = scmutil.matchall(repo)
            m, a, r, d = repo.status(match=match)[:4]
            mm = set(mm)
            aa = set(aa)
            dd = set(dd)

            # we might end up with files that were added between
            # qtip and the dirstate parent, but then changed in the
            # local dirstate. in this case, we want them to only
            # show up in the added section
            for x in m:
                if x not in aa:
                    mm.add(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    dd.remove(x)
                    mm.add(x)
                else:
                    aa.add(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    aa.remove(x)
                    forget.append(x)
                    continue
                else:
                    mm.discard(x)
                dd.add(x)

            m = list(mm)
            r = list(dd)
            a = list(aa)

            # create 'match' that includes the files to be recommitted.
            # apply matchfn via repo.status to ensure correct case handling.
            cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
            allmatches = set(cm + ca + cr + cd)
            refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]

            files = set(inclsubs)
            for x in refreshchanges:
                files.update(x)
            match = scmutil.matchfiles(repo, files)

            bmlist = repo[top].bookmarks()

            dsguard = None
            try:
                dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
                if diffopts.git or diffopts.upgrade:
                    copies = {}
                    for dst in a:
                        src = repo.dirstate.copied(dst)
                        # during qfold, the source file for copies may
                        # be removed. Treat this as a simple add.
                        if src is not None and src in repo.dirstate:
                            copies.setdefault(src, []).append(dst)
                        repo.dirstate.add(dst)
                    # remember the copies between patchparent and qtip
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies.setdefault(src[0], []).extend(
                                copies.get(dst, []))
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                    for src, dsts in copies.iteritems():
                        for dst in dsts:
                            repo.dirstate.copy(src, dst)
                else:
                    for dst in a:
                        repo.dirstate.add(dst)
                    # Drop useless copy information
                    for f in list(repo.dirstate.copies()):
                        repo.dirstate.copy(None, f)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m) - 1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.drop(f)

                user = ph.user or changes[1]

                oldphase = repo[top].phase()

                # assumes strip can roll itself back if interrupted
                repo.setparents(*cparents)
                self.applied.pop()
                self.applieddirty = True
                strip(self.ui, repo, [top], update=False, backup=False)
                dsguard.close()
            finally:
                release(dsguard)

            try:
                # might be nice to attempt to roll back strip after this

                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        if desc.rstrip():
                            ph.setmessage(desc)
                            return desc
                        return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    message = msg or "\n".join(ph.message)
                elif not msg:
                    if not ph.message:
                        message = defaultmsg
                    else:
                        message = "\n".join(ph.message)
                else:
                    message = msg
                    ph.setmessage(msg)

                # Ensure we create a new changeset in the same phase than
                # the old one.
                lock = tr = None
                try:
                    lock = repo.lock()
                    tr = repo.transaction('mq')
                    n = newcommit(repo, oldphase, message, user, ph.date,
                                  match=match, force=True, editor=editor)
                    # only write patch after a successful commit
                    c = [list(x) for x in refreshchanges]
                    if inclsubs:
                        self.putsubstate2changes(substatestate, c)
                    chunks = patchmod.diff(repo, patchparent,
                                           changes=c, opts=diffopts)
                    comments = str(ph)
                    if comments:
                        patchf.write(comments)
                    for chunk in chunks:
                        patchf.write(chunk)
                    patchf.close()

                    marks = repo._bookmarks
                    for bm in bmlist:
                        marks[bm] = n
                    marks.recordchange(tr)
                    tr.close()

                    self.applied.append(statusentry(n, patchfn))
                finally:
                    lockmod.release(tr, lock)
            except: # re-raises
                ctx = repo[cparents[0]]
                repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                self.savedirty()
                self.ui.warn(_('qrefresh interrupted while patch was popped! '
                               '(revert --all, qpush to recover)\n'))
                raise
        finally:
            wlock.release()
            self.removeundo(repo)

1856 def init(self, repo, create=False):
1856 def init(self, repo, create=False):
1857 if not create and os.path.isdir(self.path):
1857 if not create and os.path.isdir(self.path):
1858 raise error.Abort(_("patch queue directory already exists"))
1858 raise error.Abort(_("patch queue directory already exists"))
1859 try:
1859 try:
1860 os.mkdir(self.path)
1860 os.mkdir(self.path)
1861 except OSError as inst:
1861 except OSError as inst:
1862 if inst.errno != errno.EEXIST or not create:
1862 if inst.errno != errno.EEXIST or not create:
1863 raise
1863 raise
1864 if create:
1864 if create:
1865 return self.qrepo(create=True)
1865 return self.qrepo(create=True)
1866
1866
1867 def unapplied(self, repo, patch=None):
1867 def unapplied(self, repo, patch=None):
1868 if patch and patch not in self.series:
1868 if patch and patch not in self.series:
1869 raise error.Abort(_("patch %s is not in series file") % patch)
1869 raise error.Abort(_("patch %s is not in series file") % patch)
1870 if not patch:
1870 if not patch:
1871 start = self.seriesend()
1871 start = self.seriesend()
1872 else:
1872 else:
1873 start = self.series.index(patch) + 1
1873 start = self.series.index(patch) + 1
1874 unapplied = []
1874 unapplied = []
1875 for i in xrange(start, len(self.series)):
1875 for i in xrange(start, len(self.series)):
1876 pushable, reason = self.pushable(i)
1876 pushable, reason = self.pushable(i)
1877 if pushable:
1877 if pushable:
1878 unapplied.append((i, self.series[i]))
1878 unapplied.append((i, self.series[i]))
1879 self.explainpushable(i)
1879 self.explainpushable(i)
1880 return unapplied
1880 return unapplied
1881
1881
1882 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1882 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1883 summary=False):
1883 summary=False):
1884 def displayname(pfx, patchname, state):
1884 def displayname(pfx, patchname, state):
1885 if pfx:
1885 if pfx:
1886 self.ui.write(pfx)
1886 self.ui.write(pfx)
1887 if summary:
1887 if summary:
1888 ph = patchheader(self.join(patchname), self.plainmode)
1888 ph = patchheader(self.join(patchname), self.plainmode)
1889 if ph.message:
1889 if ph.message:
1890 msg = ph.message[0]
1890 msg = ph.message[0]
1891 else:
1891 else:
1892 msg = ''
1892 msg = ''
1893
1893
1894 if self.ui.formatted():
1894 if self.ui.formatted():
1895 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1895 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1896 if width > 0:
1896 if width > 0:
1897 msg = util.ellipsis(msg, width)
1897 msg = util.ellipsis(msg, width)
1898 else:
1898 else:
1899 msg = ''
1899 msg = ''
1900 self.ui.write(patchname, label='qseries.' + state)
1900 self.ui.write(patchname, label='qseries.' + state)
1901 self.ui.write(': ')
1901 self.ui.write(': ')
1902 self.ui.write(msg, label='qseries.message.' + state)
1902 self.ui.write(msg, label='qseries.message.' + state)
1903 else:
1903 else:
1904 self.ui.write(patchname, label='qseries.' + state)
1904 self.ui.write(patchname, label='qseries.' + state)
1905 self.ui.write('\n')
1905 self.ui.write('\n')
1906
1906
1907 applied = set([p.name for p in self.applied])
1907 applied = set([p.name for p in self.applied])
1908 if length is None:
1908 if length is None:
1909 length = len(self.series) - start
1909 length = len(self.series) - start
1910 if not missing:
1910 if not missing:
1911 if self.ui.verbose:
1911 if self.ui.verbose:
1912 idxwidth = len(str(start + length - 1))
1912 idxwidth = len(str(start + length - 1))
1913 for i in xrange(start, start + length):
1913 for i in xrange(start, start + length):
1914 patch = self.series[i]
1914 patch = self.series[i]
1915 if patch in applied:
1915 if patch in applied:
1916 char, state = 'A', 'applied'
1916 char, state = 'A', 'applied'
1917 elif self.pushable(i)[0]:
1917 elif self.pushable(i)[0]:
1918 char, state = 'U', 'unapplied'
1918 char, state = 'U', 'unapplied'
1919 else:
1919 else:
1920 char, state = 'G', 'guarded'
1920 char, state = 'G', 'guarded'
1921 pfx = ''
1921 pfx = ''
1922 if self.ui.verbose:
1922 if self.ui.verbose:
1923 pfx = '%*d %s ' % (idxwidth, i, char)
1923 pfx = '%*d %s ' % (idxwidth, i, char)
1924 elif status and status != char:
1924 elif status and status != char:
1925 continue
1925 continue
1926 displayname(pfx, patch, state)
1926 displayname(pfx, patch, state)
1927 else:
1927 else:
1928 msng_list = []
1928 msng_list = []
1929 for root, dirs, files in os.walk(self.path):
1929 for root, dirs, files in os.walk(self.path):
1930 d = root[len(self.path) + 1:]
1930 d = root[len(self.path) + 1:]
1931 for f in files:
1931 for f in files:
1932 fl = os.path.join(d, f)
1932 fl = os.path.join(d, f)
1933 if (fl not in self.series and
1933 if (fl not in self.series and
1934 fl not in (self.statuspath, self.seriespath,
1934 fl not in (self.statuspath, self.seriespath,
1935 self.guardspath)
1935 self.guardspath)
1936 and not fl.startswith('.')):
1936 and not fl.startswith('.')):
1937 msng_list.append(fl)
1937 msng_list.append(fl)
1938 for x in sorted(msng_list):
1938 for x in sorted(msng_list):
1939 pfx = self.ui.verbose and ('D ') or ''
1939 pfx = self.ui.verbose and ('D ') or ''
1940 displayname(pfx, x, 'missing')
1940 displayname(pfx, x, 'missing')
1941
1941
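# Sketch only (not part of mq): the one-letter status flags that qseries
# prints in verbose mode, summarizing the branches above.
QSERIES_FLAGS = {
    'A': 'applied',
    'U': 'unapplied but pushable',
    'G': 'guarded (not pushable)',
    'D': 'missing: in the patch directory but not in the series file',
}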
1942 def issaveline(self, l):
1942 def issaveline(self, l):
1943 if l.name == '.hg.patches.save.line':
1943 if l.name == '.hg.patches.save.line':
1944 return True
1944 return True
1945
1945
1946 def qrepo(self, create=False):
1946 def qrepo(self, create=False):
1947 ui = self.baseui.copy()
1947 ui = self.baseui.copy()
1948 if create or os.path.isdir(self.join(".hg")):
1948 if create or os.path.isdir(self.join(".hg")):
1949 return hg.repository(ui, path=self.path, create=create)
1949 return hg.repository(ui, path=self.path, create=create)
1950
1950
1951 def restore(self, repo, rev, delete=None, qupdate=None):
1951 def restore(self, repo, rev, delete=None, qupdate=None):
1952 desc = repo[rev].description().strip()
1952 desc = repo[rev].description().strip()
1953 lines = desc.splitlines()
1953 lines = desc.splitlines()
1954 i = 0
1954 i = 0
1955 datastart = None
1955 datastart = None
1956 series = []
1956 series = []
1957 applied = []
1957 applied = []
1958 qpp = None
1958 qpp = None
1959 for i, line in enumerate(lines):
1959 for i, line in enumerate(lines):
1960 if line == 'Patch Data:':
1960 if line == 'Patch Data:':
1961 datastart = i + 1
1961 datastart = i + 1
1962 elif line.startswith('Dirstate:'):
1962 elif line.startswith('Dirstate:'):
1963 l = line.rstrip()
1963 l = line.rstrip()
1964 l = l[10:].split(' ')
1964 l = l[10:].split(' ')
1965 qpp = [bin(x) for x in l]
1965 qpp = [bin(x) for x in l]
1966 elif datastart is not None:
1966 elif datastart is not None:
1967 l = line.rstrip()
1967 l = line.rstrip()
1968 n, name = l.split(':', 1)
1968 n, name = l.split(':', 1)
1969 if n:
1969 if n:
1970 applied.append(statusentry(bin(n), name))
1970 applied.append(statusentry(bin(n), name))
1971 else:
1971 else:
1972 series.append(l)
1972 series.append(l)
1973 if datastart is None:
1973 if datastart is None:
1974 self.ui.warn(_("no saved patch data found\n"))
1974 self.ui.warn(_("no saved patch data found\n"))
1975 return 1
1975 return 1
1976 self.ui.warn(_("restoring status: %s\n") % lines[0])
1976 self.ui.warn(_("restoring status: %s\n") % lines[0])
1977 self.fullseries = series
1977 self.fullseries = series
1978 self.applied = applied
1978 self.applied = applied
1979 self.parseseries()
1979 self.parseseries()
1980 self.seriesdirty = True
1980 self.seriesdirty = True
1981 self.applieddirty = True
1981 self.applieddirty = True
1982 heads = repo.changelog.heads()
1982 heads = repo.changelog.heads()
1983 if delete:
1983 if delete:
1984 if rev not in heads:
1984 if rev not in heads:
1985 self.ui.warn(_("save entry has children, leaving it alone\n"))
1985 self.ui.warn(_("save entry has children, leaving it alone\n"))
1986 else:
1986 else:
1987 self.ui.warn(_("removing save entry %s\n") % short(rev))
1987 self.ui.warn(_("removing save entry %s\n") % short(rev))
1988 pp = repo.dirstate.parents()
1988 pp = repo.dirstate.parents()
1989 if rev in pp:
1989 if rev in pp:
1990 update = True
1990 update = True
1991 else:
1991 else:
1992 update = False
1992 update = False
1993 strip(self.ui, repo, [rev], update=update, backup=False)
1993 strip(self.ui, repo, [rev], update=update, backup=False)
1994 if qpp:
1994 if qpp:
1995 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1995 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1996 (short(qpp[0]), short(qpp[1])))
1996 (short(qpp[0]), short(qpp[1])))
1997 if qupdate:
1997 if qupdate:
1998 self.ui.status(_("updating queue directory\n"))
1998 self.ui.status(_("updating queue directory\n"))
1999 r = self.qrepo()
1999 r = self.qrepo()
2000 if not r:
2000 if not r:
2001 self.ui.warn(_("unable to load queue repository\n"))
2001 self.ui.warn(_("unable to load queue repository\n"))
2002 return 1
2002 return 1
2003 hg.clean(r, qpp[0])
2003 hg.clean(r, qpp[0])
2004
2004
2005 def save(self, repo, msg=None):
2005 def save(self, repo, msg=None):
2006 if not self.applied:
2006 if not self.applied:
2007 self.ui.warn(_("save: no patches applied, exiting\n"))
2007 self.ui.warn(_("save: no patches applied, exiting\n"))
2008 return 1
2008 return 1
2009 if self.issaveline(self.applied[-1]):
2009 if self.issaveline(self.applied[-1]):
2010 self.ui.warn(_("status is already saved\n"))
2010 self.ui.warn(_("status is already saved\n"))
2011 return 1
2011 return 1
2012
2012
2013 if not msg:
2013 if not msg:
2014 msg = _("hg patches saved state")
2014 msg = _("hg patches saved state")
2015 else:
2015 else:
2016 msg = "hg patches: " + msg.rstrip('\r\n')
2016 msg = "hg patches: " + msg.rstrip('\r\n')
2017 r = self.qrepo()
2017 r = self.qrepo()
2018 if r:
2018 if r:
2019 pp = r.dirstate.parents()
2019 pp = r.dirstate.parents()
2020 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2020 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2021 msg += "\n\nPatch Data:\n"
2021 msg += "\n\nPatch Data:\n"
2022 msg += ''.join('%s\n' % x for x in self.applied)
2022 msg += ''.join('%s\n' % x for x in self.applied)
2023 msg += ''.join(':%s\n' % x for x in self.fullseries)
2023 msg += ''.join(':%s\n' % x for x in self.fullseries)
2024 n = repo.commit(msg, force=True)
2024 n = repo.commit(msg, force=True)
2025 if not n:
2025 if not n:
2026 self.ui.warn(_("repo commit failed\n"))
2026 self.ui.warn(_("repo commit failed\n"))
2027 return 1
2027 return 1
2028 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2028 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2029 self.applieddirty = True
2029 self.applieddirty = True
2030 self.removeundo(repo)
2030 self.removeundo(repo)
2031
2031
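# Sketch only (not mq code): the changeset description that save() above
# builds and restore() parses back.  It assumes each applied entry renders
# as "<hex node>:<patch name>", which is the form restore() splits on; the
# Dirstate line is only present when a versioned patch repository exists,
# and the node values in the commented example are made-up placeholders.
def _saved_state_message(dirstate_parents, applied, fullseries,
                         msg="hg patches saved state"):
    lines = [msg]
    lines.append("Dirstate: %s %s" % dirstate_parents)
    lines.append("")
    lines.append("Patch Data:")
    lines.extend("%s:%s" % (node, name) for node, name in applied)
    lines.extend(":%s" % patch for patch in fullseries)
    return "\n".join(lines) + "\n"

# _saved_state_message(("aaaa", "bbbb"),
#                      [("c1d2", "fix-foo.patch")],
#                      ["fix-foo.patch", "add-bar.patch"])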
2032 def fullseriesend(self):
2032 def fullseriesend(self):
2033 if self.applied:
2033 if self.applied:
2034 p = self.applied[-1].name
2034 p = self.applied[-1].name
2035 end = self.findseries(p)
2035 end = self.findseries(p)
2036 if end is None:
2036 if end is None:
2037 return len(self.fullseries)
2037 return len(self.fullseries)
2038 return end + 1
2038 return end + 1
2039 return 0
2039 return 0
2040
2040
2041 def seriesend(self, all_patches=False):
2041 def seriesend(self, all_patches=False):
2042 """If all_patches is False, return the index of the next pushable patch
2042 """If all_patches is False, return the index of the next pushable patch
2043 in the series, or the series length. If all_patches is True, return the
2043 in the series, or the series length. If all_patches is True, return the
2044 index of the first patch past the last applied one.
2044 index of the first patch past the last applied one.
2045 """
2045 """
2046 end = 0
2046 end = 0
2047 def nextpatch(start):
2047 def nextpatch(start):
2048 if all_patches or start >= len(self.series):
2048 if all_patches or start >= len(self.series):
2049 return start
2049 return start
2050 for i in xrange(start, len(self.series)):
2050 for i in xrange(start, len(self.series)):
2051 p, reason = self.pushable(i)
2051 p, reason = self.pushable(i)
2052 if p:
2052 if p:
2053 return i
2053 return i
2054 self.explainpushable(i)
2054 self.explainpushable(i)
2055 return len(self.series)
2055 return len(self.series)
2056 if self.applied:
2056 if self.applied:
2057 p = self.applied[-1].name
2057 p = self.applied[-1].name
2058 try:
2058 try:
2059 end = self.series.index(p)
2059 end = self.series.index(p)
2060 except ValueError:
2060 except ValueError:
2061 return 0
2061 return 0
2062 return nextpatch(end + 1)
2062 return nextpatch(end + 1)
2063 return nextpatch(end)
2063 return nextpatch(end)
2064
2064
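# Sketch only (not mq code): seriesend() semantics over plain lists, with
# a hypothetical is_pushable() callback standing in for pushable().
def _seriesend_sketch(series, applied, is_pushable, all_patches=False):
    if applied:
        try:
            start = series.index(applied[-1]) + 1
        except ValueError:
            return 0
    else:
        start = 0
    if all_patches or start >= len(series):
        return start
    for i in range(start, len(series)):
        if is_pushable(i):
            return i
    return len(series)

# e.g. with series = ['a', 'b', 'c'], applied = ['a'] and every patch
# pushable, this returns 1 (the index of 'b'), whether or not
# all_patches is set.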
2065 def appliedname(self, index):
2065 def appliedname(self, index):
2066 pname = self.applied[index].name
2066 pname = self.applied[index].name
2067 if not self.ui.verbose:
2067 if not self.ui.verbose:
2068 p = pname
2068 p = pname
2069 else:
2069 else:
2070 p = str(self.series.index(pname)) + " " + pname
2070 p = str(self.series.index(pname)) + " " + pname
2071 return p
2071 return p
2072
2072
2073 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
2073 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
2074 force=None, git=False):
2074 force=None, git=False):
2075 def checkseries(patchname):
2075 def checkseries(patchname):
2076 if patchname in self.series:
2076 if patchname in self.series:
2077 raise error.Abort(_('patch %s is already in the series file')
2077 raise error.Abort(_('patch %s is already in the series file')
2078 % patchname)
2078 % patchname)
2079
2079
2080 if rev:
2080 if rev:
2081 if files:
2081 if files:
2082 raise error.Abort(_('option "-r" not valid when importing '
2082 raise error.Abort(_('option "-r" not valid when importing '
2083 'files'))
2083 'files'))
2084 rev = scmutil.revrange(repo, rev)
2084 rev = scmutil.revrange(repo, rev)
2085 rev.sort(reverse=True)
2085 rev.sort(reverse=True)
2086 elif not files:
2086 elif not files:
2087 raise error.Abort(_('no files or revisions specified'))
2087 raise error.Abort(_('no files or revisions specified'))
2088 if (len(files) > 1 or len(rev) > 1) and patchname:
2088 if (len(files) > 1 or len(rev) > 1) and patchname:
2089 raise error.Abort(_('option "-n" not valid when importing multiple '
2089 raise error.Abort(_('option "-n" not valid when importing multiple '
2090 'patches'))
2090 'patches'))
2091 imported = []
2091 imported = []
2092 if rev:
2092 if rev:
2093 # If mq patches are applied, we can only import revisions
2093 # If mq patches are applied, we can only import revisions
2094 # that form a linear path to qbase.
2094 # that form a linear path to qbase.
2095 # Otherwise, they should form a linear path to a head.
2095 # Otherwise, they should form a linear path to a head.
2096 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2096 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2097 if len(heads) > 1:
2097 if len(heads) > 1:
2098 raise error.Abort(_('revision %d is the root of more than one '
2098 raise error.Abort(_('revision %d is the root of more than one '
2099 'branch') % rev.last())
2099 'branch') % rev.last())
2100 if self.applied:
2100 if self.applied:
2101 base = repo.changelog.node(rev.first())
2101 base = repo.changelog.node(rev.first())
2102 if base in [n.node for n in self.applied]:
2102 if base in [n.node for n in self.applied]:
2103 raise error.Abort(_('revision %d is already managed')
2103 raise error.Abort(_('revision %d is already managed')
2104 % rev.first())
2104 % rev.first())
2105 if heads != [self.applied[-1].node]:
2105 if heads != [self.applied[-1].node]:
2106 raise error.Abort(_('revision %d is not the parent of '
2106 raise error.Abort(_('revision %d is not the parent of '
2107 'the queue') % rev.first())
2107 'the queue') % rev.first())
2108 base = repo.changelog.rev(self.applied[0].node)
2108 base = repo.changelog.rev(self.applied[0].node)
2109 lastparent = repo.changelog.parentrevs(base)[0]
2109 lastparent = repo.changelog.parentrevs(base)[0]
2110 else:
2110 else:
2111 if heads != [repo.changelog.node(rev.first())]:
2111 if heads != [repo.changelog.node(rev.first())]:
2112 raise error.Abort(_('revision %d has unmanaged children')
2112 raise error.Abort(_('revision %d has unmanaged children')
2113 % rev.first())
2113 % rev.first())
2114 lastparent = None
2114 lastparent = None
2115
2115
2116 diffopts = self.diffopts({'git': git})
2116 diffopts = self.diffopts({'git': git})
2117 with repo.transaction('qimport') as tr:
2117 with repo.transaction('qimport') as tr:
2118 for r in rev:
2118 for r in rev:
2119 if not repo[r].mutable():
2119 if not repo[r].mutable():
2120 raise error.Abort(_('revision %d is not mutable') % r,
2120 raise error.Abort(_('revision %d is not mutable') % r,
2121 hint=_("see 'hg help phases' "
2121 hint=_("see 'hg help phases' "
2122 'for details'))
2122 'for details'))
2123 p1, p2 = repo.changelog.parentrevs(r)
2123 p1, p2 = repo.changelog.parentrevs(r)
2124 n = repo.changelog.node(r)
2124 n = repo.changelog.node(r)
2125 if p2 != nullrev:
2125 if p2 != nullrev:
2126 raise error.Abort(_('cannot import merge revision %d')
2126 raise error.Abort(_('cannot import merge revision %d')
2127 % r)
2127 % r)
2128 if lastparent and lastparent != r:
2128 if lastparent and lastparent != r:
2129 raise error.Abort(_('revision %d is not the parent of '
2129 raise error.Abort(_('revision %d is not the parent of '
2130 '%d')
2130 '%d')
2131 % (r, lastparent))
2131 % (r, lastparent))
2132 lastparent = p1
2132 lastparent = p1
2133
2133
2134 if not patchname:
2134 if not patchname:
2135 patchname = self.makepatchname(
2135 patchname = self.makepatchname(
2136 repo[r].description().split('\n', 1)[0],
2136 repo[r].description().split('\n', 1)[0],
2137 '%d.diff' % r)
2137 '%d.diff' % r)
2138 checkseries(patchname)
2138 checkseries(patchname)
2139 self.checkpatchname(patchname, force)
2139 self.checkpatchname(patchname, force)
2140 self.fullseries.insert(0, patchname)
2140 self.fullseries.insert(0, patchname)
2141
2141
2142 patchf = self.opener(patchname, "w")
2142 patchf = self.opener(patchname, "w")
2143 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
2143 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
2144 patchf.close()
2144 patchf.close()
2145
2145
2146 se = statusentry(n, patchname)
2146 se = statusentry(n, patchname)
2147 self.applied.insert(0, se)
2147 self.applied.insert(0, se)
2148
2148
2149 self.added.append(patchname)
2149 self.added.append(patchname)
2150 imported.append(patchname)
2150 imported.append(patchname)
2151 patchname = None
2151 patchname = None
2152 if rev and repo.ui.configbool('mq', 'secret', False):
2152 if rev and repo.ui.configbool('mq', 'secret', False):
2153 # if we added anything with --rev, move the secret root
2153 # if we added anything with --rev, move the secret root
2154 phases.retractboundary(repo, tr, phases.secret, [n])
2154 phases.retractboundary(repo, tr, phases.secret, [n])
2155 self.parseseries()
2155 self.parseseries()
2156 self.applieddirty = True
2156 self.applieddirty = True
2157 self.seriesdirty = True
2157 self.seriesdirty = True
2158
2158
2159 for i, filename in enumerate(files):
2159 for i, filename in enumerate(files):
2160 if existing:
2160 if existing:
2161 if filename == '-':
2161 if filename == '-':
2162 raise error.Abort(_('-e is incompatible with import from -')
2162 raise error.Abort(_('-e is incompatible with import from -')
2163 )
2163 )
2164 filename = normname(filename)
2164 filename = normname(filename)
2165 self.checkreservedname(filename)
2165 self.checkreservedname(filename)
2166 if util.url(filename).islocal():
2166 if util.url(filename).islocal():
2167 originpath = self.join(filename)
2167 originpath = self.join(filename)
2168 if not os.path.isfile(originpath):
2168 if not os.path.isfile(originpath):
2169 raise error.Abort(
2169 raise error.Abort(
2170 _("patch %s does not exist") % filename)
2170 _("patch %s does not exist") % filename)
2171
2171
2172 if patchname:
2172 if patchname:
2173 self.checkpatchname(patchname, force)
2173 self.checkpatchname(patchname, force)
2174
2174
2175 self.ui.write(_('renaming %s to %s\n')
2175 self.ui.write(_('renaming %s to %s\n')
2176 % (filename, patchname))
2176 % (filename, patchname))
2177 util.rename(originpath, self.join(patchname))
2177 util.rename(originpath, self.join(patchname))
2178 else:
2178 else:
2179 patchname = filename
2179 patchname = filename
2180
2180
2181 else:
2181 else:
2182 if filename == '-' and not patchname:
2182 if filename == '-' and not patchname:
2183 raise error.Abort(_('need --name to import a patch from -'))
2183 raise error.Abort(_('need --name to import a patch from -'))
2184 elif not patchname:
2184 elif not patchname:
2185 patchname = normname(os.path.basename(filename.rstrip('/')))
2185 patchname = normname(os.path.basename(filename.rstrip('/')))
2186 self.checkpatchname(patchname, force)
2186 self.checkpatchname(patchname, force)
2187 try:
2187 try:
2188 if filename == '-':
2188 if filename == '-':
2189 text = self.ui.fin.read()
2189 text = self.ui.fin.read()
2190 else:
2190 else:
2191 fp = hg.openpath(self.ui, filename)
2191 fp = hg.openpath(self.ui, filename)
2192 text = fp.read()
2192 text = fp.read()
2193 fp.close()
2193 fp.close()
2194 except (OSError, IOError):
2194 except (OSError, IOError):
2195 raise error.Abort(_("unable to read file %s") % filename)
2195 raise error.Abort(_("unable to read file %s") % filename)
2196 patchf = self.opener(patchname, "w")
2196 patchf = self.opener(patchname, "w")
2197 patchf.write(text)
2197 patchf.write(text)
2198 patchf.close()
2198 patchf.close()
2199 if not force:
2199 if not force:
2200 checkseries(patchname)
2200 checkseries(patchname)
2201 if patchname not in self.series:
2201 if patchname not in self.series:
2202 index = self.fullseriesend() + i
2202 index = self.fullseriesend() + i
2203 self.fullseries[index:index] = [patchname]
2203 self.fullseries[index:index] = [patchname]
2204 self.parseseries()
2204 self.parseseries()
2205 self.seriesdirty = True
2205 self.seriesdirty = True
2206 self.ui.warn(_("adding %s to series file\n") % patchname)
2206 self.ui.warn(_("adding %s to series file\n") % patchname)
2207 self.added.append(patchname)
2207 self.added.append(patchname)
2208 imported.append(patchname)
2208 imported.append(patchname)
2209 patchname = None
2209 patchname = None
2210
2210
2211 self.removeundo(repo)
2211 self.removeundo(repo)
2212 return imported
2212 return imported
2213
2213
2214 def fixkeepchangesopts(ui, opts):
2214 def fixkeepchangesopts(ui, opts):
2215 if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
2215 if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
2216 or opts.get('exact')):
2216 or opts.get('exact')):
2217 return opts
2217 return opts
2218 opts = dict(opts)
2218 opts = dict(opts)
2219 opts['keep_changes'] = True
2219 opts['keep_changes'] = True
2220 return opts
2220 return opts
2221
2221
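# Sketch only (not mq code): what fixkeepchangesopts() above does to the
# command options when the mq.keepchanges config knob is set.
def _fixkeepchangesopts_sketch(opts, keepchanges_configured):
    if not keepchanges_configured or opts.get('force') or opts.get('exact'):
        return opts                  # left untouched
    opts = dict(opts)                # never mutate the caller's dict
    opts['keep_changes'] = True      # as if --keep-changes had been passed
    return opts

# _fixkeepchangesopts_sketch({'force': None}, True)
#   -> {'force': None, 'keep_changes': True}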
2222 @command("qdelete|qremove|qrm",
2222 @command("qdelete|qremove|qrm",
2223 [('k', 'keep', None, _('keep patch file')),
2223 [('k', 'keep', None, _('keep patch file')),
2224 ('r', 'rev', [],
2224 ('r', 'rev', [],
2225 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2225 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2226 _('hg qdelete [-k] [PATCH]...'))
2226 _('hg qdelete [-k] [PATCH]...'))
2227 def delete(ui, repo, *patches, **opts):
2227 def delete(ui, repo, *patches, **opts):
2228 """remove patches from queue
2228 """remove patches from queue
2229
2229
2230 The patches must not be applied, and at least one patch is required. Exact
2230 The patches must not be applied, and at least one patch is required. Exact
2231 patch identifiers must be given. With -k/--keep, the patch files are
2231 patch identifiers must be given. With -k/--keep, the patch files are
2232 preserved in the patch directory.
2232 preserved in the patch directory.
2233
2233
2234 To stop managing a patch and move it into permanent history,
2234 To stop managing a patch and move it into permanent history,
2235 use the :hg:`qfinish` command."""
2235 use the :hg:`qfinish` command."""
2236 q = repo.mq
2236 q = repo.mq
2237 q.delete(repo, patches, opts)
2237 q.delete(repo, patches, opts)
2238 q.savedirty()
2238 q.savedirty()
2239 return 0
2239 return 0
2240
2240
2241 @command("qapplied",
2241 @command("qapplied",
2242 [('1', 'last', None, _('show only the preceding applied patch'))
2242 [('1', 'last', None, _('show only the preceding applied patch'))
2243 ] + seriesopts,
2243 ] + seriesopts,
2244 _('hg qapplied [-1] [-s] [PATCH]'))
2244 _('hg qapplied [-1] [-s] [PATCH]'))
2245 def applied(ui, repo, patch=None, **opts):
2245 def applied(ui, repo, patch=None, **opts):
2246 """print the patches already applied
2246 """print the patches already applied
2247
2247
2248 Returns 0 on success."""
2248 Returns 0 on success."""
2249
2249
2250 q = repo.mq
2250 q = repo.mq
2251
2251
2252 if patch:
2252 if patch:
2253 if patch not in q.series:
2253 if patch not in q.series:
2254 raise error.Abort(_("patch %s is not in series file") % patch)
2254 raise error.Abort(_("patch %s is not in series file") % patch)
2255 end = q.series.index(patch) + 1
2255 end = q.series.index(patch) + 1
2256 else:
2256 else:
2257 end = q.seriesend(True)
2257 end = q.seriesend(True)
2258
2258
2259 if opts.get('last') and not end:
2259 if opts.get('last') and not end:
2260 ui.write(_("no patches applied\n"))
2260 ui.write(_("no patches applied\n"))
2261 return 1
2261 return 1
2262 elif opts.get('last') and end == 1:
2262 elif opts.get('last') and end == 1:
2263 ui.write(_("only one patch applied\n"))
2263 ui.write(_("only one patch applied\n"))
2264 return 1
2264 return 1
2265 elif opts.get('last'):
2265 elif opts.get('last'):
2266 start = end - 2
2266 start = end - 2
2267 end = 1
2267 end = 1
2268 else:
2268 else:
2269 start = 0
2269 start = 0
2270
2270
2271 q.qseries(repo, length=end, start=start, status='A',
2271 q.qseries(repo, length=end, start=start, status='A',
2272 summary=opts.get('summary'))
2272 summary=opts.get('summary'))
2273
2273
2274
2274
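# Sketch only (not mq code): which slice of the series qapplied prints,
# mirroring the start/end arithmetic above.
def _qapplied_window(end, last=False):
    """end is the index just past the last applied patch."""
    if not last:
        return 0, end       # everything from the first patch to the top
    if end < 2:
        return None         # "no patches applied" / "only one patch applied"
    return end - 2, 1       # only the patch just below the top

# _qapplied_window(3)            -> (0, 3)
# _qapplied_window(3, last=True) -> (1, 1)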
2275 @command("qunapplied",
2275 @command("qunapplied",
2276 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2276 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2277 _('hg qunapplied [-1] [-s] [PATCH]'))
2277 _('hg qunapplied [-1] [-s] [PATCH]'))
2278 def unapplied(ui, repo, patch=None, **opts):
2278 def unapplied(ui, repo, patch=None, **opts):
2279 """print the patches not yet applied
2279 """print the patches not yet applied
2280
2280
2281 Returns 0 on success."""
2281 Returns 0 on success."""
2282
2282
2283 q = repo.mq
2283 q = repo.mq
2284 if patch:
2284 if patch:
2285 if patch not in q.series:
2285 if patch not in q.series:
2286 raise error.Abort(_("patch %s is not in series file") % patch)
2286 raise error.Abort(_("patch %s is not in series file") % patch)
2287 start = q.series.index(patch) + 1
2287 start = q.series.index(patch) + 1
2288 else:
2288 else:
2289 start = q.seriesend(True)
2289 start = q.seriesend(True)
2290
2290
2291 if start == len(q.series) and opts.get('first'):
2291 if start == len(q.series) and opts.get('first'):
2292 ui.write(_("all patches applied\n"))
2292 ui.write(_("all patches applied\n"))
2293 return 1
2293 return 1
2294
2294
2295 if opts.get('first'):
2295 if opts.get('first'):
2296 length = 1
2296 length = 1
2297 else:
2297 else:
2298 length = None
2298 length = None
2299 q.qseries(repo, start=start, length=length, status='U',
2299 q.qseries(repo, start=start, length=length, status='U',
2300 summary=opts.get('summary'))
2300 summary=opts.get('summary'))
2301
2301
2302 @command("qimport",
2302 @command("qimport",
2303 [('e', 'existing', None, _('import file in patch directory')),
2303 [('e', 'existing', None, _('import file in patch directory')),
2304 ('n', 'name', '',
2304 ('n', 'name', '',
2305 _('name of patch file'), _('NAME')),
2305 _('name of patch file'), _('NAME')),
2306 ('f', 'force', None, _('overwrite existing files')),
2306 ('f', 'force', None, _('overwrite existing files')),
2307 ('r', 'rev', [],
2307 ('r', 'rev', [],
2308 _('place existing revisions under mq control'), _('REV')),
2308 _('place existing revisions under mq control'), _('REV')),
2309 ('g', 'git', None, _('use git extended diff format')),
2309 ('g', 'git', None, _('use git extended diff format')),
2310 ('P', 'push', None, _('qpush after importing'))],
2310 ('P', 'push', None, _('qpush after importing'))],
2311 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2311 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2312 def qimport(ui, repo, *filename, **opts):
2312 def qimport(ui, repo, *filename, **opts):
2313 """import a patch or existing changeset
2313 """import a patch or existing changeset
2314
2314
2315 The patch is inserted into the series after the last applied
2315 The patch is inserted into the series after the last applied
2316 patch. If no patches have been applied, qimport prepends the patch
2316 patch. If no patches have been applied, qimport prepends the patch
2317 to the series.
2317 to the series.
2318
2318
2319 The patch will have the same name as its source file unless you
2319 The patch will have the same name as its source file unless you
2320 give it a new one with -n/--name.
2320 give it a new one with -n/--name.
2321
2321
2322 You can register an existing patch inside the patch directory with
2322 You can register an existing patch inside the patch directory with
2323 the -e/--existing flag.
2323 the -e/--existing flag.
2324
2324
2325 With -f/--force, an existing patch of the same name will be
2325 With -f/--force, an existing patch of the same name will be
2326 overwritten.
2326 overwritten.
2327
2327
2328 An existing changeset may be placed under mq control with -r/--rev
2328 An existing changeset may be placed under mq control with -r/--rev
2329 (e.g. qimport --rev . -n patch will place the current revision
2329 (e.g. qimport --rev . -n patch will place the current revision
2330 under mq control). With -g/--git, patches imported with --rev will
2330 under mq control). With -g/--git, patches imported with --rev will
2331 use the git diff format. See the diffs help topic for information
2331 use the git diff format. See the diffs help topic for information
2332 on why this is important for preserving rename/copy information
2332 on why this is important for preserving rename/copy information
2333 and permission changes. Use :hg:`qfinish` to remove changesets
2333 and permission changes. Use :hg:`qfinish` to remove changesets
2334 from mq control.
2334 from mq control.
2335
2335
2336 To import a patch from standard input, pass - as the patch file.
2336 To import a patch from standard input, pass - as the patch file.
2337 When importing from standard input, a patch name must be specified
2337 When importing from standard input, a patch name must be specified
2338 using the --name flag.
2338 using the --name flag.
2339
2339
2340 To import an existing patch while renaming it::
2340 To import an existing patch while renaming it::
2341
2341
2342 hg qimport -e existing-patch -n new-name
2342 hg qimport -e existing-patch -n new-name
2343
2343
2344 Returns 0 if import succeeded.
2344 Returns 0 if import succeeded.
2345 """
2345 """
2346     with repo.lock(): # because this may move phases
2346     with repo.lock(): # because this may move phases
2347 q = repo.mq
2347 q = repo.mq
2348 try:
2348 try:
2349 imported = q.qimport(
2349 imported = q.qimport(
2350 repo, filename, patchname=opts.get('name'),
2350 repo, filename, patchname=opts.get('name'),
2351 existing=opts.get('existing'), force=opts.get('force'),
2351 existing=opts.get('existing'), force=opts.get('force'),
2352 rev=opts.get('rev'), git=opts.get('git'))
2352 rev=opts.get('rev'), git=opts.get('git'))
2353 finally:
2353 finally:
2354 q.savedirty()
2354 q.savedirty()
2355
2355
2356 if imported and opts.get('push') and not opts.get('rev'):
2356 if imported and opts.get('push') and not opts.get('rev'):
2357 return q.push(repo, imported[-1])
2357 return q.push(repo, imported[-1])
2358 return 0
2358 return 0
2359
2359
2360 def qinit(ui, repo, create):
2360 def qinit(ui, repo, create):
2361 """initialize a new queue repository
2361 """initialize a new queue repository
2362
2362
2363 This command also creates a series file for ordering patches, and
2363 This command also creates a series file for ordering patches, and
2364 an mq-specific .hgignore file in the queue repository, to exclude
2364 an mq-specific .hgignore file in the queue repository, to exclude
2365 the status and guards files (these contain mostly transient state).
2365 the status and guards files (these contain mostly transient state).
2366
2366
2367 Returns 0 if initialization succeeded."""
2367 Returns 0 if initialization succeeded."""
2368 q = repo.mq
2368 q = repo.mq
2369 r = q.init(repo, create)
2369 r = q.init(repo, create)
2370 q.savedirty()
2370 q.savedirty()
2371 if r:
2371 if r:
2372 if not os.path.exists(r.wjoin('.hgignore')):
2372 if not os.path.exists(r.wjoin('.hgignore')):
2373 fp = r.wvfs('.hgignore', 'w')
2373 fp = r.wvfs('.hgignore', 'w')
2374 fp.write('^\\.hg\n')
2374 fp.write('^\\.hg\n')
2375 fp.write('^\\.mq\n')
2375 fp.write('^\\.mq\n')
2376 fp.write('syntax: glob\n')
2376 fp.write('syntax: glob\n')
2377 fp.write('status\n')
2377 fp.write('status\n')
2378 fp.write('guards\n')
2378 fp.write('guards\n')
2379 fp.close()
2379 fp.close()
2380 if not os.path.exists(r.wjoin('series')):
2380 if not os.path.exists(r.wjoin('series')):
2381 r.wvfs('series', 'w').close()
2381 r.wvfs('series', 'w').close()
2382 r[None].add(['.hgignore', 'series'])
2382 r[None].add(['.hgignore', 'series'])
2383 commands.add(ui, r)
2383 commands.add(ui, r)
2384 return 0
2384 return 0
2385
2385
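# Sketch only (not mq code): the .hgignore that qinit -c writes into the
# queue repository, reproduced from the fp.write() calls above.
MQ_HGIGNORE = (
    '^\\.hg\n'
    '^\\.mq\n'
    'syntax: glob\n'
    'status\n'
    'guards\n'
)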
2386 @command("^qinit",
2386 @command("^qinit",
2387 [('c', 'create-repo', None, _('create queue repository'))],
2387 [('c', 'create-repo', None, _('create queue repository'))],
2388 _('hg qinit [-c]'))
2388 _('hg qinit [-c]'))
2389 def init(ui, repo, **opts):
2389 def init(ui, repo, **opts):
2390 """init a new queue repository (DEPRECATED)
2390 """init a new queue repository (DEPRECATED)
2391
2391
2392 The queue repository is unversioned by default. If
2392 The queue repository is unversioned by default. If
2393 -c/--create-repo is specified, qinit will create a separate nested
2393 -c/--create-repo is specified, qinit will create a separate nested
2394 repository for patches (qinit -c may also be run later to convert
2394 repository for patches (qinit -c may also be run later to convert
2395 an unversioned patch repository into a versioned one). You can use
2395 an unversioned patch repository into a versioned one). You can use
2396 qcommit to commit changes to this queue repository.
2396 qcommit to commit changes to this queue repository.
2397
2397
2398 This command is deprecated. Without -c, it's implied by other relevant
2398 This command is deprecated. Without -c, it's implied by other relevant
2399 commands. With -c, use :hg:`init --mq` instead."""
2399 commands. With -c, use :hg:`init --mq` instead."""
2400 return qinit(ui, repo, create=opts.get('create_repo'))
2400 return qinit(ui, repo, create=opts.get('create_repo'))
2401
2401
2402 @command("qclone",
2402 @command("qclone",
2403 [('', 'pull', None, _('use pull protocol to copy metadata')),
2403 [('', 'pull', None, _('use pull protocol to copy metadata')),
2404 ('U', 'noupdate', None,
2404 ('U', 'noupdate', None,
2405 _('do not update the new working directories')),
2405 _('do not update the new working directories')),
2406 ('', 'uncompressed', None,
2406 ('', 'uncompressed', None,
2407 _('use uncompressed transfer (fast over LAN)')),
2407 _('use uncompressed transfer (fast over LAN)')),
2408 ('p', 'patches', '',
2408 ('p', 'patches', '',
2409 _('location of source patch repository'), _('REPO')),
2409 _('location of source patch repository'), _('REPO')),
2410 ] + commands.remoteopts,
2410 ] + cmdutil.remoteopts,
2411 _('hg qclone [OPTION]... SOURCE [DEST]'),
2411 _('hg qclone [OPTION]... SOURCE [DEST]'),
2412 norepo=True)
2412 norepo=True)
2413 def clone(ui, source, dest=None, **opts):
2413 def clone(ui, source, dest=None, **opts):
2414     '''clone the main and patch repositories at the same time
2414     '''clone the main and patch repositories at the same time
2415
2415
2416     If the source is local, the destination will have no patches
2416     If the source is local, the destination will have no patches
2417     applied. If the source is remote, this command cannot check
2417     applied. If the source is remote, this command cannot check
2418     whether patches are applied in the source, so it cannot guarantee
2418     whether patches are applied in the source, so it cannot guarantee
2419     that no patches are applied in the destination. If you clone a
2419     that no patches are applied in the destination. If you clone a
2420     remote repository, make sure it has no patches applied first.
2420     remote repository, make sure it has no patches applied first.
2421
2421
2422     The source patch repository is looked for in <src>/.hg/patches
2422     The source patch repository is looked for in <src>/.hg/patches
2423     by default. Use -p <url> to change it.
2423     by default. Use -p <url> to change it.
2424
2424
2425 The patch directory must be a nested Mercurial repository, as
2425 The patch directory must be a nested Mercurial repository, as
2426 would be created by :hg:`init --mq`.
2426 would be created by :hg:`init --mq`.
2427
2427
2428 Return 0 on success.
2428 Return 0 on success.
2429 '''
2429 '''
2430 def patchdir(repo):
2430 def patchdir(repo):
2431 """compute a patch repo url from a repo object"""
2431 """compute a patch repo url from a repo object"""
2432 url = repo.url()
2432 url = repo.url()
2433 if url.endswith('/'):
2433 if url.endswith('/'):
2434 url = url[:-1]
2434 url = url[:-1]
2435 return url + '/.hg/patches'
2435 return url + '/.hg/patches'
2436
2436
2437 # main repo (destination and sources)
2437 # main repo (destination and sources)
2438 if dest is None:
2438 if dest is None:
2439 dest = hg.defaultdest(source)
2439 dest = hg.defaultdest(source)
2440 sr = hg.peer(ui, opts, ui.expandpath(source))
2440 sr = hg.peer(ui, opts, ui.expandpath(source))
2441
2441
2442 # patches repo (source only)
2442 # patches repo (source only)
2443 if opts.get('patches'):
2443 if opts.get('patches'):
2444 patchespath = ui.expandpath(opts.get('patches'))
2444 patchespath = ui.expandpath(opts.get('patches'))
2445 else:
2445 else:
2446 patchespath = patchdir(sr)
2446 patchespath = patchdir(sr)
2447 try:
2447 try:
2448 hg.peer(ui, opts, patchespath)
2448 hg.peer(ui, opts, patchespath)
2449 except error.RepoError:
2449 except error.RepoError:
2450 raise error.Abort(_('versioned patch repository not found'
2450 raise error.Abort(_('versioned patch repository not found'
2451 ' (see init --mq)'))
2451 ' (see init --mq)'))
2452 qbase, destrev = None, None
2452 qbase, destrev = None, None
2453 if sr.local():
2453 if sr.local():
2454 repo = sr.local()
2454 repo = sr.local()
2455 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2455 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2456 qbase = repo.mq.applied[0].node
2456 qbase = repo.mq.applied[0].node
2457 if not hg.islocal(dest):
2457 if not hg.islocal(dest):
2458 heads = set(repo.heads())
2458 heads = set(repo.heads())
2459 destrev = list(heads.difference(repo.heads(qbase)))
2459 destrev = list(heads.difference(repo.heads(qbase)))
2460 destrev.append(repo.changelog.parents(qbase)[0])
2460 destrev.append(repo.changelog.parents(qbase)[0])
2461 elif sr.capable('lookup'):
2461 elif sr.capable('lookup'):
2462 try:
2462 try:
2463 qbase = sr.lookup('qbase')
2463 qbase = sr.lookup('qbase')
2464 except error.RepoError:
2464 except error.RepoError:
2465 pass
2465 pass
2466
2466
2467 ui.note(_('cloning main repository\n'))
2467 ui.note(_('cloning main repository\n'))
2468 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2468 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2469 pull=opts.get('pull'),
2469 pull=opts.get('pull'),
2470 rev=destrev,
2470 rev=destrev,
2471 update=False,
2471 update=False,
2472 stream=opts.get('uncompressed'))
2472 stream=opts.get('uncompressed'))
2473
2473
2474 ui.note(_('cloning patch repository\n'))
2474 ui.note(_('cloning patch repository\n'))
2475 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2475 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2476 pull=opts.get('pull'), update=not opts.get('noupdate'),
2476 pull=opts.get('pull'), update=not opts.get('noupdate'),
2477 stream=opts.get('uncompressed'))
2477 stream=opts.get('uncompressed'))
2478
2478
2479 if dr.local():
2479 if dr.local():
2480 repo = dr.local()
2480 repo = dr.local()
2481 if qbase:
2481 if qbase:
2482 ui.note(_('stripping applied patches from destination '
2482 ui.note(_('stripping applied patches from destination '
2483 'repository\n'))
2483 'repository\n'))
2484 strip(ui, repo, [qbase], update=False, backup=None)
2484 strip(ui, repo, [qbase], update=False, backup=None)
2485 if not opts.get('noupdate'):
2485 if not opts.get('noupdate'):
2486 ui.note(_('updating destination repository\n'))
2486 ui.note(_('updating destination repository\n'))
2487 hg.update(repo, repo.changelog.tip())
2487 hg.update(repo, repo.changelog.tip())
2488
2488
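# Sketch only (not mq code): the default patch-repository location that
# qclone derives from a repository URL, as in patchdir() above.
def _default_patch_repo(url):
    if url.endswith('/'):
        url = url[:-1]
    return url + '/.hg/patches'

# _default_patch_repo('https://example.com/repo/')   (hypothetical URL)
#   -> 'https://example.com/repo/.hg/patches'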
2489 @command("qcommit|qci",
2489 @command("qcommit|qci",
2490 commands.table["^commit|ci"][1],
2490 commands.table["^commit|ci"][1],
2491 _('hg qcommit [OPTION]... [FILE]...'),
2491 _('hg qcommit [OPTION]... [FILE]...'),
2492 inferrepo=True)
2492 inferrepo=True)
2493 def commit(ui, repo, *pats, **opts):
2493 def commit(ui, repo, *pats, **opts):
2494 """commit changes in the queue repository (DEPRECATED)
2494 """commit changes in the queue repository (DEPRECATED)
2495
2495
2496 This command is deprecated; use :hg:`commit --mq` instead."""
2496 This command is deprecated; use :hg:`commit --mq` instead."""
2497 q = repo.mq
2497 q = repo.mq
2498 r = q.qrepo()
2498 r = q.qrepo()
2499 if not r:
2499 if not r:
2500 raise error.Abort('no queue repository')
2500 raise error.Abort('no queue repository')
2501 commands.commit(r.ui, r, *pats, **opts)
2501 commands.commit(r.ui, r, *pats, **opts)
2502
2502
2503 @command("qseries",
2503 @command("qseries",
2504 [('m', 'missing', None, _('print patches not in series')),
2504 [('m', 'missing', None, _('print patches not in series')),
2505 ] + seriesopts,
2505 ] + seriesopts,
2506 _('hg qseries [-ms]'))
2506 _('hg qseries [-ms]'))
2507 def series(ui, repo, **opts):
2507 def series(ui, repo, **opts):
2508 """print the entire series file
2508 """print the entire series file
2509
2509
2510 Returns 0 on success."""
2510 Returns 0 on success."""
2511 repo.mq.qseries(repo, missing=opts.get('missing'),
2511 repo.mq.qseries(repo, missing=opts.get('missing'),
2512 summary=opts.get('summary'))
2512 summary=opts.get('summary'))
2513 return 0
2513 return 0
2514
2514
2515 @command("qtop", seriesopts, _('hg qtop [-s]'))
2515 @command("qtop", seriesopts, _('hg qtop [-s]'))
2516 def top(ui, repo, **opts):
2516 def top(ui, repo, **opts):
2517 """print the name of the current patch
2517 """print the name of the current patch
2518
2518
2519 Returns 0 on success."""
2519 Returns 0 on success."""
2520 q = repo.mq
2520 q = repo.mq
2521 if q.applied:
2521 if q.applied:
2522 t = q.seriesend(True)
2522 t = q.seriesend(True)
2523 else:
2523 else:
2524 t = 0
2524 t = 0
2525
2525
2526 if t:
2526 if t:
2527 q.qseries(repo, start=t - 1, length=1, status='A',
2527 q.qseries(repo, start=t - 1, length=1, status='A',
2528 summary=opts.get('summary'))
2528 summary=opts.get('summary'))
2529 else:
2529 else:
2530 ui.write(_("no patches applied\n"))
2530 ui.write(_("no patches applied\n"))
2531 return 1
2531 return 1
2532
2532
2533 @command("qnext", seriesopts, _('hg qnext [-s]'))
2533 @command("qnext", seriesopts, _('hg qnext [-s]'))
2534 def next(ui, repo, **opts):
2534 def next(ui, repo, **opts):
2535 """print the name of the next pushable patch
2535 """print the name of the next pushable patch
2536
2536
2537 Returns 0 on success."""
2537 Returns 0 on success."""
2538 q = repo.mq
2538 q = repo.mq
2539 end = q.seriesend()
2539 end = q.seriesend()
2540 if end == len(q.series):
2540 if end == len(q.series):
2541 ui.write(_("all patches applied\n"))
2541 ui.write(_("all patches applied\n"))
2542 return 1
2542 return 1
2543 q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2543 q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2544
2544
2545 @command("qprev", seriesopts, _('hg qprev [-s]'))
2545 @command("qprev", seriesopts, _('hg qprev [-s]'))
2546 def prev(ui, repo, **opts):
2546 def prev(ui, repo, **opts):
2547 """print the name of the preceding applied patch
2547 """print the name of the preceding applied patch
2548
2548
2549 Returns 0 on success."""
2549 Returns 0 on success."""
2550 q = repo.mq
2550 q = repo.mq
2551 l = len(q.applied)
2551 l = len(q.applied)
2552 if l == 1:
2552 if l == 1:
2553 ui.write(_("only one patch applied\n"))
2553 ui.write(_("only one patch applied\n"))
2554 return 1
2554 return 1
2555 if not l:
2555 if not l:
2556 ui.write(_("no patches applied\n"))
2556 ui.write(_("no patches applied\n"))
2557 return 1
2557 return 1
2558 idx = q.series.index(q.applied[-2].name)
2558 idx = q.series.index(q.applied[-2].name)
2559 q.qseries(repo, start=idx, length=1, status='A',
2559 q.qseries(repo, start=idx, length=1, status='A',
2560 summary=opts.get('summary'))
2560 summary=opts.get('summary'))
2561
2561
2562 def setupheaderopts(ui, opts):
2562 def setupheaderopts(ui, opts):
2563 if not opts.get('user') and opts.get('currentuser'):
2563 if not opts.get('user') and opts.get('currentuser'):
2564 opts['user'] = ui.username()
2564 opts['user'] = ui.username()
2565 if not opts.get('date') and opts.get('currentdate'):
2565 if not opts.get('date') and opts.get('currentdate'):
2566 opts['date'] = "%d %d" % util.makedate()
2566 opts['date'] = "%d %d" % util.makedate()
2567
2567
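# Sketch only (not mq code): the "%d %d" string built above is Mercurial's
# internal "unixtime tzoffset" date form (assumption: the offset is in
# seconds west of UTC, as returned by util.makedate()).
import time

def _currentdate_sketch():
    now = int(time.time())
    if time.daylight and time.localtime().tm_isdst > 0:
        offset = time.altzone
    else:
        offset = time.timezone
    return "%d %d" % (now, offset)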
2568 @command("^qnew",
2568 @command("^qnew",
2569 [('e', 'edit', None, _('invoke editor on commit messages')),
2569 [('e', 'edit', None, _('invoke editor on commit messages')),
2570 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2570 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2571 ('g', 'git', None, _('use git extended diff format')),
2571 ('g', 'git', None, _('use git extended diff format')),
2572 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2572 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2573 ('u', 'user', '',
2573 ('u', 'user', '',
2574 _('add "From: <USER>" to patch'), _('USER')),
2574 _('add "From: <USER>" to patch'), _('USER')),
2575 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2575 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2576 ('d', 'date', '',
2576 ('d', 'date', '',
2577 _('add "Date: <DATE>" to patch'), _('DATE'))
2577 _('add "Date: <DATE>" to patch'), _('DATE'))
2578 ] + commands.walkopts + commands.commitopts,
2578 ] + cmdutil.walkopts + cmdutil.commitopts,
2579 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2579 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
2580 inferrepo=True)
2580 inferrepo=True)
2581 def new(ui, repo, patch, *args, **opts):
2581 def new(ui, repo, patch, *args, **opts):
2582 """create a new patch
2582 """create a new patch
2583
2583
2584 qnew creates a new patch on top of the currently-applied patch (if
2584 qnew creates a new patch on top of the currently-applied patch (if
2585 any). The patch will be initialized with any outstanding changes
2585 any). The patch will be initialized with any outstanding changes
2586 in the working directory. You may also use -I/--include,
2586 in the working directory. You may also use -I/--include,
2587 -X/--exclude, and/or a list of files after the patch name to add
2587 -X/--exclude, and/or a list of files after the patch name to add
2588 only changes to matching files to the new patch, leaving the rest
2588 only changes to matching files to the new patch, leaving the rest
2589 as uncommitted modifications.
2589 as uncommitted modifications.
2590
2590
2591 -u/--user and -d/--date can be used to set the (given) user and
2591 -u/--user and -d/--date can be used to set the (given) user and
2592 date, respectively. -U/--currentuser and -D/--currentdate set user
2592 date, respectively. -U/--currentuser and -D/--currentdate set user
2593 to current user and date to current date.
2593 to current user and date to current date.
2594
2594
2595 -e/--edit, -m/--message or -l/--logfile set the patch header as
2595 -e/--edit, -m/--message or -l/--logfile set the patch header as
2596 well as the commit message. If none is specified, the header is
2596 well as the commit message. If none is specified, the header is
2597 empty and the commit message is '[mq]: PATCH'.
2597 empty and the commit message is '[mq]: PATCH'.
2598
2598
2599 Use the -g/--git option to keep the patch in the git extended diff
2599 Use the -g/--git option to keep the patch in the git extended diff
2600 format. Read the diffs help topic for more information on why this
2600 format. Read the diffs help topic for more information on why this
2601 is important for preserving permission changes and copy/rename
2601 is important for preserving permission changes and copy/rename
2602 information.
2602 information.
2603
2603
2604 Returns 0 on successful creation of a new patch.
2604 Returns 0 on successful creation of a new patch.
2605 """
2605 """
2606 msg = cmdutil.logmessage(ui, opts)
2606 msg = cmdutil.logmessage(ui, opts)
2607 q = repo.mq
2607 q = repo.mq
2608 opts['msg'] = msg
2608 opts['msg'] = msg
2609 setupheaderopts(ui, opts)
2609 setupheaderopts(ui, opts)
2610 q.new(repo, patch, *args, **opts)
2610 q.new(repo, patch, *args, **opts)
2611 q.savedirty()
2611 q.savedirty()
2612 return 0
2612 return 0
2613
2613
2614 @command("^qrefresh",
2614 @command("^qrefresh",
2615 [('e', 'edit', None, _('invoke editor on commit messages')),
2615 [('e', 'edit', None, _('invoke editor on commit messages')),
2616 ('g', 'git', None, _('use git extended diff format')),
2616 ('g', 'git', None, _('use git extended diff format')),
2617 ('s', 'short', None,
2617 ('s', 'short', None,
2618 _('refresh only files already in the patch and specified files')),
2618 _('refresh only files already in the patch and specified files')),
2619 ('U', 'currentuser', None,
2619 ('U', 'currentuser', None,
2620 _('add/update author field in patch with current user')),
2620 _('add/update author field in patch with current user')),
2621 ('u', 'user', '',
2621 ('u', 'user', '',
2622 _('add/update author field in patch with given user'), _('USER')),
2622 _('add/update author field in patch with given user'), _('USER')),
2623 ('D', 'currentdate', None,
2623 ('D', 'currentdate', None,
2624 _('add/update date field in patch with current date')),
2624 _('add/update date field in patch with current date')),
2625 ('d', 'date', '',
2625 ('d', 'date', '',
2626 _('add/update date field in patch with given date'), _('DATE'))
2626 _('add/update date field in patch with given date'), _('DATE'))
2627 ] + commands.walkopts + commands.commitopts,
2627 ] + cmdutil.walkopts + cmdutil.commitopts,
2628 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2628 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2629 inferrepo=True)
2629 inferrepo=True)
2630 def refresh(ui, repo, *pats, **opts):
2630 def refresh(ui, repo, *pats, **opts):
2631 """update the current patch
2631 """update the current patch
2632
2632
2633 If any file patterns are provided, the refreshed patch will
2633 If any file patterns are provided, the refreshed patch will
2634 contain only the modifications that match those patterns; the
2634 contain only the modifications that match those patterns; the
2635 remaining modifications will remain in the working directory.
2635 remaining modifications will remain in the working directory.
2636
2636
2637 If -s/--short is specified, files currently included in the patch
2637 If -s/--short is specified, files currently included in the patch
2638 will be refreshed just like matched files and remain in the patch.
2638 will be refreshed just like matched files and remain in the patch.
2639
2639
2640 If -e/--edit is specified, Mercurial will start your configured editor for
2640 If -e/--edit is specified, Mercurial will start your configured editor for
2641 you to enter a message. In case qrefresh fails, you will find a backup of
2641 you to enter a message. In case qrefresh fails, you will find a backup of
2642 your message in ``.hg/last-message.txt``.
2642 your message in ``.hg/last-message.txt``.
2643
2643
2644 hg add/remove/copy/rename work as usual, though you might want to
2644 hg add/remove/copy/rename work as usual, though you might want to
2645 use git-style patches (-g/--git or [diff] git=1) to track copies
2645 use git-style patches (-g/--git or [diff] git=1) to track copies
2646 and renames. See the diffs help topic for more information on the
2646 and renames. See the diffs help topic for more information on the
2647 git diff format.
2647 git diff format.
2648
2648
2649 Returns 0 on success.
2649 Returns 0 on success.
2650 """
2650 """
2651 q = repo.mq
2651 q = repo.mq
2652 message = cmdutil.logmessage(ui, opts)
2652 message = cmdutil.logmessage(ui, opts)
2653 setupheaderopts(ui, opts)
2653 setupheaderopts(ui, opts)
2654 with repo.wlock():
2654 with repo.wlock():
2655 ret = q.refresh(repo, pats, msg=message, **opts)
2655 ret = q.refresh(repo, pats, msg=message, **opts)
2656 q.savedirty()
2656 q.savedirty()
2657 return ret
2657 return ret
2658
2658
2659 @command("^qdiff",
2659 @command("^qdiff",
2660 commands.diffopts + commands.diffopts2 + commands.walkopts,
2660 cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
2661 _('hg qdiff [OPTION]... [FILE]...'),
2661 _('hg qdiff [OPTION]... [FILE]...'),
2662 inferrepo=True)
2662 inferrepo=True)
2663 def diff(ui, repo, *pats, **opts):
2663 def diff(ui, repo, *pats, **opts):
2664 """diff of the current patch and subsequent modifications
2664 """diff of the current patch and subsequent modifications
2665
2665
2666 Shows a diff which includes the current patch as well as any
2666 Shows a diff which includes the current patch as well as any
2667 changes which have been made in the working directory since the
2667 changes which have been made in the working directory since the
2668 last refresh (thus showing what the current patch would become
2668 last refresh (thus showing what the current patch would become
2669 after a qrefresh).
2669 after a qrefresh).
2670
2670
2671 Use :hg:`diff` if you only want to see the changes made since the
2671 Use :hg:`diff` if you only want to see the changes made since the
2672 last qrefresh, or :hg:`export qtip` if you want to see changes
2672 last qrefresh, or :hg:`export qtip` if you want to see changes
2673 made by the current patch without including changes made since the
2673 made by the current patch without including changes made since the
2674 qrefresh.
2674 qrefresh.
2675
2675
2676 Returns 0 on success.
2676 Returns 0 on success.
2677 """
2677 """
2678 ui.pager('qdiff')
2678 ui.pager('qdiff')
2679 repo.mq.diff(repo, pats, opts)
2679 repo.mq.diff(repo, pats, opts)
2680 return 0
2680 return 0
2681
2681
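# Editor's note -- hedged summary of the docstring above, not part of this
# changeset. The three related views of an applied patch are roughly:
#
#   hg qdiff            # current patch plus uncommitted changes on top of it
#   hg diff             # only the changes made since the last qrefresh
#   hg export qtip      # only what the current patch already records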
2682 @command('qfold',
2682 @command('qfold',
2683 [('e', 'edit', None, _('invoke editor on commit messages')),
2683 [('e', 'edit', None, _('invoke editor on commit messages')),
2684 ('k', 'keep', None, _('keep folded patch files')),
2684 ('k', 'keep', None, _('keep folded patch files')),
2685 ] + commands.commitopts,
2685 ] + cmdutil.commitopts,
2686 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
2686 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
2687 def fold(ui, repo, *files, **opts):
2687 def fold(ui, repo, *files, **opts):
2688 """fold the named patches into the current patch
2688 """fold the named patches into the current patch
2689
2689
2690 Patches must not yet be applied. Each patch will be successively
2690 Patches must not yet be applied. Each patch will be successively
2691 applied to the current patch in the order given. If all the
2691 applied to the current patch in the order given. If all the
2692 patches apply successfully, the current patch will be refreshed
2692 patches apply successfully, the current patch will be refreshed
2693 with the new cumulative patch, and the folded patches will be
2693 with the new cumulative patch, and the folded patches will be
2694 deleted. With -k/--keep, the folded patch files will not be
2694 deleted. With -k/--keep, the folded patch files will not be
2695 removed afterwards.
2695 removed afterwards.
2696
2696
2697 The header for each folded patch will be concatenated with the
2697 The header for each folded patch will be concatenated with the
2698 current patch header, separated by a line of ``* * *``.
2698 current patch header, separated by a line of ``* * *``.
2699
2699
2700 Returns 0 on success."""
2700 Returns 0 on success."""
2701 q = repo.mq
2701 q = repo.mq
2702 if not files:
2702 if not files:
2703 raise error.Abort(_('qfold requires at least one patch name'))
2703 raise error.Abort(_('qfold requires at least one patch name'))
2704 if not q.checktoppatch(repo)[0]:
2704 if not q.checktoppatch(repo)[0]:
2705 raise error.Abort(_('no patches applied'))
2705 raise error.Abort(_('no patches applied'))
2706 q.checklocalchanges(repo)
2706 q.checklocalchanges(repo)
2707
2707
2708 message = cmdutil.logmessage(ui, opts)
2708 message = cmdutil.logmessage(ui, opts)
2709
2709
2710 parent = q.lookup('qtip')
2710 parent = q.lookup('qtip')
2711 patches = []
2711 patches = []
2712 messages = []
2712 messages = []
2713 for f in files:
2713 for f in files:
2714 p = q.lookup(f)
2714 p = q.lookup(f)
2715 if p in patches or p == parent:
2715 if p in patches or p == parent:
2716 ui.warn(_('skipping already folded patch %s\n') % p)
2716 ui.warn(_('skipping already folded patch %s\n') % p)
2717 if q.isapplied(p):
2717 if q.isapplied(p):
2718 raise error.Abort(_('qfold cannot fold already applied patch %s')
2718 raise error.Abort(_('qfold cannot fold already applied patch %s')
2719 % p)
2719 % p)
2720 patches.append(p)
2720 patches.append(p)
2721
2721
2722 for p in patches:
2722 for p in patches:
2723 if not message:
2723 if not message:
2724 ph = patchheader(q.join(p), q.plainmode)
2724 ph = patchheader(q.join(p), q.plainmode)
2725 if ph.message:
2725 if ph.message:
2726 messages.append(ph.message)
2726 messages.append(ph.message)
2727 pf = q.join(p)
2727 pf = q.join(p)
2728 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2728 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2729 if not patchsuccess:
2729 if not patchsuccess:
2730 raise error.Abort(_('error folding patch %s') % p)
2730 raise error.Abort(_('error folding patch %s') % p)
2731
2731
2732 if not message:
2732 if not message:
2733 ph = patchheader(q.join(parent), q.plainmode)
2733 ph = patchheader(q.join(parent), q.plainmode)
2734 message = ph.message
2734 message = ph.message
2735 for msg in messages:
2735 for msg in messages:
2736 if msg:
2736 if msg:
2737 if message:
2737 if message:
2738 message.append('* * *')
2738 message.append('* * *')
2739 message.extend(msg)
2739 message.extend(msg)
2740 message = '\n'.join(message)
2740 message = '\n'.join(message)
2741
2741
2742 diffopts = q.patchopts(q.diffopts(), *patches)
2742 diffopts = q.patchopts(q.diffopts(), *patches)
2743 with repo.wlock():
2743 with repo.wlock():
2744 q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
2744 q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
2745 editform='mq.qfold')
2745 editform='mq.qfold')
2746 q.delete(repo, patches, opts)
2746 q.delete(repo, patches, opts)
2747 q.savedirty()
2747 q.savedirty()
2748
2748
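# Editor's note -- illustrative only. Assuming hypothetical patches a.patch
# (applied, current) and b.patch/c.patch (unapplied), the command documented
# above combines them like so:
#
#   hg qfold -k b.patch c.patch   # fold into a.patch, keep the folded patch files
#
# The resulting header is the current header plus each folded header,
# separated by '* * *' lines, as implemented in fold() above.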
2749 @command("qgoto",
2749 @command("qgoto",
2750 [('', 'keep-changes', None,
2750 [('', 'keep-changes', None,
2751 _('tolerate non-conflicting local changes')),
2751 _('tolerate non-conflicting local changes')),
2752 ('f', 'force', None, _('overwrite any local changes')),
2752 ('f', 'force', None, _('overwrite any local changes')),
2753 ('', 'no-backup', None, _('do not save backup copies of files'))],
2753 ('', 'no-backup', None, _('do not save backup copies of files'))],
2754 _('hg qgoto [OPTION]... PATCH'))
2754 _('hg qgoto [OPTION]... PATCH'))
2755 def goto(ui, repo, patch, **opts):
2755 def goto(ui, repo, patch, **opts):
2756 '''push or pop patches until named patch is at top of stack
2756 '''push or pop patches until named patch is at top of stack
2757
2757
2758 Returns 0 on success.'''
2758 Returns 0 on success.'''
2759 opts = fixkeepchangesopts(ui, opts)
2759 opts = fixkeepchangesopts(ui, opts)
2760 q = repo.mq
2760 q = repo.mq
2761 patch = q.lookup(patch)
2761 patch = q.lookup(patch)
2762 nobackup = opts.get('no_backup')
2762 nobackup = opts.get('no_backup')
2763 keepchanges = opts.get('keep_changes')
2763 keepchanges = opts.get('keep_changes')
2764 if q.isapplied(patch):
2764 if q.isapplied(patch):
2765 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2765 ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
2766 keepchanges=keepchanges)
2766 keepchanges=keepchanges)
2767 else:
2767 else:
2768 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2768 ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
2769 keepchanges=keepchanges)
2769 keepchanges=keepchanges)
2770 q.savedirty()
2770 q.savedirty()
2771 return ret
2771 return ret
2772
2772
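# Editor's note -- illustrative only. qgoto simply chooses between push and
# pop depending on whether the target is already applied, e.g.:
#
#   hg qgoto --keep-changes my-feature.patch
#
# (my-feature.patch is a hypothetical patch name.)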
2773 @command("qguard",
2773 @command("qguard",
2774 [('l', 'list', None, _('list all patches and guards')),
2774 [('l', 'list', None, _('list all patches and guards')),
2775 ('n', 'none', None, _('drop all guards'))],
2775 ('n', 'none', None, _('drop all guards'))],
2776 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2776 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2777 def guard(ui, repo, *args, **opts):
2777 def guard(ui, repo, *args, **opts):
2778 '''set or print guards for a patch
2778 '''set or print guards for a patch
2779
2779
2780 Guards control whether a patch can be pushed. A patch with no
2780 Guards control whether a patch can be pushed. A patch with no
2781 guards is always pushed. A patch with a positive guard ("+foo") is
2781 guards is always pushed. A patch with a positive guard ("+foo") is
2782 pushed only if the :hg:`qselect` command has activated it. A patch with
2782 pushed only if the :hg:`qselect` command has activated it. A patch with
2783 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2783 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2784 has activated it.
2784 has activated it.
2785
2785
2786 With no arguments, print the currently active guards.
2786 With no arguments, print the currently active guards.
2787 With arguments, set guards for the named patch.
2787 With arguments, set guards for the named patch.
2788
2788
2789 .. note::
2789 .. note::
2790
2790
2791 Specifying negative guards now requires '--'.
2791 Specifying negative guards now requires '--'.
2792
2792
2793 To set guards on another patch::
2793 To set guards on another patch::
2794
2794
2795 hg qguard other.patch -- +2.6.17 -stable
2795 hg qguard other.patch -- +2.6.17 -stable
2796
2796
2797 Returns 0 on success.
2797 Returns 0 on success.
2798 '''
2798 '''
2799 def status(idx):
2799 def status(idx):
2800 guards = q.seriesguards[idx] or ['unguarded']
2800 guards = q.seriesguards[idx] or ['unguarded']
2801 if q.series[idx] in applied:
2801 if q.series[idx] in applied:
2802 state = 'applied'
2802 state = 'applied'
2803 elif q.pushable(idx)[0]:
2803 elif q.pushable(idx)[0]:
2804 state = 'unapplied'
2804 state = 'unapplied'
2805 else:
2805 else:
2806 state = 'guarded'
2806 state = 'guarded'
2807 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2807 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2808 ui.write('%s: ' % ui.label(q.series[idx], label))
2808 ui.write('%s: ' % ui.label(q.series[idx], label))
2809
2809
2810 for i, guard in enumerate(guards):
2810 for i, guard in enumerate(guards):
2811 if guard.startswith('+'):
2811 if guard.startswith('+'):
2812 ui.write(guard, label='qguard.positive')
2812 ui.write(guard, label='qguard.positive')
2813 elif guard.startswith('-'):
2813 elif guard.startswith('-'):
2814 ui.write(guard, label='qguard.negative')
2814 ui.write(guard, label='qguard.negative')
2815 else:
2815 else:
2816 ui.write(guard, label='qguard.unguarded')
2816 ui.write(guard, label='qguard.unguarded')
2817 if i != len(guards) - 1:
2817 if i != len(guards) - 1:
2818 ui.write(' ')
2818 ui.write(' ')
2819 ui.write('\n')
2819 ui.write('\n')
2820 q = repo.mq
2820 q = repo.mq
2821 applied = set(p.name for p in q.applied)
2821 applied = set(p.name for p in q.applied)
2822 patch = None
2822 patch = None
2823 args = list(args)
2823 args = list(args)
2824 if opts.get('list'):
2824 if opts.get('list'):
2825 if args or opts.get('none'):
2825 if args or opts.get('none'):
2826 raise error.Abort(_('cannot mix -l/--list with options or '
2826 raise error.Abort(_('cannot mix -l/--list with options or '
2827 'arguments'))
2827 'arguments'))
2828 for i in xrange(len(q.series)):
2828 for i in xrange(len(q.series)):
2829 status(i)
2829 status(i)
2830 return
2830 return
2831 if not args or args[0][0:1] in '-+':
2831 if not args or args[0][0:1] in '-+':
2832 if not q.applied:
2832 if not q.applied:
2833 raise error.Abort(_('no patches applied'))
2833 raise error.Abort(_('no patches applied'))
2834 patch = q.applied[-1].name
2834 patch = q.applied[-1].name
2835 if patch is None and args[0][0:1] not in '-+':
2835 if patch is None and args[0][0:1] not in '-+':
2836 patch = args.pop(0)
2836 patch = args.pop(0)
2837 if patch is None:
2837 if patch is None:
2838 raise error.Abort(_('no patch to work with'))
2838 raise error.Abort(_('no patch to work with'))
2839 if args or opts.get('none'):
2839 if args or opts.get('none'):
2840 idx = q.findseries(patch)
2840 idx = q.findseries(patch)
2841 if idx is None:
2841 if idx is None:
2842 raise error.Abort(_('no patch named %s') % patch)
2842 raise error.Abort(_('no patch named %s') % patch)
2843 q.setguards(idx, args)
2843 q.setguards(idx, args)
2844 q.savedirty()
2844 q.savedirty()
2845 else:
2845 else:
2846 status(q.series.index(q.lookup(patch)))
2846 status(q.series.index(q.lookup(patch)))
2847
2847
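# Editor's note -- hedged workflow sketch based on the qguard docstring above
# (guard names below are hypothetical):
#
#   hg qguard risky.patch -- +experimental   # pushed only when the guard is selected
#   hg qguard legacy.patch -- -experimental  # never pushed while the guard is selected
#   hg qselect experimental                  # activate the guard (see qselect below)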
2848 @command("qheader", [], _('hg qheader [PATCH]'))
2848 @command("qheader", [], _('hg qheader [PATCH]'))
2849 def header(ui, repo, patch=None):
2849 def header(ui, repo, patch=None):
2850 """print the header of the topmost or specified patch
2850 """print the header of the topmost or specified patch
2851
2851
2852 Returns 0 on success."""
2852 Returns 0 on success."""
2853 q = repo.mq
2853 q = repo.mq
2854
2854
2855 if patch:
2855 if patch:
2856 patch = q.lookup(patch)
2856 patch = q.lookup(patch)
2857 else:
2857 else:
2858 if not q.applied:
2858 if not q.applied:
2859 ui.write(_('no patches applied\n'))
2859 ui.write(_('no patches applied\n'))
2860 return 1
2860 return 1
2861 patch = q.lookup('qtip')
2861 patch = q.lookup('qtip')
2862 ph = patchheader(q.join(patch), q.plainmode)
2862 ph = patchheader(q.join(patch), q.plainmode)
2863
2863
2864 ui.write('\n'.join(ph.message) + '\n')
2864 ui.write('\n'.join(ph.message) + '\n')
2865
2865
2866 def lastsavename(path):
2866 def lastsavename(path):
2867 (directory, base) = os.path.split(path)
2867 (directory, base) = os.path.split(path)
2868 names = os.listdir(directory)
2868 names = os.listdir(directory)
2869 namere = re.compile("%s.([0-9]+)" % base)
2869 namere = re.compile("%s.([0-9]+)" % base)
2870 maxindex = None
2870 maxindex = None
2871 maxname = None
2871 maxname = None
2872 for f in names:
2872 for f in names:
2873 m = namere.match(f)
2873 m = namere.match(f)
2874 if m:
2874 if m:
2875 index = int(m.group(1))
2875 index = int(m.group(1))
2876 if maxindex is None or index > maxindex:
2876 if maxindex is None or index > maxindex:
2877 maxindex = index
2877 maxindex = index
2878 maxname = f
2878 maxname = f
2879 if maxname:
2879 if maxname:
2880 return (os.path.join(directory, maxname), maxindex)
2880 return (os.path.join(directory, maxname), maxindex)
2881 return (None, None)
2881 return (None, None)
2882
2882
2883 def savename(path):
2883 def savename(path):
2884 (last, index) = lastsavename(path)
2884 (last, index) = lastsavename(path)
2885 if last is None:
2885 if last is None:
2886 index = 0
2886 index = 0
2887 newpath = path + ".%d" % (index + 1)
2887 newpath = path + ".%d" % (index + 1)
2888 return newpath
2888 return newpath
2889
2889
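# Editor's note -- behavioural sketch of the helpers above, assuming a saved
# queue directory .hg/patches with siblings patches.1 and patches.2:
#
#   lastsavename('.hg/patches')  =>  ('.hg/patches.2', 2)
#   savename('.hg/patches')      =>  '.hg/patches.3'
#
# i.e. savename always returns the next unused numeric suffix.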
2890 @command("^qpush",
2890 @command("^qpush",
2891 [('', 'keep-changes', None,
2891 [('', 'keep-changes', None,
2892 _('tolerate non-conflicting local changes')),
2892 _('tolerate non-conflicting local changes')),
2893 ('f', 'force', None, _('apply on top of local changes')),
2893 ('f', 'force', None, _('apply on top of local changes')),
2894 ('e', 'exact', None,
2894 ('e', 'exact', None,
2895 _('apply the target patch to its recorded parent')),
2895 _('apply the target patch to its recorded parent')),
2896 ('l', 'list', None, _('list patch name in commit text')),
2896 ('l', 'list', None, _('list patch name in commit text')),
2897 ('a', 'all', None, _('apply all patches')),
2897 ('a', 'all', None, _('apply all patches')),
2898 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2898 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2899 ('n', 'name', '',
2899 ('n', 'name', '',
2900 _('merge queue name (DEPRECATED)'), _('NAME')),
2900 _('merge queue name (DEPRECATED)'), _('NAME')),
2901 ('', 'move', None,
2901 ('', 'move', None,
2902 _('reorder patch series and apply only the patch')),
2902 _('reorder patch series and apply only the patch')),
2903 ('', 'no-backup', None, _('do not save backup copies of files'))],
2903 ('', 'no-backup', None, _('do not save backup copies of files'))],
2904 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2904 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2905 def push(ui, repo, patch=None, **opts):
2905 def push(ui, repo, patch=None, **opts):
2906 """push the next patch onto the stack
2906 """push the next patch onto the stack
2907
2907
2908 By default, abort if the working directory contains uncommitted
2908 By default, abort if the working directory contains uncommitted
2909 changes. With --keep-changes, abort only if the uncommitted files
2909 changes. With --keep-changes, abort only if the uncommitted files
2910 overlap with patched files. With -f/--force, backup and patch over
2910 overlap with patched files. With -f/--force, backup and patch over
2911 uncommitted changes.
2911 uncommitted changes.
2912
2912
2913 Return 0 on success.
2913 Return 0 on success.
2914 """
2914 """
2915 q = repo.mq
2915 q = repo.mq
2916 mergeq = None
2916 mergeq = None
2917
2917
2918 opts = fixkeepchangesopts(ui, opts)
2918 opts = fixkeepchangesopts(ui, opts)
2919 if opts.get('merge'):
2919 if opts.get('merge'):
2920 if opts.get('name'):
2920 if opts.get('name'):
2921 newpath = repo.vfs.join(opts.get('name'))
2921 newpath = repo.vfs.join(opts.get('name'))
2922 else:
2922 else:
2923 newpath, i = lastsavename(q.path)
2923 newpath, i = lastsavename(q.path)
2924 if not newpath:
2924 if not newpath:
2925 ui.warn(_("no saved queues found, please use -n\n"))
2925 ui.warn(_("no saved queues found, please use -n\n"))
2926 return 1
2926 return 1
2927 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2927 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2928 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2928 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2929 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2929 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2930 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2930 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2931 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2931 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2932 keepchanges=opts.get('keep_changes'))
2932 keepchanges=opts.get('keep_changes'))
2933 return ret
2933 return ret
2934
2934
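# Editor's note -- illustrative invocations for the option table above
# (urgent.patch is a hypothetical patch name):
#
#   hg qpush -a                   # apply every remaining patch in the series
#   hg qpush --move urgent.patch  # reorder the series and apply just that patch
#   hg qpush --keep-changes       # tolerate non-conflicting local edits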
2935 @command("^qpop",
2935 @command("^qpop",
2936 [('a', 'all', None, _('pop all patches')),
2936 [('a', 'all', None, _('pop all patches')),
2937 ('n', 'name', '',
2937 ('n', 'name', '',
2938 _('queue name to pop (DEPRECATED)'), _('NAME')),
2938 _('queue name to pop (DEPRECATED)'), _('NAME')),
2939 ('', 'keep-changes', None,
2939 ('', 'keep-changes', None,
2940 _('tolerate non-conflicting local changes')),
2940 _('tolerate non-conflicting local changes')),
2941 ('f', 'force', None, _('forget any local changes to patched files')),
2941 ('f', 'force', None, _('forget any local changes to patched files')),
2942 ('', 'no-backup', None, _('do not save backup copies of files'))],
2942 ('', 'no-backup', None, _('do not save backup copies of files'))],
2943 _('hg qpop [-a] [-f] [PATCH | INDEX]'))
2943 _('hg qpop [-a] [-f] [PATCH | INDEX]'))
2944 def pop(ui, repo, patch=None, **opts):
2944 def pop(ui, repo, patch=None, **opts):
2945 """pop the current patch off the stack
2945 """pop the current patch off the stack
2946
2946
2947 Without argument, pops off the top of the patch stack. If given a
2947 Without argument, pops off the top of the patch stack. If given a
2948 patch name, keeps popping off patches until the named patch is at
2948 patch name, keeps popping off patches until the named patch is at
2949 the top of the stack.
2949 the top of the stack.
2950
2950
2951 By default, abort if the working directory contains uncommitted
2951 By default, abort if the working directory contains uncommitted
2952 changes. With --keep-changes, abort only if the uncommitted files
2952 changes. With --keep-changes, abort only if the uncommitted files
2953 overlap with patched files. With -f/--force, backup and discard
2953 overlap with patched files. With -f/--force, backup and discard
2954 changes made to such files.
2954 changes made to such files.
2955
2955
2956 Return 0 on success.
2956 Return 0 on success.
2957 """
2957 """
2958 opts = fixkeepchangesopts(ui, opts)
2958 opts = fixkeepchangesopts(ui, opts)
2959 localupdate = True
2959 localupdate = True
2960 if opts.get('name'):
2960 if opts.get('name'):
2961 q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get('name')))
2961 q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get('name')))
2962 ui.warn(_('using patch queue: %s\n') % q.path)
2962 ui.warn(_('using patch queue: %s\n') % q.path)
2963 localupdate = False
2963 localupdate = False
2964 else:
2964 else:
2965 q = repo.mq
2965 q = repo.mq
2966 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2966 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2967 all=opts.get('all'), nobackup=opts.get('no_backup'),
2967 all=opts.get('all'), nobackup=opts.get('no_backup'),
2968 keepchanges=opts.get('keep_changes'))
2968 keepchanges=opts.get('keep_changes'))
2969 q.savedirty()
2969 q.savedirty()
2970 return ret
2970 return ret
2971
2971
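# Editor's note -- illustrative counterpart to qpush above
# (some.patch is a hypothetical patch name):
#
#   hg qpop              # pop the topmost applied patch
#   hg qpop -a           # pop every applied patch
#   hg qpop some.patch   # pop until some.patch is at the top of the stack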
2972 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2972 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2973 def rename(ui, repo, patch, name=None, **opts):
2973 def rename(ui, repo, patch, name=None, **opts):
2974 """rename a patch
2974 """rename a patch
2975
2975
2976 With one argument, renames the current patch to PATCH1.
2976 With one argument, renames the current patch to PATCH1.
2977 With two arguments, renames PATCH1 to PATCH2.
2977 With two arguments, renames PATCH1 to PATCH2.
2978
2978
2979 Returns 0 on success."""
2979 Returns 0 on success."""
2980 q = repo.mq
2980 q = repo.mq
2981 if not name:
2981 if not name:
2982 name = patch
2982 name = patch
2983 patch = None
2983 patch = None
2984
2984
2985 if patch:
2985 if patch:
2986 patch = q.lookup(patch)
2986 patch = q.lookup(patch)
2987 else:
2987 else:
2988 if not q.applied:
2988 if not q.applied:
2989 ui.write(_('no patches applied\n'))
2989 ui.write(_('no patches applied\n'))
2990 return
2990 return
2991 patch = q.lookup('qtip')
2991 patch = q.lookup('qtip')
2992 absdest = q.join(name)
2992 absdest = q.join(name)
2993 if os.path.isdir(absdest):
2993 if os.path.isdir(absdest):
2994 name = normname(os.path.join(name, os.path.basename(patch)))
2994 name = normname(os.path.join(name, os.path.basename(patch)))
2995 absdest = q.join(name)
2995 absdest = q.join(name)
2996 q.checkpatchname(name)
2996 q.checkpatchname(name)
2997
2997
2998 ui.note(_('renaming %s to %s\n') % (patch, name))
2998 ui.note(_('renaming %s to %s\n') % (patch, name))
2999 i = q.findseries(patch)
2999 i = q.findseries(patch)
3000 guards = q.guard_re.findall(q.fullseries[i])
3000 guards = q.guard_re.findall(q.fullseries[i])
3001 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
3001 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
3002 q.parseseries()
3002 q.parseseries()
3003 q.seriesdirty = True
3003 q.seriesdirty = True
3004
3004
3005 info = q.isapplied(patch)
3005 info = q.isapplied(patch)
3006 if info:
3006 if info:
3007 q.applied[info[0]] = statusentry(info[1], name)
3007 q.applied[info[0]] = statusentry(info[1], name)
3008 q.applieddirty = True
3008 q.applieddirty = True
3009
3009
3010 destdir = os.path.dirname(absdest)
3010 destdir = os.path.dirname(absdest)
3011 if not os.path.isdir(destdir):
3011 if not os.path.isdir(destdir):
3012 os.makedirs(destdir)
3012 os.makedirs(destdir)
3013 util.rename(q.join(patch), absdest)
3013 util.rename(q.join(patch), absdest)
3014 r = q.qrepo()
3014 r = q.qrepo()
3015 if r and patch in r.dirstate:
3015 if r and patch in r.dirstate:
3016 wctx = r[None]
3016 wctx = r[None]
3017 with r.wlock():
3017 with r.wlock():
3018 if r.dirstate[patch] == 'a':
3018 if r.dirstate[patch] == 'a':
3019 r.dirstate.drop(patch)
3019 r.dirstate.drop(patch)
3020 r.dirstate.add(name)
3020 r.dirstate.add(name)
3021 else:
3021 else:
3022 wctx.copy(patch, name)
3022 wctx.copy(patch, name)
3023 wctx.forget([patch])
3023 wctx.forget([patch])
3024
3024
3025 q.savedirty()
3025 q.savedirty()
3026
3026
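# Editor's note -- per the usage string above, e.g. (patch names hypothetical):
#
#   hg qmv new-name.patch                 # rename the current (topmost) patch
#   hg qmv old-name.patch new-name.patch  # rename a named patch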
3027 @command("qrestore",
3027 @command("qrestore",
3028 [('d', 'delete', None, _('delete save entry')),
3028 [('d', 'delete', None, _('delete save entry')),
3029 ('u', 'update', None, _('update queue working directory'))],
3029 ('u', 'update', None, _('update queue working directory'))],
3030 _('hg qrestore [-d] [-u] REV'))
3030 _('hg qrestore [-d] [-u] REV'))
3031 def restore(ui, repo, rev, **opts):
3031 def restore(ui, repo, rev, **opts):
3032 """restore the queue state saved by a revision (DEPRECATED)
3032 """restore the queue state saved by a revision (DEPRECATED)
3033
3033
3034 This command is deprecated, use :hg:`rebase` instead."""
3034 This command is deprecated, use :hg:`rebase` instead."""
3035 rev = repo.lookup(rev)
3035 rev = repo.lookup(rev)
3036 q = repo.mq
3036 q = repo.mq
3037 q.restore(repo, rev, delete=opts.get('delete'),
3037 q.restore(repo, rev, delete=opts.get('delete'),
3038 qupdate=opts.get('update'))
3038 qupdate=opts.get('update'))
3039 q.savedirty()
3039 q.savedirty()
3040 return 0
3040 return 0
3041
3041
3042 @command("qsave",
3042 @command("qsave",
3043 [('c', 'copy', None, _('copy patch directory')),
3043 [('c', 'copy', None, _('copy patch directory')),
3044 ('n', 'name', '',
3044 ('n', 'name', '',
3045 _('copy directory name'), _('NAME')),
3045 _('copy directory name'), _('NAME')),
3046 ('e', 'empty', None, _('clear queue status file')),
3046 ('e', 'empty', None, _('clear queue status file')),
3047 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3047 ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
3048 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
3048 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
3049 def save(ui, repo, **opts):
3049 def save(ui, repo, **opts):
3050 """save current queue state (DEPRECATED)
3050 """save current queue state (DEPRECATED)
3051
3051
3052 This command is deprecated, use :hg:`rebase` instead."""
3052 This command is deprecated, use :hg:`rebase` instead."""
3053 q = repo.mq
3053 q = repo.mq
3054 message = cmdutil.logmessage(ui, opts)
3054 message = cmdutil.logmessage(ui, opts)
3055 ret = q.save(repo, msg=message)
3055 ret = q.save(repo, msg=message)
3056 if ret:
3056 if ret:
3057 return ret
3057 return ret
3058 q.savedirty() # save to .hg/patches before copying
3058 q.savedirty() # save to .hg/patches before copying
3059 if opts.get('copy'):
3059 if opts.get('copy'):
3060 path = q.path
3060 path = q.path
3061 if opts.get('name'):
3061 if opts.get('name'):
3062 newpath = os.path.join(q.basepath, opts.get('name'))
3062 newpath = os.path.join(q.basepath, opts.get('name'))
3063 if os.path.exists(newpath):
3063 if os.path.exists(newpath):
3064 if not os.path.isdir(newpath):
3064 if not os.path.isdir(newpath):
3065 raise error.Abort(_('destination %s exists and is not '
3065 raise error.Abort(_('destination %s exists and is not '
3066 'a directory') % newpath)
3066 'a directory') % newpath)
3067 if not opts.get('force'):
3067 if not opts.get('force'):
3068 raise error.Abort(_('destination %s exists, '
3068 raise error.Abort(_('destination %s exists, '
3069 'use -f to force') % newpath)
3069 'use -f to force') % newpath)
3070 else:
3070 else:
3071 newpath = savename(path)
3071 newpath = savename(path)
3072 ui.warn(_("copy %s to %s\n") % (path, newpath))
3072 ui.warn(_("copy %s to %s\n") % (path, newpath))
3073 util.copyfiles(path, newpath)
3073 util.copyfiles(path, newpath)
3074 if opts.get('empty'):
3074 if opts.get('empty'):
3075 del q.applied[:]
3075 del q.applied[:]
3076 q.applieddirty = True
3076 q.applieddirty = True
3077 q.savedirty()
3077 q.savedirty()
3078 return 0
3078 return 0
3079
3079
3080
3080
3081 @command("qselect",
3081 @command("qselect",
3082 [('n', 'none', None, _('disable all guards')),
3082 [('n', 'none', None, _('disable all guards')),
3083 ('s', 'series', None, _('list all guards in series file')),
3083 ('s', 'series', None, _('list all guards in series file')),
3084 ('', 'pop', None, _('pop to before first guarded applied patch')),
3084 ('', 'pop', None, _('pop to before first guarded applied patch')),
3085 ('', 'reapply', None, _('pop, then reapply patches'))],
3085 ('', 'reapply', None, _('pop, then reapply patches'))],
3086 _('hg qselect [OPTION]... [GUARD]...'))
3086 _('hg qselect [OPTION]... [GUARD]...'))
3087 def select(ui, repo, *args, **opts):
3087 def select(ui, repo, *args, **opts):
3088 '''set or print guarded patches to push
3088 '''set or print guarded patches to push
3089
3089
3090 Use the :hg:`qguard` command to set or print guards on a patch, then use
3090 Use the :hg:`qguard` command to set or print guards on a patch, then use
3091 qselect to tell mq which guards to use. A patch will be pushed if
3091 qselect to tell mq which guards to use. A patch will be pushed if
3092 it has no guards or any positive guards match the currently
3092 it has no guards or any positive guards match the currently
3093 selected guard, but will not be pushed if any negative guards
3093 selected guard, but will not be pushed if any negative guards
3094 match the current guard. For example::
3094 match the current guard. For example::
3095
3095
3096 qguard foo.patch -- -stable (negative guard)
3096 qguard foo.patch -- -stable (negative guard)
3097 qguard bar.patch +stable (positive guard)
3097 qguard bar.patch +stable (positive guard)
3098 qselect stable
3098 qselect stable
3099
3099
3100 This activates the "stable" guard. mq will skip foo.patch (because
3100 This activates the "stable" guard. mq will skip foo.patch (because
3101 it has a negative match) but push bar.patch (because it has a
3101 it has a negative match) but push bar.patch (because it has a
3102 positive match).
3102 positive match).
3103
3103
3104 With no arguments, prints the currently active guards.
3104 With no arguments, prints the currently active guards.
3105 With one argument, sets the active guard.
3105 With one argument, sets the active guard.
3106
3106
3107 Use -n/--none to deactivate guards (no other arguments needed).
3107 Use -n/--none to deactivate guards (no other arguments needed).
3108 When no guards are active, patches with positive guards are
3108 When no guards are active, patches with positive guards are
3109 skipped and patches with negative guards are pushed.
3109 skipped and patches with negative guards are pushed.
3110
3110
3111 qselect can change the guards on applied patches. It does not pop
3111 qselect can change the guards on applied patches. It does not pop
3112 guarded patches by default. Use --pop to pop back to the last
3112 guarded patches by default. Use --pop to pop back to the last
3113 applied patch that is not guarded. Use --reapply (which implies
3113 applied patch that is not guarded. Use --reapply (which implies
3114 --pop) to push back to the current patch afterwards, but skip
3114 --pop) to push back to the current patch afterwards, but skip
3115 guarded patches.
3115 guarded patches.
3116
3116
3117 Use -s/--series to print a list of all guards in the series file
3117 Use -s/--series to print a list of all guards in the series file
3118 (no other arguments needed). Use -v for more information.
3118 (no other arguments needed). Use -v for more information.
3119
3119
3120 Returns 0 on success.'''
3120 Returns 0 on success.'''
3121
3121
3122 q = repo.mq
3122 q = repo.mq
3123 guards = q.active()
3123 guards = q.active()
3124 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3124 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3125 if args or opts.get('none'):
3125 if args or opts.get('none'):
3126 old_unapplied = q.unapplied(repo)
3126 old_unapplied = q.unapplied(repo)
3127 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3127 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3128 q.setactive(args)
3128 q.setactive(args)
3129 q.savedirty()
3129 q.savedirty()
3130 if not args:
3130 if not args:
3131 ui.status(_('guards deactivated\n'))
3131 ui.status(_('guards deactivated\n'))
3132 if not opts.get('pop') and not opts.get('reapply'):
3132 if not opts.get('pop') and not opts.get('reapply'):
3133 unapplied = q.unapplied(repo)
3133 unapplied = q.unapplied(repo)
3134 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3134 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3135 if len(unapplied) != len(old_unapplied):
3135 if len(unapplied) != len(old_unapplied):
3136 ui.status(_('number of unguarded, unapplied patches has '
3136 ui.status(_('number of unguarded, unapplied patches has '
3137 'changed from %d to %d\n') %
3137 'changed from %d to %d\n') %
3138 (len(old_unapplied), len(unapplied)))
3138 (len(old_unapplied), len(unapplied)))
3139 if len(guarded) != len(old_guarded):
3139 if len(guarded) != len(old_guarded):
3140 ui.status(_('number of guarded, applied patches has changed '
3140 ui.status(_('number of guarded, applied patches has changed '
3141 'from %d to %d\n') %
3141 'from %d to %d\n') %
3142 (len(old_guarded), len(guarded)))
3142 (len(old_guarded), len(guarded)))
3143 elif opts.get('series'):
3143 elif opts.get('series'):
3144 guards = {}
3144 guards = {}
3145 noguards = 0
3145 noguards = 0
3146 for gs in q.seriesguards:
3146 for gs in q.seriesguards:
3147 if not gs:
3147 if not gs:
3148 noguards += 1
3148 noguards += 1
3149 for g in gs:
3149 for g in gs:
3150 guards.setdefault(g, 0)
3150 guards.setdefault(g, 0)
3151 guards[g] += 1
3151 guards[g] += 1
3152 if ui.verbose:
3152 if ui.verbose:
3153 guards['NONE'] = noguards
3153 guards['NONE'] = noguards
3154 guards = guards.items()
3154 guards = guards.items()
3155 guards.sort(key=lambda x: x[0][1:])
3155 guards.sort(key=lambda x: x[0][1:])
3156 if guards:
3156 if guards:
3157 ui.note(_('guards in series file:\n'))
3157 ui.note(_('guards in series file:\n'))
3158 for guard, count in guards:
3158 for guard, count in guards:
3159 ui.note('%2d ' % count)
3159 ui.note('%2d ' % count)
3160 ui.write(guard, '\n')
3160 ui.write(guard, '\n')
3161 else:
3161 else:
3162 ui.note(_('no guards in series file\n'))
3162 ui.note(_('no guards in series file\n'))
3163 else:
3163 else:
3164 if guards:
3164 if guards:
3165 ui.note(_('active guards:\n'))
3165 ui.note(_('active guards:\n'))
3166 for g in guards:
3166 for g in guards:
3167 ui.write(g, '\n')
3167 ui.write(g, '\n')
3168 else:
3168 else:
3169 ui.write(_('no active guards\n'))
3169 ui.write(_('no active guards\n'))
3170 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3170 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3171 popped = False
3171 popped = False
3172 if opts.get('pop') or opts.get('reapply'):
3172 if opts.get('pop') or opts.get('reapply'):
3173 for i in xrange(len(q.applied)):
3173 for i in xrange(len(q.applied)):
3174 if not pushable(i):
3174 if not pushable(i):
3175 ui.status(_('popping guarded patches\n'))
3175 ui.status(_('popping guarded patches\n'))
3176 popped = True
3176 popped = True
3177 if i == 0:
3177 if i == 0:
3178 q.pop(repo, all=True)
3178 q.pop(repo, all=True)
3179 else:
3179 else:
3180 q.pop(repo, q.applied[i - 1].name)
3180 q.pop(repo, q.applied[i - 1].name)
3181 break
3181 break
3182 if popped:
3182 if popped:
3183 try:
3183 try:
3184 if reapply:
3184 if reapply:
3185 ui.status(_('reapplying unguarded patches\n'))
3185 ui.status(_('reapplying unguarded patches\n'))
3186 q.push(repo, reapply)
3186 q.push(repo, reapply)
3187 finally:
3187 finally:
3188 q.savedirty()
3188 q.savedirty()
3189
3189
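# Editor's note -- hedged continuation of the qguard sketch earlier. After
# guards have been set, typical selections look like (guard names hypothetical):
#
#   hg qselect experimental      # activate the 'experimental' guard
#   hg qselect --reapply stable  # switch guards, pop and re-push unguarded patches
#   hg qselect -n                # deactivate all guards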
3190 @command("qfinish",
3190 @command("qfinish",
3191 [('a', 'applied', None, _('finish all applied changesets'))],
3191 [('a', 'applied', None, _('finish all applied changesets'))],
3192 _('hg qfinish [-a] [REV]...'))
3192 _('hg qfinish [-a] [REV]...'))
3193 def finish(ui, repo, *revrange, **opts):
3193 def finish(ui, repo, *revrange, **opts):
3194 """move applied patches into repository history
3194 """move applied patches into repository history
3195
3195
3196 Finishes the specified revisions (corresponding to applied
3196 Finishes the specified revisions (corresponding to applied
3197 patches) by moving them out of mq control into regular repository
3197 patches) by moving them out of mq control into regular repository
3198 history.
3198 history.
3199
3199
3200 Accepts a revision range or the -a/--applied option. If --applied
3200 Accepts a revision range or the -a/--applied option. If --applied
3201 is specified, all applied mq revisions are removed from mq
3201 is specified, all applied mq revisions are removed from mq
3202 control. Otherwise, the given revisions must be at the base of the
3202 control. Otherwise, the given revisions must be at the base of the
3203 stack of applied patches.
3203 stack of applied patches.
3204
3204
3205 This can be especially useful if your changes have been applied to
3205 This can be especially useful if your changes have been applied to
3206 an upstream repository, or if you are about to push your changes
3206 an upstream repository, or if you are about to push your changes
3207 to upstream.
3207 to upstream.
3208
3208
3209 Returns 0 on success.
3209 Returns 0 on success.
3210 """
3210 """
3211 if not opts.get('applied') and not revrange:
3211 if not opts.get('applied') and not revrange:
3212 raise error.Abort(_('no revisions specified'))
3212 raise error.Abort(_('no revisions specified'))
3213 elif opts.get('applied'):
3213 elif opts.get('applied'):
3214 revrange = ('qbase::qtip',) + revrange
3214 revrange = ('qbase::qtip',) + revrange
3215
3215
3216 q = repo.mq
3216 q = repo.mq
3217 if not q.applied:
3217 if not q.applied:
3218 ui.status(_('no patches applied\n'))
3218 ui.status(_('no patches applied\n'))
3219 return 0
3219 return 0
3220
3220
3221 revs = scmutil.revrange(repo, revrange)
3221 revs = scmutil.revrange(repo, revrange)
3222 if repo['.'].rev() in revs and repo[None].files():
3222 if repo['.'].rev() in revs and repo[None].files():
3223 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3223 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3224 # queue.finish may change phases but leaves the responsibility to lock the
3224 # queue.finish may change phases but leaves the responsibility to lock the
3225 # repo to the caller to avoid deadlock with wlock. This command code is
3225 # repo to the caller to avoid deadlock with wlock. This command code is
3226 # responsible for this locking.
3226 # responsible for this locking.
3227 with repo.lock():
3227 with repo.lock():
3228 q.finish(repo, revs)
3228 q.finish(repo, revs)
3229 q.savedirty()
3229 q.savedirty()
3230 return 0
3230 return 0
3231
3231
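# Editor's note -- illustrative only. Per the docstring above, either finish
# everything that is applied, or a range rooted at the base of the stack:
#
#   hg qfinish -a
#   hg qfinish qbase::qtip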
3232 @command("qqueue",
3232 @command("qqueue",
3233 [('l', 'list', False, _('list all available queues')),
3233 [('l', 'list', False, _('list all available queues')),
3234 ('', 'active', False, _('print name of active queue')),
3234 ('', 'active', False, _('print name of active queue')),
3235 ('c', 'create', False, _('create new queue')),
3235 ('c', 'create', False, _('create new queue')),
3236 ('', 'rename', False, _('rename active queue')),
3236 ('', 'rename', False, _('rename active queue')),
3237 ('', 'delete', False, _('delete reference to queue')),
3237 ('', 'delete', False, _('delete reference to queue')),
3238 ('', 'purge', False, _('delete queue, and remove patch dir')),
3238 ('', 'purge', False, _('delete queue, and remove patch dir')),
3239 ],
3239 ],
3240 _('[OPTION] [QUEUE]'))
3240 _('[OPTION] [QUEUE]'))
3241 def qqueue(ui, repo, name=None, **opts):
3241 def qqueue(ui, repo, name=None, **opts):
3242 '''manage multiple patch queues
3242 '''manage multiple patch queues
3243
3243
3244 Supports switching between different patch queues, as well as creating
3244 Supports switching between different patch queues, as well as creating
3245 new patch queues and deleting existing ones.
3245 new patch queues and deleting existing ones.
3246
3246
3247 Omitting a queue name or specifying -l/--list will show you the registered
3247 Omitting a queue name or specifying -l/--list will show you the registered
3248 queues - by default the "normal" patches queue is registered. The currently
3248 queues - by default the "normal" patches queue is registered. The currently
3249 active queue will be marked with "(active)". Specifying --active will print
3249 active queue will be marked with "(active)". Specifying --active will print
3250 only the name of the active queue.
3250 only the name of the active queue.
3251
3251
3252 To create a new queue, use -c/--create. The queue is automatically made
3252 To create a new queue, use -c/--create. The queue is automatically made
3253 active, unless patches from the currently active queue are applied in
3253 active, unless patches from the currently active queue are applied in
3254 the repository; in that case the queue is only created and switching
3254 the repository; in that case the queue is only created and switching
3255 to it fails.
3255 to it fails.
3256
3256
3257 To delete an existing queue, use --delete. You cannot delete the currently
3257 To delete an existing queue, use --delete. You cannot delete the currently
3258 active queue.
3258 active queue.
3259
3259
3260 Returns 0 on success.
3260 Returns 0 on success.
3261 '''
3261 '''
3262 q = repo.mq
3262 q = repo.mq
3263 _defaultqueue = 'patches'
3263 _defaultqueue = 'patches'
3264 _allqueues = 'patches.queues'
3264 _allqueues = 'patches.queues'
3265 _activequeue = 'patches.queue'
3265 _activequeue = 'patches.queue'
3266
3266
3267 def _getcurrent():
3267 def _getcurrent():
3268 cur = os.path.basename(q.path)
3268 cur = os.path.basename(q.path)
3269 if cur.startswith('patches-'):
3269 if cur.startswith('patches-'):
3270 cur = cur[8:]
3270 cur = cur[8:]
3271 return cur
3271 return cur
3272
3272
3273 def _noqueues():
3273 def _noqueues():
3274 try:
3274 try:
3275 fh = repo.vfs(_allqueues, 'r')
3275 fh = repo.vfs(_allqueues, 'r')
3276 fh.close()
3276 fh.close()
3277 except IOError:
3277 except IOError:
3278 return True
3278 return True
3279
3279
3280 return False
3280 return False
3281
3281
3282 def _getqueues():
3282 def _getqueues():
3283 current = _getcurrent()
3283 current = _getcurrent()
3284
3284
3285 try:
3285 try:
3286 fh = repo.vfs(_allqueues, 'r')
3286 fh = repo.vfs(_allqueues, 'r')
3287 queues = [queue.strip() for queue in fh if queue.strip()]
3287 queues = [queue.strip() for queue in fh if queue.strip()]
3288 fh.close()
3288 fh.close()
3289 if current not in queues:
3289 if current not in queues:
3290 queues.append(current)
3290 queues.append(current)
3291 except IOError:
3291 except IOError:
3292 queues = [_defaultqueue]
3292 queues = [_defaultqueue]
3293
3293
3294 return sorted(queues)
3294 return sorted(queues)
3295
3295
3296 def _setactive(name):
3296 def _setactive(name):
3297 if q.applied:
3297 if q.applied:
3298 raise error.Abort(_('new queue created, but cannot make active '
3298 raise error.Abort(_('new queue created, but cannot make active '
3299 'as patches are applied'))
3299 'as patches are applied'))
3300 _setactivenocheck(name)
3300 _setactivenocheck(name)
3301
3301
3302 def _setactivenocheck(name):
3302 def _setactivenocheck(name):
3303 fh = repo.vfs(_activequeue, 'w')
3303 fh = repo.vfs(_activequeue, 'w')
3304 if name != 'patches':
3304 if name != 'patches':
3305 fh.write(name)
3305 fh.write(name)
3306 fh.close()
3306 fh.close()
3307
3307
3308 def _addqueue(name):
3308 def _addqueue(name):
3309 fh = repo.vfs(_allqueues, 'a')
3309 fh = repo.vfs(_allqueues, 'a')
3310 fh.write('%s\n' % (name,))
3310 fh.write('%s\n' % (name,))
3311 fh.close()
3311 fh.close()
3312
3312
3313 def _queuedir(name):
3313 def _queuedir(name):
3314 if name == 'patches':
3314 if name == 'patches':
3315 return repo.vfs.join('patches')
3315 return repo.vfs.join('patches')
3316 else:
3316 else:
3317 return repo.vfs.join('patches-' + name)
3317 return repo.vfs.join('patches-' + name)
3318
3318
3319 def _validname(name):
3319 def _validname(name):
3320 for n in name:
3320 for n in name:
3321 if n in ':\\/.':
3321 if n in ':\\/.':
3322 return False
3322 return False
3323 return True
3323 return True
3324
3324
3325 def _delete(name):
3325 def _delete(name):
3326 if name not in existing:
3326 if name not in existing:
3327 raise error.Abort(_('cannot delete queue that does not exist'))
3327 raise error.Abort(_('cannot delete queue that does not exist'))
3328
3328
3329 current = _getcurrent()
3329 current = _getcurrent()
3330
3330
3331 if name == current:
3331 if name == current:
3332 raise error.Abort(_('cannot delete currently active queue'))
3332 raise error.Abort(_('cannot delete currently active queue'))
3333
3333
3334 fh = repo.vfs('patches.queues.new', 'w')
3334 fh = repo.vfs('patches.queues.new', 'w')
3335 for queue in existing:
3335 for queue in existing:
3336 if queue == name:
3336 if queue == name:
3337 continue
3337 continue
3338 fh.write('%s\n' % (queue,))
3338 fh.write('%s\n' % (queue,))
3339 fh.close()
3339 fh.close()
3340 repo.vfs.rename('patches.queues.new', _allqueues)
3340 repo.vfs.rename('patches.queues.new', _allqueues)
3341
3341
3342 if not name or opts.get('list') or opts.get('active'):
3342 if not name or opts.get('list') or opts.get('active'):
3343 current = _getcurrent()
3343 current = _getcurrent()
3344 if opts.get('active'):
3344 if opts.get('active'):
3345 ui.write('%s\n' % (current,))
3345 ui.write('%s\n' % (current,))
3346 return
3346 return
3347 for queue in _getqueues():
3347 for queue in _getqueues():
3348 ui.write('%s' % (queue,))
3348 ui.write('%s' % (queue,))
3349 if queue == current and not ui.quiet:
3349 if queue == current and not ui.quiet:
3350 ui.write(_(' (active)\n'))
3350 ui.write(_(' (active)\n'))
3351 else:
3351 else:
3352 ui.write('\n')
3352 ui.write('\n')
3353 return
3353 return
3354
3354
3355 if not _validname(name):
3355 if not _validname(name):
3356 raise error.Abort(
3356 raise error.Abort(
3357 _('invalid queue name, may not contain the characters ":\\/."'))
3357 _('invalid queue name, may not contain the characters ":\\/."'))
3358
3358
3359 with repo.wlock():
3359 with repo.wlock():
3360 existing = _getqueues()
3360 existing = _getqueues()
3361
3361
3362 if opts.get('create'):
3362 if opts.get('create'):
3363 if name in existing:
3363 if name in existing:
3364 raise error.Abort(_('queue "%s" already exists') % name)
3364 raise error.Abort(_('queue "%s" already exists') % name)
3365 if _noqueues():
3365 if _noqueues():
3366 _addqueue(_defaultqueue)
3366 _addqueue(_defaultqueue)
3367 _addqueue(name)
3367 _addqueue(name)
3368 _setactive(name)
3368 _setactive(name)
3369 elif opts.get('rename'):
3369 elif opts.get('rename'):
3370 current = _getcurrent()
3370 current = _getcurrent()
3371 if name == current:
3371 if name == current:
3372 raise error.Abort(_('can\'t rename "%s" to its current name')
3372 raise error.Abort(_('can\'t rename "%s" to its current name')
3373 % name)
3373 % name)
3374 if name in existing:
3374 if name in existing:
3375 raise error.Abort(_('queue "%s" already exists') % name)
3375 raise error.Abort(_('queue "%s" already exists') % name)
3376
3376
3377 olddir = _queuedir(current)
3377 olddir = _queuedir(current)
3378 newdir = _queuedir(name)
3378 newdir = _queuedir(name)
3379
3379
3380 if os.path.exists(newdir):
3380 if os.path.exists(newdir):
3381 raise error.Abort(_('non-queue directory "%s" already exists') %
3381 raise error.Abort(_('non-queue directory "%s" already exists') %
3382 newdir)
3382 newdir)
3383
3383
3384 fh = repo.vfs('patches.queues.new', 'w')
3384 fh = repo.vfs('patches.queues.new', 'w')
3385 for queue in existing:
3385 for queue in existing:
3386 if queue == current:
3386 if queue == current:
3387 fh.write('%s\n' % (name,))
3387 fh.write('%s\n' % (name,))
3388 if os.path.exists(olddir):
3388 if os.path.exists(olddir):
3389 util.rename(olddir, newdir)
3389 util.rename(olddir, newdir)
3390 else:
3390 else:
3391 fh.write('%s\n' % (queue,))
3391 fh.write('%s\n' % (queue,))
3392 fh.close()
3392 fh.close()
3393 repo.vfs.rename('patches.queues.new', _allqueues)
3393 repo.vfs.rename('patches.queues.new', _allqueues)
3394 _setactivenocheck(name)
3394 _setactivenocheck(name)
3395 elif opts.get('delete'):
3395 elif opts.get('delete'):
3396 _delete(name)
3396 _delete(name)
3397 elif opts.get('purge'):
3397 elif opts.get('purge'):
3398 if name in existing:
3398 if name in existing:
3399 _delete(name)
3399 _delete(name)
3400 qdir = _queuedir(name)
3400 qdir = _queuedir(name)
3401 if os.path.exists(qdir):
3401 if os.path.exists(qdir):
3402 shutil.rmtree(qdir)
3402 shutil.rmtree(qdir)
3403 else:
3403 else:
3404 if name not in existing:
3404 if name not in existing:
3405 raise error.Abort(_('use --create to create a new queue'))
3405 raise error.Abort(_('use --create to create a new queue'))
3406 _setactive(name)
3406 _setactive(name)
3407
3407
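# Editor's note -- hedged example of managing queues with the command above
# ('refactoring' is a hypothetical queue name):
#
#   hg qqueue --create refactoring   # create and switch to a new queue
#   hg qqueue -l                     # list queues; the active one is marked '(active)'
#   hg qqueue patches                # switch back to the default queue
#
# The bookkeeping lives in .hg/patches.queues and .hg/patches.queue, as read
# and written by the helpers defined in qqueue() above.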
3408 def mqphasedefaults(repo, roots):
3408 def mqphasedefaults(repo, roots):
3409 """callback used to set mq changeset as secret when no phase data exists"""
3409 """callback used to set mq changeset as secret when no phase data exists"""
3410 if repo.mq.applied:
3410 if repo.mq.applied:
3411 if repo.ui.configbool('mq', 'secret', False):
3411 if repo.ui.configbool('mq', 'secret', False):
3412 mqphase = phases.secret
3412 mqphase = phases.secret
3413 else:
3413 else:
3414 mqphase = phases.draft
3414 mqphase = phases.draft
3415 qbase = repo[repo.mq.applied[0].node]
3415 qbase = repo[repo.mq.applied[0].node]
3416 roots[mqphase].add(qbase.node())
3416 roots[mqphase].add(qbase.node())
3417 return roots
3417 return roots
3418
3418
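# Editor's note -- mqphasedefaults above keys off the 'mq.secret' setting; a
# minimal configuration sketch (hgrc syntax, shown here as a comment):
#
#   [mq]
#   secret = True    # applied mq changesets default to the secret phase
#
# With the default (False) they fall back to the draft phase instead.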
3419 def reposetup(ui, repo):
3419 def reposetup(ui, repo):
3420 class mqrepo(repo.__class__):
3420 class mqrepo(repo.__class__):
3421 @localrepo.unfilteredpropertycache
3421 @localrepo.unfilteredpropertycache
3422 def mq(self):
3422 def mq(self):
3423 return queue(self.ui, self.baseui, self.path)
3423 return queue(self.ui, self.baseui, self.path)
3424
3424
3425 def invalidateall(self):
3425 def invalidateall(self):
3426 super(mqrepo, self).invalidateall()
3426 super(mqrepo, self).invalidateall()
3427 if localrepo.hasunfilteredcache(self, 'mq'):
3427 if localrepo.hasunfilteredcache(self, 'mq'):
3428 # recreate mq in case queue path was changed
3428 # recreate mq in case queue path was changed
3429 delattr(self.unfiltered(), 'mq')
3429 delattr(self.unfiltered(), 'mq')
3430
3430
3431 def abortifwdirpatched(self, errmsg, force=False):
3431 def abortifwdirpatched(self, errmsg, force=False):
3432 if self.mq.applied and self.mq.checkapplied and not force:
3432 if self.mq.applied and self.mq.checkapplied and not force:
3433 parents = self.dirstate.parents()
3433 parents = self.dirstate.parents()
3434 patches = [s.node for s in self.mq.applied]
3434 patches = [s.node for s in self.mq.applied]
3435 if parents[0] in patches or parents[1] in patches:
3435 if parents[0] in patches or parents[1] in patches:
3436 raise error.Abort(errmsg)
3436 raise error.Abort(errmsg)
3437
3437
3438 def commit(self, text="", user=None, date=None, match=None,
3438 def commit(self, text="", user=None, date=None, match=None,
3439 force=False, editor=False, extra=None):
3439 force=False, editor=False, extra=None):
3440 if extra is None:
3440 if extra is None:
3441 extra = {}
3441 extra = {}
3442 self.abortifwdirpatched(
3442 self.abortifwdirpatched(
3443 _('cannot commit over an applied mq patch'),
3443 _('cannot commit over an applied mq patch'),
3444 force)
3444 force)
3445
3445
3446 return super(mqrepo, self).commit(text, user, date, match, force,
3446 return super(mqrepo, self).commit(text, user, date, match, force,
3447 editor, extra)
3447 editor, extra)
3448
3448
3449 def checkpush(self, pushop):
3449 def checkpush(self, pushop):
3450 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3450 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3451 outapplied = [e.node for e in self.mq.applied]
3451 outapplied = [e.node for e in self.mq.applied]
3452 if pushop.revs:
3452 if pushop.revs:
3453 # Assume applied patches have no non-patch descendants and
3453 # Assume applied patches have no non-patch descendants and
3454 # are not already on the remote. Filter out any changeset
3454 # are not already on the remote. Filter out any changeset
3455 # that is not being pushed.
3455 # that is not being pushed.
3456 heads = set(pushop.revs)
3456 heads = set(pushop.revs)
3457 for node in reversed(outapplied):
3457 for node in reversed(outapplied):
3458 if node in heads:
3458 if node in heads:
3459 break
3459 break
3460 else:
3460 else:
3461 outapplied.pop()
3461 outapplied.pop()
3462 # looking for pushed and shared changeset
3462 # looking for pushed and shared changeset
3463 for node in outapplied:
3463 for node in outapplied:
3464 if self[node].phase() < phases.secret:
3464 if self[node].phase() < phases.secret:
3465 raise error.Abort(_('source has mq patches applied'))
3465 raise error.Abort(_('source has mq patches applied'))
3466 # no non-secret patches pushed
3466 # no non-secret patches pushed
3467 super(mqrepo, self).checkpush(pushop)
3467 super(mqrepo, self).checkpush(pushop)
3468
3468
3469 def _findtags(self):
3469 def _findtags(self):
3470 '''augment tags from base class with patch tags'''
3470 '''augment tags from base class with patch tags'''
3471 result = super(mqrepo, self)._findtags()
3471 result = super(mqrepo, self)._findtags()
3472
3472
3473 q = self.mq
3473 q = self.mq
3474 if not q.applied:
3474 if not q.applied:
3475 return result
3475 return result
3476
3476
3477 mqtags = [(patch.node, patch.name) for patch in q.applied]
3477 mqtags = [(patch.node, patch.name) for patch in q.applied]
3478
3478
3479 try:
3479 try:
3480 # for now ignore filtering business
3480 # for now ignore filtering business
3481 self.unfiltered().changelog.rev(mqtags[-1][0])
3481 self.unfiltered().changelog.rev(mqtags[-1][0])
3482 except error.LookupError:
3482 except error.LookupError:
3483 self.ui.warn(_('mq status file refers to unknown node %s\n')
3483 self.ui.warn(_('mq status file refers to unknown node %s\n')
3484 % short(mqtags[-1][0]))
3484 % short(mqtags[-1][0]))
3485 return result
3485 return result
3486
3486
3487 # do not add fake tags for filtered revisions
3487 # do not add fake tags for filtered revisions
3488 included = self.changelog.hasnode
3488 included = self.changelog.hasnode
3489 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3489 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3490 if not mqtags:
3490 if not mqtags:
3491 return result
3491 return result
3492
3492
3493 mqtags.append((mqtags[-1][0], 'qtip'))
3493 mqtags.append((mqtags[-1][0], 'qtip'))
3494 mqtags.append((mqtags[0][0], 'qbase'))
3494 mqtags.append((mqtags[0][0], 'qbase'))
3495 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3495 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3496 tags = result[0]
3496 tags = result[0]
3497 for patch in mqtags:
3497 for patch in mqtags:
3498 if patch[1] in tags:
3498 if patch[1] in tags:
3499 self.ui.warn(_('tag %s overrides mq patch of the same '
3499 self.ui.warn(_('tag %s overrides mq patch of the same '
3500 'name\n') % patch[1])
3500 'name\n') % patch[1])
3501 else:
3501 else:
3502 tags[patch[1]] = patch[0]
3502 tags[patch[1]] = patch[0]
3503
3503
3504 return result
3504 return result
3505
3505
3506 if repo.local():
3506 if repo.local():
3507 repo.__class__ = mqrepo
3507 repo.__class__ = mqrepo
3508
3508
3509 repo._phasedefaults.append(mqphasedefaults)
3509 repo._phasedefaults.append(mqphasedefaults)
3510
3510
3511 def mqimport(orig, ui, repo, *args, **kwargs):
3511 def mqimport(orig, ui, repo, *args, **kwargs):
3512 if (util.safehasattr(repo, 'abortifwdirpatched')
3512 if (util.safehasattr(repo, 'abortifwdirpatched')
3513 and not kwargs.get('no_commit', False)):
3513 and not kwargs.get('no_commit', False)):
3514 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3514 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3515 kwargs.get('force'))
3515 kwargs.get('force'))
3516 return orig(ui, repo, *args, **kwargs)
3516 return orig(ui, repo, *args, **kwargs)
3517
3517
3518 def mqinit(orig, ui, *args, **kwargs):
3518 def mqinit(orig, ui, *args, **kwargs):
3519 mq = kwargs.pop('mq', None)
3519 mq = kwargs.pop('mq', None)
3520
3520
3521 if not mq:
3521 if not mq:
3522 return orig(ui, *args, **kwargs)
3522 return orig(ui, *args, **kwargs)
3523
3523
3524 if args:
3524 if args:
3525 repopath = args[0]
3525 repopath = args[0]
3526 if not hg.islocal(repopath):
3526 if not hg.islocal(repopath):
3527 raise error.Abort(_('only a local queue repository '
3527 raise error.Abort(_('only a local queue repository '
3528 'may be initialized'))
3528 'may be initialized'))
3529 else:
3529 else:
3530 repopath = cmdutil.findrepo(pycompat.getcwd())
3530 repopath = cmdutil.findrepo(pycompat.getcwd())
3531 if not repopath:
3531 if not repopath:
3532 raise error.Abort(_('there is no Mercurial repository here '
3532 raise error.Abort(_('there is no Mercurial repository here '
3533 '(.hg not found)'))
3533 '(.hg not found)'))
3534 repo = hg.repository(ui, repopath)
3534 repo = hg.repository(ui, repopath)
3535 return qinit(ui, repo, True)
3535 return qinit(ui, repo, True)
3536
3536
3537 def mqcommand(orig, ui, repo, *args, **kwargs):
3537 def mqcommand(orig, ui, repo, *args, **kwargs):
3538 """Add --mq option to operate on patch repository instead of main"""
3538 """Add --mq option to operate on patch repository instead of main"""
3539
3539
3540 # some commands do not like getting unknown options
3540 # some commands do not like getting unknown options
3541 mq = kwargs.pop(r'mq', None)
3541 mq = kwargs.pop(r'mq', None)
3542
3542
3543 if not mq:
3543 if not mq:
3544 return orig(ui, repo, *args, **kwargs)
3544 return orig(ui, repo, *args, **kwargs)
3545
3545
3546 q = repo.mq
3546 q = repo.mq
3547 r = q.qrepo()
3547 r = q.qrepo()
3548 if not r:
3548 if not r:
3549 raise error.Abort(_('no queue repository'))
3549 raise error.Abort(_('no queue repository'))
3550 return orig(r.ui, r, *args, **kwargs)
3550 return orig(r.ui, r, *args, **kwargs)
3551
3551
3552 def summaryhook(ui, repo):
3552 def summaryhook(ui, repo):
3553 q = repo.mq
3553 q = repo.mq
3554 m = []
3554 m = []
3555 a, u = len(q.applied), len(q.unapplied(repo))
3555 a, u = len(q.applied), len(q.unapplied(repo))
3556 if a:
3556 if a:
3557 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3557 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3558 if u:
3558 if u:
3559 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3559 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3560 if m:
3560 if m:
3561 # i18n: column positioning for "hg summary"
3561 # i18n: column positioning for "hg summary"
3562 ui.write(_("mq: %s\n") % ', '.join(m))
3562 ui.write(_("mq: %s\n") % ', '.join(m))
3563 else:
3563 else:
3564 # i18n: column positioning for "hg summary"
3564 # i18n: column positioning for "hg summary"
3565 ui.note(_("mq: (empty queue)\n"))
3565 ui.note(_("mq: (empty queue)\n"))
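Once registered through cmdutil.summaryhooks in extsetup() below, the hook above contributes a single line to the output of hg summary. An illustrative transcript (the counts are placeholders, not taken from a real queue):

    $ hg summary
    ...
    mq: 2 applied, 3 unapplied

With an empty queue the line is only shown in verbose mode, as "mq: (empty queue)".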
3566
3566
3567 revsetpredicate = registrar.revsetpredicate()
3567 revsetpredicate = registrar.revsetpredicate()
3568
3568
3569 @revsetpredicate('mq()')
3569 @revsetpredicate('mq()')
3570 def revsetmq(repo, subset, x):
3570 def revsetmq(repo, subset, x):
3571 """Changesets managed by MQ.
3571 """Changesets managed by MQ.
3572 """
3572 """
3573 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3573 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3574 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3574 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3575 return smartset.baseset([r for r in subset if r in applied])
3575 return smartset.baseset([r for r in subset if r in applied])
3576
3576
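The mq() revset registered above can be used wherever Mercurial accepts a revset; for example (illustrative invocations, not part of this changeset):

    hg log -r 'mq()'                  # changesets of the currently applied patches
    hg log -r 'not mq() and draft()'  # draft changesets that are not applied mq patches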
3577 # tell hggettext to extract docstrings from these functions:
3577 # tell hggettext to extract docstrings from these functions:
3578 i18nfunctions = [revsetmq]
3578 i18nfunctions = [revsetmq]
3579
3579
3580 def extsetup(ui):
3580 def extsetup(ui):
3581 # Ensure mq wrappers are called first, regardless of extension load order,
3581 # Ensure mq wrappers are called first, regardless of extension load order,
3582 # by NOT wrapping in uisetup() and instead deferring to init stage two here.
3582 # by NOT wrapping in uisetup() and instead deferring to init stage two here.
3583 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3583 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3584
3584
3585 extensions.wrapcommand(commands.table, 'import', mqimport)
3585 extensions.wrapcommand(commands.table, 'import', mqimport)
3586 cmdutil.summaryhooks.add('mq', summaryhook)
3586 cmdutil.summaryhooks.add('mq', summaryhook)
3587
3587
3588 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3588 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3589 entry[1].extend(mqopt)
3589 entry[1].extend(mqopt)
3590
3590
3591 def dotable(cmdtable):
3591 def dotable(cmdtable):
3592 for cmd, entry in cmdtable.iteritems():
3592 for cmd, entry in cmdtable.iteritems():
3593 cmd = cmdutil.parsealiases(cmd)[0]
3593 cmd = cmdutil.parsealiases(cmd)[0]
3594 func = entry[0]
3594 func = entry[0]
3595 if func.norepo:
3595 if func.norepo:
3596 continue
3596 continue
3597 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3597 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3598 entry[1].extend(mqopt)
3598 entry[1].extend(mqopt)
3599
3599
3600 dotable(commands.table)
3600 dotable(commands.table)
3601
3601
3602 for extname, extmodule in extensions.extensions():
3602 for extname, extmodule in extensions.extensions():
3603 if extmodule.__file__ != __file__:
3603 if extmodule.__file__ != __file__:
3604 dotable(getattr(extmodule, 'cmdtable', {}))
3604 dotable(getattr(extmodule, 'cmdtable', {}))
3605
3605
3606 colortable = {'qguard.negative': 'red',
3606 colortable = {'qguard.negative': 'red',
3607 'qguard.positive': 'yellow',
3607 'qguard.positive': 'yellow',
3608 'qguard.unguarded': 'green',
3608 'qguard.unguarded': 'green',
3609 'qseries.applied': 'blue bold underline',
3609 'qseries.applied': 'blue bold underline',
3610 'qseries.guarded': 'black bold',
3610 'qseries.guarded': 'black bold',
3611 'qseries.missing': 'red bold',
3611 'qseries.missing': 'red bold',
3612 'qseries.unapplied': 'black bold'}
3612 'qseries.unapplied': 'black bold'}
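For orientation, the sketch below shows the wrapping pattern used by extsetup()/dotable() above in isolation: wrap a single command and append one extra flag to its option table. The extension, the wrapped command and the 'shout' flag are hypothetical; only extensions.wrapcommand() and the entry[1] options list mirror the code above.

    # minimal sketch of the mq-style command wrapping pattern (hypothetical)
    from mercurial.i18n import _
    from mercurial import commands, extensions

    def _wrappedstatus(orig, ui, repo, *args, **kwargs):
        # pop the extra option so the wrapped command never sees an unknown flag
        if kwargs.pop('shout', None):
            ui.write(_('about to run status\n'))
        return orig(ui, repo, *args, **kwargs)

    def extsetup(ui):
        entry = extensions.wrapcommand(commands.table, 'status', _wrappedstatus)
        entry[1].append(('', 'shout', None, _('announce before running status')))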
@@ -1,742 +1,742 @@
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the command line. See the [email] and [smtp] sections in
44 directly from the command line. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 you do not supply one via configuration or the command line. You can
48 you do not supply one via configuration or the command line. You can
49 override this to never prompt by configuring an empty value::
49 override this to never prompt by configuring an empty value::
50
50
51 [email]
51 [email]
52 cc =
52 cc =
53
53
54 You can control the default inclusion of an introduction message with the
54 You can control the default inclusion of an introduction message with the
55 ``patchbomb.intro`` configuration option. The configuration is always
55 ``patchbomb.intro`` configuration option. The configuration is always
56 overridden by command line flags like --intro and --desc::
56 overridden by command line flags like --intro and --desc::
57
57
58 [patchbomb]
58 [patchbomb]
59 intro=auto # include introduction message if more than 1 patch (default)
59 intro=auto # include introduction message if more than 1 patch (default)
60 intro=never # never include an introduction message
60 intro=never # never include an introduction message
61 intro=always # always include an introduction message
61 intro=always # always include an introduction message
62
62
63 You can specify a template for flags to be added in subject prefixes. Flags
63 You can specify a template for flags to be added in subject prefixes. Flags
64 specified by --flag option are exported as ``{flags}`` keyword::
64 specified by --flag option are exported as ``{flags}`` keyword::
65
65
66 [patchbomb]
66 [patchbomb]
67 flagtemplate = "{separate(' ',
67 flagtemplate = "{separate(' ',
68 ifeq(branch, 'default', '', branch|upper),
68 ifeq(branch, 'default', '', branch|upper),
69 flags)}"
69 flags)}"
70
70
71 You can set patchbomb to always ask for confirmation by setting
71 You can set patchbomb to always ask for confirmation by setting
72 ``patchbomb.confirm`` to true.
72 ``patchbomb.confirm`` to true.
73 '''
73 '''
74 from __future__ import absolute_import
74 from __future__ import absolute_import
75
75
76 import email as emailmod
76 import email as emailmod
77 import errno
77 import errno
78 import os
78 import os
79 import socket
79 import socket
80 import tempfile
80 import tempfile
81
81
82 from mercurial.i18n import _
82 from mercurial.i18n import _
83 from mercurial import (
83 from mercurial import (
84 cmdutil,
84 cmdutil,
85 commands,
85 commands,
86 error,
86 error,
87 formatter,
87 formatter,
88 hg,
88 hg,
89 mail,
89 mail,
90 node as nodemod,
90 node as nodemod,
91 patch,
91 patch,
92 registrar,
92 registrar,
93 scmutil,
93 scmutil,
94 templater,
94 templater,
95 util,
95 util,
96 )
96 )
97 stringio = util.stringio
97 stringio = util.stringio
98
98
99 cmdtable = {}
99 cmdtable = {}
100 command = registrar.command(cmdtable)
100 command = registrar.command(cmdtable)
101 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
101 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
102 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
102 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
103 # be specifying the version(s) of Mercurial they are tested with, or
103 # be specifying the version(s) of Mercurial they are tested with, or
104 # leave the attribute unspecified.
104 # leave the attribute unspecified.
105 testedwith = 'ships-with-hg-core'
105 testedwith = 'ships-with-hg-core'
106
106
107 def _addpullheader(seq, ctx):
107 def _addpullheader(seq, ctx):
108 """Add a header pointing to a public URL where the changeset is available
108 """Add a header pointing to a public URL where the changeset is available
109 """
109 """
110 repo = ctx.repo()
110 repo = ctx.repo()
111 # experimental config: patchbomb.publicurl
111 # experimental config: patchbomb.publicurl
112 # waiting for some logic that checks that the changesets are available on the
112 # waiting for some logic that checks that the changesets are available on the
113 # destination before patchbombing anything.
113 # destination before patchbombing anything.
114 pullurl = repo.ui.config('patchbomb', 'publicurl')
114 pullurl = repo.ui.config('patchbomb', 'publicurl')
115 if pullurl is not None:
115 if pullurl is not None:
116 return ('Available At %s\n'
116 return ('Available At %s\n'
117 '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
117 '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
118 return None
118 return None
119
119
120 def uisetup(ui):
120 def uisetup(ui):
121 cmdutil.extraexport.append('pullurl')
121 cmdutil.extraexport.append('pullurl')
122 cmdutil.extraexportmap['pullurl'] = _addpullheader
122 cmdutil.extraexportmap['pullurl'] = _addpullheader
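With the experimental patchbomb.publicurl option set, the extra header registered here makes every exported patch carry a pull hint. The output is roughly as below (URL and hashes are placeholders, and the surrounding export headers are elided):

    # HG changeset patch
    # ...
    # Available At https://hg.example.org/repo
    # hg pull https://hg.example.org/repo -r 1234567890ab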
123
123
124
124
125 def prompt(ui, prompt, default=None, rest=':'):
125 def prompt(ui, prompt, default=None, rest=':'):
126 if default:
126 if default:
127 prompt += ' [%s]' % default
127 prompt += ' [%s]' % default
128 return ui.prompt(prompt + rest, default)
128 return ui.prompt(prompt + rest, default)
129
129
130 def introwanted(ui, opts, number):
130 def introwanted(ui, opts, number):
131 '''is an introductory message apparently wanted?'''
131 '''is an introductory message apparently wanted?'''
132 introconfig = ui.config('patchbomb', 'intro', 'auto')
132 introconfig = ui.config('patchbomb', 'intro', 'auto')
133 if opts.get('intro') or opts.get('desc'):
133 if opts.get('intro') or opts.get('desc'):
134 intro = True
134 intro = True
135 elif introconfig == 'always':
135 elif introconfig == 'always':
136 intro = True
136 intro = True
137 elif introconfig == 'never':
137 elif introconfig == 'never':
138 intro = False
138 intro = False
139 elif introconfig == 'auto':
139 elif introconfig == 'auto':
140 intro = 1 < number
140 intro = 1 < number
141 else:
141 else:
142 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
142 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
143 % introconfig)
143 % introconfig)
144 ui.write_err(_('(should be one of always, never, auto)\n'))
144 ui.write_err(_('(should be one of always, never, auto)\n'))
145 intro = 1 < number
145 intro = 1 < number
146 return intro
146 return intro
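Restated compactly, the precedence introwanted() implements is: explicit --intro/--desc flags win, then patchbomb.intro, and 'auto' (or any unrecognized value) means an introduction only for a multi-patch series. The standalone sketch below is illustrative and not part of the extension:

    def wants_intro(intro_flag, desc_flag, introconfig, npatches):
        """Illustrative mirror of the decision made by introwanted()."""
        if intro_flag or desc_flag:       # command-line flags always win
            return True
        if introconfig == 'always':
            return True
        if introconfig == 'never':
            return False
        return npatches > 1               # 'auto', and the fallback for bad values

    assert wants_intro(False, False, 'auto', 1) is False
    assert wants_intro(False, False, 'auto', 3) is True
    assert wants_intro(True, False, 'never', 1) is True   # --intro overrides 'never'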
147
147
148 def _formatflags(ui, repo, rev, flags):
148 def _formatflags(ui, repo, rev, flags):
149 """build flag string optionally by template"""
149 """build flag string optionally by template"""
150 tmpl = ui.config('patchbomb', 'flagtemplate')
150 tmpl = ui.config('patchbomb', 'flagtemplate')
151 if not tmpl:
151 if not tmpl:
152 return ' '.join(flags)
152 return ' '.join(flags)
153 out = util.stringio()
153 out = util.stringio()
154 opts = {'template': templater.unquotestring(tmpl)}
154 opts = {'template': templater.unquotestring(tmpl)}
155 with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
155 with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
156 fm.startitem()
156 fm.startitem()
157 fm.context(ctx=repo[rev])
157 fm.context(ctx=repo[rev])
158 fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
158 fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
159 return out.getvalue()
159 return out.getvalue()
160
160
161 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
161 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
162 """build prefix to patch subject"""
162 """build prefix to patch subject"""
163 flag = _formatflags(ui, repo, rev, flags)
163 flag = _formatflags(ui, repo, rev, flags)
164 if flag:
164 if flag:
165 flag = ' ' + flag
165 flag = ' ' + flag
166
166
167 if not numbered:
167 if not numbered:
168 return '[PATCH%s]' % flag
168 return '[PATCH%s]' % flag
169 else:
169 else:
170 tlen = len(str(total))
170 tlen = len(str(total))
171 return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
171 return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
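For orientation, a few illustrative inputs and the subject prefixes _formatprefix() produces when no flagtemplate is configured (values chosen for the example only):

    # numbered=False, no flags              -> '[PATCH]'
    # numbered=False, flags=['V2']          -> '[PATCH V2]'
    # numbered=True, idx=3, total=12        -> '[PATCH 03 of 12]'
    #   (the index is zero-padded to the width of str(total))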
172
172
173 def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
173 def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
174 patchname=None):
174 patchname=None):
175
175
176 desc = []
176 desc = []
177 node = None
177 node = None
178 body = ''
178 body = ''
179
179
180 for line in patchlines:
180 for line in patchlines:
181 if line.startswith('#'):
181 if line.startswith('#'):
182 if line.startswith('# Node ID'):
182 if line.startswith('# Node ID'):
183 node = line.split()[-1]
183 node = line.split()[-1]
184 continue
184 continue
185 if line.startswith('diff -r') or line.startswith('diff --git'):
185 if line.startswith('diff -r') or line.startswith('diff --git'):
186 break
186 break
187 desc.append(line)
187 desc.append(line)
188
188
189 if not patchname and not node:
189 if not patchname and not node:
190 raise ValueError
190 raise ValueError
191
191
192 if opts.get('attach') and not opts.get('body'):
192 if opts.get('attach') and not opts.get('body'):
193 body = ('\n'.join(desc[1:]).strip() or
193 body = ('\n'.join(desc[1:]).strip() or
194 'Patch subject is complete summary.')
194 'Patch subject is complete summary.')
195 body += '\n\n\n'
195 body += '\n\n\n'
196
196
197 if opts.get('plain'):
197 if opts.get('plain'):
198 while patchlines and patchlines[0].startswith('# '):
198 while patchlines and patchlines[0].startswith('# '):
199 patchlines.pop(0)
199 patchlines.pop(0)
200 if patchlines:
200 if patchlines:
201 patchlines.pop(0)
201 patchlines.pop(0)
202 while patchlines and not patchlines[0].strip():
202 while patchlines and not patchlines[0].strip():
203 patchlines.pop(0)
203 patchlines.pop(0)
204
204
205 ds = patch.diffstat(patchlines)
205 ds = patch.diffstat(patchlines)
206 if opts.get('diffstat'):
206 if opts.get('diffstat'):
207 body += ds + '\n\n'
207 body += ds + '\n\n'
208
208
209 addattachment = opts.get('attach') or opts.get('inline')
209 addattachment = opts.get('attach') or opts.get('inline')
210 if not addattachment or opts.get('body'):
210 if not addattachment or opts.get('body'):
211 body += '\n'.join(patchlines)
211 body += '\n'.join(patchlines)
212
212
213 if addattachment:
213 if addattachment:
214 msg = emailmod.MIMEMultipart.MIMEMultipart()
214 msg = emailmod.MIMEMultipart.MIMEMultipart()
215 if body:
215 if body:
216 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
216 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
217 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
217 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
218 opts.get('test'))
218 opts.get('test'))
219 binnode = nodemod.bin(node)
219 binnode = nodemod.bin(node)
220 # if node is mq patch, it will have the patch file's name as a tag
220 # if node is mq patch, it will have the patch file's name as a tag
221 if not patchname:
221 if not patchname:
222 patchtags = [t for t in repo.nodetags(binnode)
222 patchtags = [t for t in repo.nodetags(binnode)
223 if t.endswith('.patch') or t.endswith('.diff')]
223 if t.endswith('.patch') or t.endswith('.diff')]
224 if patchtags:
224 if patchtags:
225 patchname = patchtags[0]
225 patchname = patchtags[0]
226 elif total > 1:
226 elif total > 1:
227 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
227 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
228 binnode, seqno=idx,
228 binnode, seqno=idx,
229 total=total)
229 total=total)
230 else:
230 else:
231 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
231 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
232 disposition = 'inline'
232 disposition = 'inline'
233 if opts.get('attach'):
233 if opts.get('attach'):
234 disposition = 'attachment'
234 disposition = 'attachment'
235 p['Content-Disposition'] = disposition + '; filename=' + patchname
235 p['Content-Disposition'] = disposition + '; filename=' + patchname
236 msg.attach(p)
236 msg.attach(p)
237 else:
237 else:
238 msg = mail.mimetextpatch(body, display=opts.get('test'))
238 msg = mail.mimetextpatch(body, display=opts.get('test'))
239
239
240 prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
240 prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
241 numbered)
241 numbered)
242 subj = desc[0].strip().rstrip('. ')
242 subj = desc[0].strip().rstrip('. ')
243 if not numbered:
243 if not numbered:
244 subj = ' '.join([prefix, opts.get('subject') or subj])
244 subj = ' '.join([prefix, opts.get('subject') or subj])
245 else:
245 else:
246 subj = ' '.join([prefix, subj])
246 subj = ' '.join([prefix, subj])
247 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
247 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
248 msg['X-Mercurial-Node'] = node
248 msg['X-Mercurial-Node'] = node
249 msg['X-Mercurial-Series-Index'] = '%i' % idx
249 msg['X-Mercurial-Series-Index'] = '%i' % idx
250 msg['X-Mercurial-Series-Total'] = '%i' % total
250 msg['X-Mercurial-Series-Total'] = '%i' % total
251 return msg, subj, ds
251 return msg, subj, ds
252
252
253 def _getpatches(repo, revs, **opts):
253 def _getpatches(repo, revs, **opts):
254 """return a list of patches for a list of revisions
254 """return a list of patches for a list of revisions
255
255
256 Each patch in the list is itself a list of lines.
256 Each patch in the list is itself a list of lines.
257 """
257 """
258 ui = repo.ui
258 ui = repo.ui
259 prev = repo['.'].rev()
259 prev = repo['.'].rev()
260 for r in revs:
260 for r in revs:
261 if r == prev and (repo[None].files() or repo[None].deleted()):
261 if r == prev and (repo[None].files() or repo[None].deleted()):
262 ui.warn(_('warning: working directory has '
262 ui.warn(_('warning: working directory has '
263 'uncommitted changes\n'))
263 'uncommitted changes\n'))
264 output = stringio()
264 output = stringio()
265 cmdutil.export(repo, [r], fp=output,
265 cmdutil.export(repo, [r], fp=output,
266 opts=patch.difffeatureopts(ui, opts, git=True))
266 opts=patch.difffeatureopts(ui, opts, git=True))
267 yield output.getvalue().split('\n')
267 yield output.getvalue().split('\n')
268 def _getbundle(repo, dest, **opts):
268 def _getbundle(repo, dest, **opts):
269 """return a bundle containing changesets missing in "dest"
269 """return a bundle containing changesets missing in "dest"
270
270
271 The `opts` keyword-arguments are the same as the ones accepted by the
271 The `opts` keyword-arguments are the same as the ones accepted by the
272 `bundle` command.
272 `bundle` command.
273
273
274 The bundle is returned as a single in-memory binary blob.
274 The bundle is returned as a single in-memory binary blob.
275 """
275 """
276 ui = repo.ui
276 ui = repo.ui
277 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
277 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
278 tmpfn = os.path.join(tmpdir, 'bundle')
278 tmpfn = os.path.join(tmpdir, 'bundle')
279 btype = ui.config('patchbomb', 'bundletype')
279 btype = ui.config('patchbomb', 'bundletype')
280 if btype:
280 if btype:
281 opts['type'] = btype
281 opts['type'] = btype
282 try:
282 try:
283 commands.bundle(ui, repo, tmpfn, dest, **opts)
283 commands.bundle(ui, repo, tmpfn, dest, **opts)
284 return util.readfile(tmpfn)
284 return util.readfile(tmpfn)
285 finally:
285 finally:
286 try:
286 try:
287 os.unlink(tmpfn)
287 os.unlink(tmpfn)
288 except OSError:
288 except OSError:
289 pass
289 pass
290 os.rmdir(tmpdir)
290 os.rmdir(tmpdir)
291
291
292 def _getdescription(repo, defaultbody, sender, **opts):
292 def _getdescription(repo, defaultbody, sender, **opts):
293 """obtain the body of the introduction message and return it
293 """obtain the body of the introduction message and return it
294
294
295 This is also used for the body of email with an attached bundle.
295 This is also used for the body of email with an attached bundle.
296
296
297 The body can be obtained either from the command line option or entered by
297 The body can be obtained either from the command line option or entered by
298 the user through the editor.
298 the user through the editor.
299 """
299 """
300 ui = repo.ui
300 ui = repo.ui
301 if opts.get('desc'):
301 if opts.get('desc'):
302 body = open(opts.get('desc')).read()
302 body = open(opts.get('desc')).read()
303 else:
303 else:
304 ui.write(_('\nWrite the introductory message for the '
304 ui.write(_('\nWrite the introductory message for the '
305 'patch series.\n\n'))
305 'patch series.\n\n'))
306 body = ui.edit(defaultbody, sender, repopath=repo.path)
306 body = ui.edit(defaultbody, sender, repopath=repo.path)
307 # Save series description in case sendmail fails
307 # Save series description in case sendmail fails
308 msgfile = repo.vfs('last-email.txt', 'wb')
308 msgfile = repo.vfs('last-email.txt', 'wb')
309 msgfile.write(body)
309 msgfile.write(body)
310 msgfile.close()
310 msgfile.close()
311 return body
311 return body
312
312
313 def _getbundlemsgs(repo, sender, bundle, **opts):
313 def _getbundlemsgs(repo, sender, bundle, **opts):
314 """Get the full email for sending a given bundle
314 """Get the full email for sending a given bundle
315
315
316 This function returns a list of "email" tuples (subject, content, None).
316 This function returns a list of "email" tuples (subject, content, None).
317 The returned list always contains exactly one message.
317 The returned list always contains exactly one message.
318 """
318 """
319 ui = repo.ui
319 ui = repo.ui
320 _charsets = mail._charsets(ui)
320 _charsets = mail._charsets(ui)
321 subj = (opts.get('subject')
321 subj = (opts.get('subject')
322 or prompt(ui, 'Subject:', 'A bundle for your repository'))
322 or prompt(ui, 'Subject:', 'A bundle for your repository'))
323
323
324 body = _getdescription(repo, '', sender, **opts)
324 body = _getdescription(repo, '', sender, **opts)
325 msg = emailmod.MIMEMultipart.MIMEMultipart()
325 msg = emailmod.MIMEMultipart.MIMEMultipart()
326 if body:
326 if body:
327 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
327 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
328 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
328 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
329 datapart.set_payload(bundle)
329 datapart.set_payload(bundle)
330 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
330 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
331 datapart.add_header('Content-Disposition', 'attachment',
331 datapart.add_header('Content-Disposition', 'attachment',
332 filename=bundlename)
332 filename=bundlename)
333 emailmod.Encoders.encode_base64(datapart)
333 emailmod.Encoders.encode_base64(datapart)
334 msg.attach(datapart)
334 msg.attach(datapart)
335 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
335 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
336 return [(msg, subj, None)]
336 return [(msg, subj, None)]
337
337
338 def _makeintro(repo, sender, revs, patches, **opts):
338 def _makeintro(repo, sender, revs, patches, **opts):
339 """make an introduction email, asking the user for content if needed
339 """make an introduction email, asking the user for content if needed
340
340
341 email is returned as (subject, body, cumulative-diffstat)"""
341 email is returned as (subject, body, cumulative-diffstat)"""
342 ui = repo.ui
342 ui = repo.ui
343 _charsets = mail._charsets(ui)
343 _charsets = mail._charsets(ui)
344
344
345 # use the last revision which is likely to be a bookmarked head
345 # use the last revision which is likely to be a bookmarked head
346 prefix = _formatprefix(ui, repo, revs.last(), opts.get('flag'),
346 prefix = _formatprefix(ui, repo, revs.last(), opts.get('flag'),
347 0, len(patches), numbered=True)
347 0, len(patches), numbered=True)
348 subj = (opts.get('subject') or
348 subj = (opts.get('subject') or
349 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
349 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
350 if not subj:
350 if not subj:
351 return None # skip intro if the user doesn't bother
351 return None # skip intro if the user doesn't bother
352
352
353 subj = prefix + ' ' + subj
353 subj = prefix + ' ' + subj
354
354
355 body = ''
355 body = ''
356 if opts.get('diffstat'):
356 if opts.get('diffstat'):
357 # generate a cumulative diffstat of the whole patch series
357 # generate a cumulative diffstat of the whole patch series
358 diffstat = patch.diffstat(sum(patches, []))
358 diffstat = patch.diffstat(sum(patches, []))
359 body = '\n' + diffstat
359 body = '\n' + diffstat
360 else:
360 else:
361 diffstat = None
361 diffstat = None
362
362
363 body = _getdescription(repo, body, sender, **opts)
363 body = _getdescription(repo, body, sender, **opts)
364 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
364 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
365 msg['Subject'] = mail.headencode(ui, subj, _charsets,
365 msg['Subject'] = mail.headencode(ui, subj, _charsets,
366 opts.get('test'))
366 opts.get('test'))
367 return (msg, subj, diffstat)
367 return (msg, subj, diffstat)
368
368
369 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
369 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
370 """return a list of emails from a list of patches
370 """return a list of emails from a list of patches
371
371
372 This involves introduction message creation if necessary.
372 This involves introduction message creation if necessary.
373
373
374 This function returns a list of "email" tuples (subject, content, None).
374 This function returns a list of "email" tuples (subject, content, None).
375 """
375 """
376 ui = repo.ui
376 ui = repo.ui
377 _charsets = mail._charsets(ui)
377 _charsets = mail._charsets(ui)
378 patches = list(_getpatches(repo, revs, **opts))
378 patches = list(_getpatches(repo, revs, **opts))
379 msgs = []
379 msgs = []
380
380
381 ui.write(_('this patch series consists of %d patches.\n\n')
381 ui.write(_('this patch series consists of %d patches.\n\n')
382 % len(patches))
382 % len(patches))
383
383
384 # build the intro message, or skip it if the user declines
384 # build the intro message, or skip it if the user declines
385 if introwanted(ui, opts, len(patches)):
385 if introwanted(ui, opts, len(patches)):
386 msg = _makeintro(repo, sender, revs, patches, **opts)
386 msg = _makeintro(repo, sender, revs, patches, **opts)
387 if msg:
387 if msg:
388 msgs.append(msg)
388 msgs.append(msg)
389
389
390 # are we going to send more than one message?
390 # are we going to send more than one message?
391 numbered = len(msgs) + len(patches) > 1
391 numbered = len(msgs) + len(patches) > 1
392
392
393 # now generate the actual patch messages
393 # now generate the actual patch messages
394 name = None
394 name = None
395 assert len(revs) == len(patches)
395 assert len(revs) == len(patches)
396 for i, (r, p) in enumerate(zip(revs, patches)):
396 for i, (r, p) in enumerate(zip(revs, patches)):
397 if patchnames:
397 if patchnames:
398 name = patchnames[i]
398 name = patchnames[i]
399 msg = makepatch(ui, repo, r, p, opts, _charsets, i + 1,
399 msg = makepatch(ui, repo, r, p, opts, _charsets, i + 1,
400 len(patches), numbered, name)
400 len(patches), numbered, name)
401 msgs.append(msg)
401 msgs.append(msg)
402
402
403 return msgs
403 return msgs
404
404
405 def _getoutgoing(repo, dest, revs):
405 def _getoutgoing(repo, dest, revs):
406 '''Return the revisions present locally but not in dest'''
406 '''Return the revisions present locally but not in dest'''
407 ui = repo.ui
407 ui = repo.ui
408 url = ui.expandpath(dest or 'default-push', dest or 'default')
408 url = ui.expandpath(dest or 'default-push', dest or 'default')
409 url = hg.parseurl(url)[0]
409 url = hg.parseurl(url)[0]
410 ui.status(_('comparing with %s\n') % util.hidepassword(url))
410 ui.status(_('comparing with %s\n') % util.hidepassword(url))
411
411
412 revs = [r for r in revs if r >= 0]
412 revs = [r for r in revs if r >= 0]
413 if not revs:
413 if not revs:
414 revs = [len(repo) - 1]
414 revs = [len(repo) - 1]
415 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
415 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
416 if not revs:
416 if not revs:
417 ui.status(_("no changes found\n"))
417 ui.status(_("no changes found\n"))
418 return revs
418 return revs
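The revset built here combines two constraints: changesets missing from the destination (outgoing) and ancestors of the explicitly requested revisions. Spelled out as a standalone query (destination and revision are placeholders):

    # what _getoutgoing() effectively asks the repository:
    #   repo.revs('outgoing(%s) and ::%ld', 'default-push', [tip_rev])
    # i.e. changesets not present in 'default-push' that are ancestors of tip_rev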
419
419
420 emailopts = [
420 emailopts = [
421 ('', 'body', None, _('send patches as inline message text (default)')),
421 ('', 'body', None, _('send patches as inline message text (default)')),
422 ('a', 'attach', None, _('send patches as attachments')),
422 ('a', 'attach', None, _('send patches as attachments')),
423 ('i', 'inline', None, _('send patches as inline attachments')),
423 ('i', 'inline', None, _('send patches as inline attachments')),
424 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
424 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
425 ('c', 'cc', [], _('email addresses of copy recipients')),
425 ('c', 'cc', [], _('email addresses of copy recipients')),
426 ('', 'confirm', None, _('ask for confirmation before sending')),
426 ('', 'confirm', None, _('ask for confirmation before sending')),
427 ('d', 'diffstat', None, _('add diffstat output to messages')),
427 ('d', 'diffstat', None, _('add diffstat output to messages')),
428 ('', 'date', '', _('use the given date as the sending date')),
428 ('', 'date', '', _('use the given date as the sending date')),
429 ('', 'desc', '', _('use the given file as the series description')),
429 ('', 'desc', '', _('use the given file as the series description')),
430 ('f', 'from', '', _('email address of sender')),
430 ('f', 'from', '', _('email address of sender')),
431 ('n', 'test', None, _('print messages that would be sent')),
431 ('n', 'test', None, _('print messages that would be sent')),
432 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
432 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
433 ('', 'reply-to', [], _('email addresses replies should be sent to')),
433 ('', 'reply-to', [], _('email addresses replies should be sent to')),
434 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
434 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
435 ('', 'in-reply-to', '', _('message identifier to reply to')),
435 ('', 'in-reply-to', '', _('message identifier to reply to')),
436 ('', 'flag', [], _('flags to add in subject prefixes')),
436 ('', 'flag', [], _('flags to add in subject prefixes')),
437 ('t', 'to', [], _('email addresses of recipients'))]
437 ('t', 'to', [], _('email addresses of recipients'))]
438
438
439 @command('email',
439 @command('email',
440 [('g', 'git', None, _('use git extended diff format')),
440 [('g', 'git', None, _('use git extended diff format')),
441 ('', 'plain', None, _('omit hg patch header')),
441 ('', 'plain', None, _('omit hg patch header')),
442 ('o', 'outgoing', None,
442 ('o', 'outgoing', None,
443 _('send changes not found in the target repository')),
443 _('send changes not found in the target repository')),
444 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
444 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
445 ('', 'bundlename', 'bundle',
445 ('', 'bundlename', 'bundle',
446 _('name of the bundle attachment file'), _('NAME')),
446 _('name of the bundle attachment file'), _('NAME')),
447 ('r', 'rev', [], _('a revision to send'), _('REV')),
447 ('r', 'rev', [], _('a revision to send'), _('REV')),
448 ('', 'force', None, _('run even when remote repository is unrelated '
448 ('', 'force', None, _('run even when remote repository is unrelated '
449 '(with -b/--bundle)')),
449 '(with -b/--bundle)')),
450 ('', 'base', [], _('a base changeset to specify instead of a destination '
450 ('', 'base', [], _('a base changeset to specify instead of a destination '
451 '(with -b/--bundle)'), _('REV')),
451 '(with -b/--bundle)'), _('REV')),
452 ('', 'intro', None, _('send an introduction email for a single patch')),
452 ('', 'intro', None, _('send an introduction email for a single patch')),
453 ] + emailopts + commands.remoteopts,
453 ] + emailopts + cmdutil.remoteopts,
454 _('hg email [OPTION]... [DEST]...'))
454 _('hg email [OPTION]... [DEST]...'))
455 def email(ui, repo, *revs, **opts):
455 def email(ui, repo, *revs, **opts):
456 '''send changesets by email
456 '''send changesets by email
457
457
458 By default, diffs are sent in the format generated by
458 By default, diffs are sent in the format generated by
459 :hg:`export`, one per message. The series starts with a "[PATCH 0
459 :hg:`export`, one per message. The series starts with a "[PATCH 0
460 of N]" introduction, which describes the series as a whole.
460 of N]" introduction, which describes the series as a whole.
461
461
462 Each patch email has a Subject line of "[PATCH M of N] ...", using
462 Each patch email has a Subject line of "[PATCH M of N] ...", using
463 the first line of the changeset description as the subject text.
463 the first line of the changeset description as the subject text.
464 The message contains two or three parts. First, the changeset
464 The message contains two or three parts. First, the changeset
465 description.
465 description.
466
466
467 With the -d/--diffstat option, if the diffstat program is
467 With the -d/--diffstat option, if the diffstat program is
468 installed, the result of running diffstat on the patch is inserted.
468 installed, the result of running diffstat on the patch is inserted.
469
469
470 Finally, the patch itself, as generated by :hg:`export`.
470 Finally, the patch itself, as generated by :hg:`export`.
471
471
472 With the -d/--diffstat or --confirm options, you will be presented
472 With the -d/--diffstat or --confirm options, you will be presented
473 with a final summary of all messages and asked for confirmation before
473 with a final summary of all messages and asked for confirmation before
474 the messages are sent.
474 the messages are sent.
475
475
476 By default the patch is included as text in the email body for
476 By default the patch is included as text in the email body for
477 easy reviewing. Using the -a/--attach option will instead create
477 easy reviewing. Using the -a/--attach option will instead create
478 an attachment for the patch. With -i/--inline an inline attachment
478 an attachment for the patch. With -i/--inline an inline attachment
479 will be created. You can include a patch both as text in the email
479 will be created. You can include a patch both as text in the email
480 body and as a regular or an inline attachment by combining the
480 body and as a regular or an inline attachment by combining the
481 -a/--attach or -i/--inline with the --body option.
481 -a/--attach or -i/--inline with the --body option.
482
482
483 With -o/--outgoing, emails will be generated for patches not found
483 With -o/--outgoing, emails will be generated for patches not found
484 in the destination repository (or only those which are ancestors
484 in the destination repository (or only those which are ancestors
485 of the specified revisions if any are provided)
485 of the specified revisions if any are provided)
486
486
487 With -b/--bundle, changesets are selected as for --outgoing, but a
487 With -b/--bundle, changesets are selected as for --outgoing, but a
488 single email containing a binary Mercurial bundle as an attachment
488 single email containing a binary Mercurial bundle as an attachment
489 will be sent. Use the ``patchbomb.bundletype`` config option to
489 will be sent. Use the ``patchbomb.bundletype`` config option to
490 control the bundle type as with :hg:`bundle --type`.
490 control the bundle type as with :hg:`bundle --type`.
491
491
492 With -m/--mbox, instead of previewing each patchbomb message in a
492 With -m/--mbox, instead of previewing each patchbomb message in a
493 pager or sending the messages directly, it will create a UNIX
493 pager or sending the messages directly, it will create a UNIX
494 mailbox file with the patch emails. This mailbox file can be
494 mailbox file with the patch emails. This mailbox file can be
495 previewed with any mail user agent which supports UNIX mbox
495 previewed with any mail user agent which supports UNIX mbox
496 files.
496 files.
497
497
498 With -n/--test, all steps will run, but mail will not be sent.
498 With -n/--test, all steps will run, but mail will not be sent.
499 You will be prompted for an email recipient address, a subject and
499 You will be prompted for an email recipient address, a subject and
500 an introductory message describing the patches of your patchbomb.
500 an introductory message describing the patches of your patchbomb.
501 Then when all is done, patchbomb messages are displayed.
501 Then when all is done, patchbomb messages are displayed.
502
502
503 In case email sending fails, you will find a backup of your series
503 In case email sending fails, you will find a backup of your series
504 introductory message in ``.hg/last-email.txt``.
504 introductory message in ``.hg/last-email.txt``.
505
505
506 The default behavior of this command can be customized through
506 The default behavior of this command can be customized through
507 configuration. (See :hg:`help patchbomb` for details)
507 configuration. (See :hg:`help patchbomb` for details)
508
508
509 Examples::
509 Examples::
510
510
511 hg email -r 3000 # send patch 3000 only
511 hg email -r 3000 # send patch 3000 only
512 hg email -r 3000 -r 3001 # send patches 3000 and 3001
512 hg email -r 3000 -r 3001 # send patches 3000 and 3001
513 hg email -r 3000:3005 # send patches 3000 through 3005
513 hg email -r 3000:3005 # send patches 3000 through 3005
514 hg email 3000 # send patch 3000 (deprecated)
514 hg email 3000 # send patch 3000 (deprecated)
515
515
516 hg email -o # send all patches not in default
516 hg email -o # send all patches not in default
517 hg email -o DEST # send all patches not in DEST
517 hg email -o DEST # send all patches not in DEST
518 hg email -o -r 3000 # send all ancestors of 3000 not in default
518 hg email -o -r 3000 # send all ancestors of 3000 not in default
519 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
519 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
520
520
521 hg email -b # send bundle of all patches not in default
521 hg email -b # send bundle of all patches not in default
522 hg email -b DEST # send bundle of all patches not in DEST
522 hg email -b DEST # send bundle of all patches not in DEST
523 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
523 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
524 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
524 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
525
525
526 hg email -o -m mbox && # generate an mbox file...
526 hg email -o -m mbox && # generate an mbox file...
527 mutt -R -f mbox # ... and view it with mutt
527 mutt -R -f mbox # ... and view it with mutt
528 hg email -o -m mbox && # generate an mbox file ...
528 hg email -o -m mbox && # generate an mbox file ...
529 formail -s sendmail \\ # ... and use formail to send from the mbox
529 formail -s sendmail \\ # ... and use formail to send from the mbox
530 -bm -t < mbox # ... using sendmail
530 -bm -t < mbox # ... using sendmail
531
531
532 Before using this command, you will need to enable email in your
532 Before using this command, you will need to enable email in your
533 hgrc. See the [email] section in hgrc(5) for details.
533 hgrc. See the [email] section in hgrc(5) for details.
534 '''
534 '''
535
535
536 _charsets = mail._charsets(ui)
536 _charsets = mail._charsets(ui)
537
537
538 bundle = opts.get('bundle')
538 bundle = opts.get('bundle')
539 date = opts.get('date')
539 date = opts.get('date')
540 mbox = opts.get('mbox')
540 mbox = opts.get('mbox')
541 outgoing = opts.get('outgoing')
541 outgoing = opts.get('outgoing')
542 rev = opts.get('rev')
542 rev = opts.get('rev')
543
543
544 if not (opts.get('test') or mbox):
544 if not (opts.get('test') or mbox):
545 # really sending
545 # really sending
546 mail.validateconfig(ui)
546 mail.validateconfig(ui)
547
547
548 if not (revs or rev or outgoing or bundle):
548 if not (revs or rev or outgoing or bundle):
549 raise error.Abort(_('specify at least one changeset with -r or -o'))
549 raise error.Abort(_('specify at least one changeset with -r or -o'))
550
550
551 if outgoing and bundle:
551 if outgoing and bundle:
552 raise error.Abort(_("--outgoing mode always on with --bundle;"
552 raise error.Abort(_("--outgoing mode always on with --bundle;"
553 " do not re-specify --outgoing"))
553 " do not re-specify --outgoing"))
554
554
555 if outgoing or bundle:
555 if outgoing or bundle:
556 if len(revs) > 1:
556 if len(revs) > 1:
557 raise error.Abort(_("too many destinations"))
557 raise error.Abort(_("too many destinations"))
558 if revs:
558 if revs:
559 dest = revs[0]
559 dest = revs[0]
560 else:
560 else:
561 dest = None
561 dest = None
562 revs = []
562 revs = []
563
563
564 if rev:
564 if rev:
565 if revs:
565 if revs:
566 raise error.Abort(_('use only one form to specify the revision'))
566 raise error.Abort(_('use only one form to specify the revision'))
567 revs = rev
567 revs = rev
568
568
569 revs = scmutil.revrange(repo, revs)
569 revs = scmutil.revrange(repo, revs)
570 if outgoing:
570 if outgoing:
571 revs = _getoutgoing(repo, dest, revs)
571 revs = _getoutgoing(repo, dest, revs)
572 if bundle:
572 if bundle:
573 opts['revs'] = [str(r) for r in revs]
573 opts['revs'] = [str(r) for r in revs]
574
574
575 # check if revision exist on the public destination
575 # check if revision exist on the public destination
576 publicurl = repo.ui.config('patchbomb', 'publicurl')
576 publicurl = repo.ui.config('patchbomb', 'publicurl')
577 if publicurl is not None:
577 if publicurl is not None:
578 repo.ui.debug('checking that revisions exist in the public repo')
578 repo.ui.debug('checking that revisions exist in the public repo')
579 try:
579 try:
580 publicpeer = hg.peer(repo, {}, publicurl)
580 publicpeer = hg.peer(repo, {}, publicurl)
581 except error.RepoError:
581 except error.RepoError:
582 repo.ui.write_err(_('unable to access public repo: %s\n')
582 repo.ui.write_err(_('unable to access public repo: %s\n')
583 % publicurl)
583 % publicurl)
584 raise
584 raise
585 if not publicpeer.capable('known'):
585 if not publicpeer.capable('known'):
586 repo.ui.debug('skipping existence checks: public repo too old')
586 repo.ui.debug('skipping existence checks: public repo too old')
587 else:
587 else:
588 out = [repo[r] for r in revs]
588 out = [repo[r] for r in revs]
589 known = publicpeer.known(h.node() for h in out)
589 known = publicpeer.known(h.node() for h in out)
590 missing = []
590 missing = []
591 for idx, h in enumerate(out):
591 for idx, h in enumerate(out):
592 if not known[idx]:
592 if not known[idx]:
593 missing.append(h)
593 missing.append(h)
594 if missing:
594 if missing:
595 if 1 < len(missing):
595 if 1 < len(missing):
596 msg = _('public "%s" is missing %s and %i others')
596 msg = _('public "%s" is missing %s and %i others')
597 msg %= (publicurl, missing[0], len(missing) - 1)
597 msg %= (publicurl, missing[0], len(missing) - 1)
598 else:
598 else:
599 msg = _('public url %s is missing %s')
599 msg = _('public url %s is missing %s')
600 msg %= (publicurl, missing[0])
600 msg %= (publicurl, missing[0])
601 revhint = ' '.join('-r %s' % h
601 revhint = ' '.join('-r %s' % h
602 for h in repo.set('heads(%ld)', missing))
602 for h in repo.set('heads(%ld)', missing))
603 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
603 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
604 raise error.Abort(msg, hint=hint)
604 raise error.Abort(msg, hint=hint)
605
605
606 # start
606 # start
607 if date:
607 if date:
608 start_time = util.parsedate(date)
608 start_time = util.parsedate(date)
609 else:
609 else:
610 start_time = util.makedate()
610 start_time = util.makedate()
611
611
612 def genmsgid(id):
612 def genmsgid(id):
613 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
613 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
614
614
615 # deprecated config: patchbomb.from
615 # deprecated config: patchbomb.from
616 sender = (opts.get('from') or ui.config('email', 'from') or
616 sender = (opts.get('from') or ui.config('email', 'from') or
617 ui.config('patchbomb', 'from') or
617 ui.config('patchbomb', 'from') or
618 prompt(ui, 'From', ui.username()))
618 prompt(ui, 'From', ui.username()))
619
619
620 if bundle:
620 if bundle:
621 bundledata = _getbundle(repo, dest, **opts)
621 bundledata = _getbundle(repo, dest, **opts)
622 bundleopts = opts.copy()
622 bundleopts = opts.copy()
623 bundleopts.pop('bundle', None) # already processed
623 bundleopts.pop('bundle', None) # already processed
624 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
624 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
625 else:
625 else:
626 msgs = _getpatchmsgs(repo, sender, revs, **opts)
626 msgs = _getpatchmsgs(repo, sender, revs, **opts)
627
627
628 showaddrs = []
628 showaddrs = []
629
629
630 def getaddrs(header, ask=False, default=None):
630 def getaddrs(header, ask=False, default=None):
631 configkey = header.lower()
631 configkey = header.lower()
632 opt = header.replace('-', '_').lower()
632 opt = header.replace('-', '_').lower()
633 addrs = opts.get(opt)
633 addrs = opts.get(opt)
634 if addrs:
634 if addrs:
635 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
635 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
636 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
636 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
637
637
638 # not on the command line: fallback to config and then maybe ask
638 # not on the command line: fallback to config and then maybe ask
639 addr = (ui.config('email', configkey) or
639 addr = (ui.config('email', configkey) or
640 ui.config('patchbomb', configkey))
640 ui.config('patchbomb', configkey))
641 if not addr:
641 if not addr:
642 specified = (ui.hasconfig('email', configkey) or
642 specified = (ui.hasconfig('email', configkey) or
643 ui.hasconfig('patchbomb', configkey))
643 ui.hasconfig('patchbomb', configkey))
644 if not specified and ask:
644 if not specified and ask:
645 addr = prompt(ui, header, default=default)
645 addr = prompt(ui, header, default=default)
646 if addr:
646 if addr:
647 showaddrs.append('%s: %s' % (header, addr))
647 showaddrs.append('%s: %s' % (header, addr))
648 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
648 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
649 else:
649 else:
650 return default
650 return default
651
651
652 to = getaddrs('To', ask=True)
652 to = getaddrs('To', ask=True)
653 if not to:
653 if not to:
654 # we can get here in non-interactive mode
654 # we can get here in non-interactive mode
655 raise error.Abort(_('no recipient addresses provided'))
655 raise error.Abort(_('no recipient addresses provided'))
656 cc = getaddrs('Cc', ask=True, default='') or []
656 cc = getaddrs('Cc', ask=True, default='') or []
657 bcc = getaddrs('Bcc') or []
657 bcc = getaddrs('Bcc') or []
658 replyto = getaddrs('Reply-To')
658 replyto = getaddrs('Reply-To')
659
659
660 confirm = ui.configbool('patchbomb', 'confirm')
660 confirm = ui.configbool('patchbomb', 'confirm')
661 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
661 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
662
662
663 if confirm:
663 if confirm:
664 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
664 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
665 ui.write(('From: %s\n' % sender), label='patchbomb.from')
665 ui.write(('From: %s\n' % sender), label='patchbomb.from')
666 for addr in showaddrs:
666 for addr in showaddrs:
667 ui.write('%s\n' % addr, label='patchbomb.to')
667 ui.write('%s\n' % addr, label='patchbomb.to')
668 for m, subj, ds in msgs:
668 for m, subj, ds in msgs:
669 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
669 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
670 if ds:
670 if ds:
671 ui.write(ds, label='patchbomb.diffstats')
671 ui.write(ds, label='patchbomb.diffstats')
672 ui.write('\n')
672 ui.write('\n')
673 if ui.promptchoice(_('are you sure you want to send (yn)?'
673 if ui.promptchoice(_('are you sure you want to send (yn)?'
674 '$$ &Yes $$ &No')):
674 '$$ &Yes $$ &No')):
675 raise error.Abort(_('patchbomb canceled'))
675 raise error.Abort(_('patchbomb canceled'))
676
676
677 ui.write('\n')
677 ui.write('\n')
678
678
679 parent = opts.get('in_reply_to') or None
679 parent = opts.get('in_reply_to') or None
680 # angle brackets may be omitted, they're not semantically part of the msg-id
680 # angle brackets may be omitted, they're not semantically part of the msg-id
681 if parent is not None:
681 if parent is not None:
682 if not parent.startswith('<'):
682 if not parent.startswith('<'):
683 parent = '<' + parent
683 parent = '<' + parent
684 if not parent.endswith('>'):
684 if not parent.endswith('>'):
685 parent += '>'
685 parent += '>'
686
686
687 sender_addr = emailmod.Utils.parseaddr(sender)[1]
687 sender_addr = emailmod.Utils.parseaddr(sender)[1]
688 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
688 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
689 sendmail = None
689 sendmail = None
690 firstpatch = None
690 firstpatch = None
691 for i, (m, subj, ds) in enumerate(msgs):
691 for i, (m, subj, ds) in enumerate(msgs):
692 try:
692 try:
693 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
693 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
694 if not firstpatch:
694 if not firstpatch:
695 firstpatch = m['Message-Id']
695 firstpatch = m['Message-Id']
696 m['X-Mercurial-Series-Id'] = firstpatch
696 m['X-Mercurial-Series-Id'] = firstpatch
697 except TypeError:
697 except TypeError:
698 m['Message-Id'] = genmsgid('patchbomb')
698 m['Message-Id'] = genmsgid('patchbomb')
699 if parent:
699 if parent:
700 m['In-Reply-To'] = parent
700 m['In-Reply-To'] = parent
701 m['References'] = parent
701 m['References'] = parent
702 if not parent or 'X-Mercurial-Node' not in m:
702 if not parent or 'X-Mercurial-Node' not in m:
703 parent = m['Message-Id']
703 parent = m['Message-Id']
704
704
705 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
705 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
706 m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)
706 m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)
707
707
708 start_time = (start_time[0] + 1, start_time[1])
708 start_time = (start_time[0] + 1, start_time[1])
709 m['From'] = sender
709 m['From'] = sender
710 m['To'] = ', '.join(to)
710 m['To'] = ', '.join(to)
711 if cc:
711 if cc:
712 m['Cc'] = ', '.join(cc)
712 m['Cc'] = ', '.join(cc)
713 if bcc:
713 if bcc:
714 m['Bcc'] = ', '.join(bcc)
714 m['Bcc'] = ', '.join(bcc)
715 if replyto:
715 if replyto:
716 m['Reply-To'] = ', '.join(replyto)
716 m['Reply-To'] = ', '.join(replyto)
717 if opts.get('test'):
717 if opts.get('test'):
718 ui.status(_('displaying '), subj, ' ...\n')
718 ui.status(_('displaying '), subj, ' ...\n')
719 ui.pager('email')
719 ui.pager('email')
720 generator = emailmod.Generator.Generator(ui, mangle_from_=False)
720 generator = emailmod.Generator.Generator(ui, mangle_from_=False)
721 try:
721 try:
722 generator.flatten(m, 0)
722 generator.flatten(m, 0)
723 ui.write('\n')
723 ui.write('\n')
724 except IOError as inst:
724 except IOError as inst:
725 if inst.errno != errno.EPIPE:
725 if inst.errno != errno.EPIPE:
726 raise
726 raise
727 else:
727 else:
728 if not sendmail:
728 if not sendmail:
729 sendmail = mail.connect(ui, mbox=mbox)
729 sendmail = mail.connect(ui, mbox=mbox)
730 ui.status(_('sending '), subj, ' ...\n')
730 ui.status(_('sending '), subj, ' ...\n')
731 ui.progress(_('sending'), i, item=subj, total=len(msgs),
731 ui.progress(_('sending'), i, item=subj, total=len(msgs),
732 unit=_('emails'))
732 unit=_('emails'))
733 if not mbox:
733 if not mbox:
734 # Exim does not remove the Bcc field
734 # Exim does not remove the Bcc field
735 del m['Bcc']
735 del m['Bcc']
736 fp = stringio()
736 fp = stringio()
737 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
737 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
738 generator.flatten(m, 0)
738 generator.flatten(m, 0)
739 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
739 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
740
740
741 ui.progress(_('writing'), None)
741 ui.progress(_('writing'), None)
742 ui.progress(_('sending'), None)
742 ui.progress(_('sending'), None)
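The loop above threads the series: the --in-reply-to value is normalized to a proper msg-id (angle brackets added if missing), each message gets a Message-Id, and later messages carry In-Reply-To/References pointing at their parent. A minimal standalone sketch of that threading pattern using only the stdlib email package follows; the thread_messages helper and its flat-threading policy are illustrative, not patchbomb's exact behaviour.

import email.message
import email.utils

def thread_messages(subjects, in_reply_to=None):
    # Build messages chained via In-Reply-To/References (illustrative only).
    parent = in_reply_to
    # angle brackets are not semantically part of the msg-id, so accept
    # values given without them
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'
    msgs = []
    for subj in subjects:
        m = email.message.Message()
        m['Message-Id'] = email.utils.make_msgid('patch')
        m['Subject'] = subj
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        else:
            # with no explicit parent, the first message heads the thread
            parent = m['Message-Id']
        msgs.append(m)
    return msgs

Run against three subjects with no in_reply_to, the second and third messages reply to the first, which is what makes mail clients display the series as a single thread.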
@@ -1,127 +1,127 b''
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 #
2 #
3 # This is a small extension for Mercurial (https://mercurial-scm.org/)
3 # This is a small extension for Mercurial (https://mercurial-scm.org/)
4 # that removes files not known to mercurial
4 # that removes files not known to mercurial
5 #
5 #
6 # This program was inspired by the "cvspurge" script contained in CVS
6 # This program was inspired by the "cvspurge" script contained in CVS
7 # utilities (http://www.red-bean.com/cvsutils/).
7 # utilities (http://www.red-bean.com/cvsutils/).
8 #
8 #
9 # For help on the usage of "hg purge" use:
9 # For help on the usage of "hg purge" use:
10 # hg help purge
10 # hg help purge
11 #
11 #
12 # This program is free software; you can redistribute it and/or modify
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
15 # (at your option) any later version.
16 #
16 #
17 # This program is distributed in the hope that it will be useful,
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
20 # GNU General Public License for more details.
21 #
21 #
22 # You should have received a copy of the GNU General Public License
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, see <http://www.gnu.org/licenses/>.
23 # along with this program; if not, see <http://www.gnu.org/licenses/>.
24
24
25 '''command to delete untracked files from the working directory'''
25 '''command to delete untracked files from the working directory'''
26 from __future__ import absolute_import
26 from __future__ import absolute_import
27
27
28 import os
28 import os
29
29
30 from mercurial.i18n import _
30 from mercurial.i18n import _
31 from mercurial import (
31 from mercurial import (
32 commands,
32 cmdutil,
33 error,
33 error,
34 registrar,
34 registrar,
35 scmutil,
35 scmutil,
36 util,
36 util,
37 )
37 )
38
38
39 cmdtable = {}
39 cmdtable = {}
40 command = registrar.command(cmdtable)
40 command = registrar.command(cmdtable)
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
43 # be specifying the version(s) of Mercurial they are tested with, or
43 # be specifying the version(s) of Mercurial they are tested with, or
44 # leave the attribute unspecified.
44 # leave the attribute unspecified.
45 testedwith = 'ships-with-hg-core'
45 testedwith = 'ships-with-hg-core'
46
46
47 @command('purge|clean',
47 @command('purge|clean',
48 [('a', 'abort-on-err', None, _('abort if an error occurs')),
48 [('a', 'abort-on-err', None, _('abort if an error occurs')),
49 ('', 'all', None, _('purge ignored files too')),
49 ('', 'all', None, _('purge ignored files too')),
50 ('', 'dirs', None, _('purge empty directories')),
50 ('', 'dirs', None, _('purge empty directories')),
51 ('', 'files', None, _('purge files')),
51 ('', 'files', None, _('purge files')),
52 ('p', 'print', None, _('print filenames instead of deleting them')),
52 ('p', 'print', None, _('print filenames instead of deleting them')),
53 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
53 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
54 ' (implies -p/--print)')),
54 ' (implies -p/--print)')),
55 ] + commands.walkopts,
55 ] + cmdutil.walkopts,
56 _('hg purge [OPTION]... [DIR]...'))
56 _('hg purge [OPTION]... [DIR]...'))
57 def purge(ui, repo, *dirs, **opts):
57 def purge(ui, repo, *dirs, **opts):
58 '''removes files not tracked by Mercurial
58 '''removes files not tracked by Mercurial
59
59
60 Delete files not known to Mercurial. This is useful to test local
60 Delete files not known to Mercurial. This is useful to test local
61 and uncommitted changes in an otherwise-clean source tree.
61 and uncommitted changes in an otherwise-clean source tree.
62
62
63 This means that purge will delete the following by default:
63 This means that purge will delete the following by default:
64
64
65 - Unknown files: files marked with "?" by :hg:`status`
65 - Unknown files: files marked with "?" by :hg:`status`
66 - Empty directories: in fact Mercurial ignores directories unless
66 - Empty directories: in fact Mercurial ignores directories unless
67 they contain files under source control management
67 they contain files under source control management
68
68
69 But it will leave untouched:
69 But it will leave untouched:
70
70
71 - Modified and unmodified tracked files
71 - Modified and unmodified tracked files
72 - Ignored files (unless --all is specified)
72 - Ignored files (unless --all is specified)
73 - New files added to the repository (with :hg:`add`)
73 - New files added to the repository (with :hg:`add`)
74
74
75 The --files and --dirs options can be used to direct purge to delete
75 The --files and --dirs options can be used to direct purge to delete
76 only files, only directories, or both. If neither option is given,
76 only files, only directories, or both. If neither option is given,
77 both will be deleted.
77 both will be deleted.
78
78
79 If directories are given on the command line, only files in these
79 If directories are given on the command line, only files in these
80 directories are considered.
80 directories are considered.
81
81
82 Be careful with purge, as you could irreversibly delete some files
82 Be careful with purge, as you could irreversibly delete some files
83 you forgot to add to the repository. If you only want to print the
83 you forgot to add to the repository. If you only want to print the
84 list of files that this program would delete, use the --print
84 list of files that this program would delete, use the --print
85 option.
85 option.
86 '''
86 '''
87 act = not opts.get('print')
87 act = not opts.get('print')
88 eol = '\n'
88 eol = '\n'
89 if opts.get('print0'):
89 if opts.get('print0'):
90 eol = '\0'
90 eol = '\0'
91 act = False # --print0 implies --print
91 act = False # --print0 implies --print
92 removefiles = opts.get('files')
92 removefiles = opts.get('files')
93 removedirs = opts.get('dirs')
93 removedirs = opts.get('dirs')
94 if not removefiles and not removedirs:
94 if not removefiles and not removedirs:
95 removefiles = True
95 removefiles = True
96 removedirs = True
96 removedirs = True
97
97
98 def remove(remove_func, name):
98 def remove(remove_func, name):
99 if act:
99 if act:
100 try:
100 try:
101 remove_func(repo.wjoin(name))
101 remove_func(repo.wjoin(name))
102 except OSError:
102 except OSError:
103 m = _('%s cannot be removed') % name
103 m = _('%s cannot be removed') % name
104 if opts.get('abort_on_err'):
104 if opts.get('abort_on_err'):
105 raise error.Abort(m)
105 raise error.Abort(m)
106 ui.warn(_('warning: %s\n') % m)
106 ui.warn(_('warning: %s\n') % m)
107 else:
107 else:
108 ui.write('%s%s' % (name, eol))
108 ui.write('%s%s' % (name, eol))
109
109
110 match = scmutil.match(repo[None], dirs, opts)
110 match = scmutil.match(repo[None], dirs, opts)
111 if removedirs:
111 if removedirs:
112 directories = []
112 directories = []
113 match.explicitdir = match.traversedir = directories.append
113 match.explicitdir = match.traversedir = directories.append
114 status = repo.status(match=match, ignored=opts.get('all'), unknown=True)
114 status = repo.status(match=match, ignored=opts.get('all'), unknown=True)
115
115
116 if removefiles:
116 if removefiles:
117 for f in sorted(status.unknown + status.ignored):
117 for f in sorted(status.unknown + status.ignored):
118 if act:
118 if act:
119 ui.note(_('removing file %s\n') % f)
119 ui.note(_('removing file %s\n') % f)
120 remove(util.unlink, f)
120 remove(util.unlink, f)
121
121
122 if removedirs:
122 if removedirs:
123 for f in sorted(directories, reverse=True):
123 for f in sorted(directories, reverse=True):
124 if match(f) and not os.listdir(repo.wjoin(f)):
124 if match(f) and not os.listdir(repo.wjoin(f)):
125 if act:
125 if act:
126 ui.note(_('removing directory %s\n') % f)
126 ui.note(_('removing directory %s\n') % f)
127 remove(os.rmdir, f)
127 remove(os.rmdir, f)
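purge shows the pattern this changeset applies across the bundled extensions: shared option lists such as walkopts now come from cmdutil rather than commands. A hedged sketch of a third-party extension command reusing cmdutil.walkopts the same way; the 'hello' command, its options, and its body are invented for illustration.

from mercurial.i18n import _
from mercurial import (
    cmdutil,
    registrar,
    scmutil,
)

cmdtable = {}
command = registrar.command(cmdtable)
testedwith = '4.2'  # example version only; see the testedwith note above

@command('hello',
    [('n', 'name', '', _('name to greet'), _('NAME')),
    ] + cmdutil.walkopts,
    _('hg hello [OPTION]... [FILE]...'))
def hello(ui, repo, *pats, **opts):
    '''print a greeting, then the working-directory files matched by
    the walk options (illustrative only)'''
    ui.write('hello %s\n' % (opts.get('name') or 'world'))
    m = scmutil.match(repo[None], pats, opts)
    for f in repo[None].walk(m):
        ui.write('%s\n' % f)

Because the option list is concatenated with cmdutil.walkopts, the command accepts -I/--include and -X/--exclude patterns exactly as purge above does.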
@@ -1,1540 +1,1540 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 hex,
24 hex,
25 nullid,
25 nullid,
26 nullrev,
26 nullrev,
27 short,
27 short,
28 )
28 )
29 from mercurial import (
29 from mercurial import (
30 bookmarks,
30 bookmarks,
31 cmdutil,
31 cmdutil,
32 commands,
32 commands,
33 copies,
33 copies,
34 destutil,
34 destutil,
35 dirstateguard,
35 dirstateguard,
36 error,
36 error,
37 extensions,
37 extensions,
38 hg,
38 hg,
39 lock,
39 lock,
40 merge as mergemod,
40 merge as mergemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 phases,
44 phases,
45 registrar,
45 registrar,
46 repair,
46 repair,
47 repoview,
47 repoview,
48 revset,
48 revset,
49 scmutil,
49 scmutil,
50 smartset,
50 smartset,
51 util,
51 util,
52 )
52 )
53
53
54 release = lock.release
54 release = lock.release
55 templateopts = commands.templateopts
55 templateopts = cmdutil.templateopts
56
56
57 # The following constants are used throughout the rebase module. The ordering of
57 # The following constants are used throughout the rebase module. The ordering of
58 # their values must be maintained.
58 # their values must be maintained.
59
59
60 # Indicates that a revision needs to be rebased
60 # Indicates that a revision needs to be rebased
61 revtodo = -1
61 revtodo = -1
62 nullmerge = -2
62 nullmerge = -2
63 revignored = -3
63 revignored = -3
64 # successor in rebase destination
64 # successor in rebase destination
65 revprecursor = -4
65 revprecursor = -4
66 # plain prune (no successor)
66 # plain prune (no successor)
67 revpruned = -5
67 revpruned = -5
68 revskipped = (revignored, revprecursor, revpruned)
68 revskipped = (revignored, revprecursor, revpruned)
69
69
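These sentinel values live in the rebase state mapping (old revision -> new revision, or one of the negative action codes). A small helper, not part of rebase.py, spelling out how an entry would be read; it mirrors the comments above and the status messages printed later in _performrebase.

def describestate(v):
    '''Return a human-readable meaning for a rebase state value (illustrative).'''
    if v >= 0:
        return 'already rebased to revision %d' % v
    if v == revtodo:       # -1
        return 'still needs to be rebased'
    if v == nullmerge:     # -2
        return 'null merge, nothing to rebase'
    if v == revignored:    # -3
        return 'ignored, not rebased'
    if v == revprecursor:  # -4
        return 'obsolete, successor already in destination'
    if v == revpruned:     # -5
        return 'obsolete and pruned, no successor'
    return 'unknown state %r' % v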
70 cmdtable = {}
70 cmdtable = {}
71 command = registrar.command(cmdtable)
71 command = registrar.command(cmdtable)
72 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
72 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
73 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
73 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
74 # be specifying the version(s) of Mercurial they are tested with, or
74 # be specifying the version(s) of Mercurial they are tested with, or
75 # leave the attribute unspecified.
75 # leave the attribute unspecified.
76 testedwith = 'ships-with-hg-core'
76 testedwith = 'ships-with-hg-core'
77
77
78 def _nothingtorebase():
78 def _nothingtorebase():
79 return 1
79 return 1
80
80
81 def _savegraft(ctx, extra):
81 def _savegraft(ctx, extra):
82 s = ctx.extra().get('source', None)
82 s = ctx.extra().get('source', None)
83 if s is not None:
83 if s is not None:
84 extra['source'] = s
84 extra['source'] = s
85 s = ctx.extra().get('intermediate-source', None)
85 s = ctx.extra().get('intermediate-source', None)
86 if s is not None:
86 if s is not None:
87 extra['intermediate-source'] = s
87 extra['intermediate-source'] = s
88
88
89 def _savebranch(ctx, extra):
89 def _savebranch(ctx, extra):
90 extra['branch'] = ctx.branch()
90 extra['branch'] = ctx.branch()
91
91
92 def _makeextrafn(copiers):
92 def _makeextrafn(copiers):
93 """make an extrafn out of the given copy-functions.
93 """make an extrafn out of the given copy-functions.
94
94
95 A copy function takes a context and an extra dict, and mutates the
95 A copy function takes a context and an extra dict, and mutates the
96 extra dict as needed based on the given context.
96 extra dict as needed based on the given context.
97 """
97 """
98 def extrafn(ctx, extra):
98 def extrafn(ctx, extra):
99 for c in copiers:
99 for c in copiers:
100 c(ctx, extra)
100 c(ctx, extra)
101 return extrafn
101 return extrafn
102
102
103 def _destrebase(repo, sourceset, destspace=None):
103 def _destrebase(repo, sourceset, destspace=None):
104 """small wrapper around destmerge to pass the right extra args
104 """small wrapper around destmerge to pass the right extra args
105
105
106 Please wrap destutil.destmerge instead."""
106 Please wrap destutil.destmerge instead."""
107 return destutil.destmerge(repo, action='rebase', sourceset=sourceset,
107 return destutil.destmerge(repo, action='rebase', sourceset=sourceset,
108 onheadcheck=False, destspace=destspace)
108 onheadcheck=False, destspace=destspace)
109
109
110 revsetpredicate = registrar.revsetpredicate()
110 revsetpredicate = registrar.revsetpredicate()
111
111
112 @revsetpredicate('_destrebase')
112 @revsetpredicate('_destrebase')
113 def _revsetdestrebase(repo, subset, x):
113 def _revsetdestrebase(repo, subset, x):
114 # ``_rebasedefaultdest()``
114 # ``_rebasedefaultdest()``
115
115
116 # default destination for rebase.
116 # default destination for rebase.
117 # # XXX: Currently private because I expect the signature to change.
117 # # XXX: Currently private because I expect the signature to change.
118 # # XXX: - bailing out in case of ambiguity vs returning all data.
118 # # XXX: - bailing out in case of ambiguity vs returning all data.
119 # i18n: "_rebasedefaultdest" is a keyword
119 # i18n: "_rebasedefaultdest" is a keyword
120 sourceset = None
120 sourceset = None
121 if x is not None:
121 if x is not None:
122 sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
122 sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
123 return subset & smartset.baseset([_destrebase(repo, sourceset)])
123 return subset & smartset.baseset([_destrebase(repo, sourceset)])
124
124
125 class rebaseruntime(object):
125 class rebaseruntime(object):
126 """This class is a container for rebase runtime state"""
126 """This class is a container for rebase runtime state"""
127 def __init__(self, repo, ui, opts=None):
127 def __init__(self, repo, ui, opts=None):
128 if opts is None:
128 if opts is None:
129 opts = {}
129 opts = {}
130
130
131 self.repo = repo
131 self.repo = repo
132 self.ui = ui
132 self.ui = ui
133 self.opts = opts
133 self.opts = opts
134 self.originalwd = None
134 self.originalwd = None
135 self.external = nullrev
135 self.external = nullrev
136 # Mapping between the old revision id and either the new rebased
136 # Mapping between the old revision id and either the new rebased
137 # revision or what needs to be done with the old revision. The state
137 # revision or what needs to be done with the old revision. The state
138 # dict contains most of the rebase progress state.
138 # dict contains most of the rebase progress state.
139 self.state = {}
139 self.state = {}
140 self.activebookmark = None
140 self.activebookmark = None
141 self.currentbookmarks = None
141 self.currentbookmarks = None
142 self.dest = None
142 self.dest = None
143 self.skipped = set()
143 self.skipped = set()
144 self.destancestors = set()
144 self.destancestors = set()
145
145
146 self.collapsef = opts.get('collapse', False)
146 self.collapsef = opts.get('collapse', False)
147 self.collapsemsg = cmdutil.logmessage(ui, opts)
147 self.collapsemsg = cmdutil.logmessage(ui, opts)
148 self.date = opts.get('date', None)
148 self.date = opts.get('date', None)
149
149
150 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
150 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
151 self.extrafns = [_savegraft]
151 self.extrafns = [_savegraft]
152 if e:
152 if e:
153 self.extrafns = [e]
153 self.extrafns = [e]
154
154
155 self.keepf = opts.get('keep', False)
155 self.keepf = opts.get('keep', False)
156 self.keepbranchesf = opts.get('keepbranches', False)
156 self.keepbranchesf = opts.get('keepbranches', False)
157 # keepopen is not meant for use on the command line, but by
157 # keepopen is not meant for use on the command line, but by
158 # other extensions
158 # other extensions
159 self.keepopen = opts.get('keepopen', False)
159 self.keepopen = opts.get('keepopen', False)
160 self.obsoletenotrebased = {}
160 self.obsoletenotrebased = {}
161
161
162 def storestatus(self, tr=None):
162 def storestatus(self, tr=None):
163 """Store the current status to allow recovery"""
163 """Store the current status to allow recovery"""
164 if tr:
164 if tr:
165 tr.addfilegenerator('rebasestate', ('rebasestate',),
165 tr.addfilegenerator('rebasestate', ('rebasestate',),
166 self._writestatus, location='plain')
166 self._writestatus, location='plain')
167 else:
167 else:
168 with self.repo.vfs("rebasestate", "w") as f:
168 with self.repo.vfs("rebasestate", "w") as f:
169 self._writestatus(f)
169 self._writestatus(f)
170
170
171 def _writestatus(self, f):
171 def _writestatus(self, f):
172 repo = self.repo.unfiltered()
172 repo = self.repo.unfiltered()
173 f.write(repo[self.originalwd].hex() + '\n')
173 f.write(repo[self.originalwd].hex() + '\n')
174 f.write(repo[self.dest].hex() + '\n')
174 f.write(repo[self.dest].hex() + '\n')
175 f.write(repo[self.external].hex() + '\n')
175 f.write(repo[self.external].hex() + '\n')
176 f.write('%d\n' % int(self.collapsef))
176 f.write('%d\n' % int(self.collapsef))
177 f.write('%d\n' % int(self.keepf))
177 f.write('%d\n' % int(self.keepf))
178 f.write('%d\n' % int(self.keepbranchesf))
178 f.write('%d\n' % int(self.keepbranchesf))
179 f.write('%s\n' % (self.activebookmark or ''))
179 f.write('%s\n' % (self.activebookmark or ''))
180 for d, v in self.state.iteritems():
180 for d, v in self.state.iteritems():
181 oldrev = repo[d].hex()
181 oldrev = repo[d].hex()
182 if v >= 0:
182 if v >= 0:
183 newrev = repo[v].hex()
183 newrev = repo[v].hex()
184 elif v == revtodo:
184 elif v == revtodo:
185 # To maintain format compatibility, we have to use nullid.
185 # To maintain format compatibility, we have to use nullid.
186 # Please do remove this special case when upgrading the format.
186 # Please do remove this special case when upgrading the format.
187 newrev = hex(nullid)
187 newrev = hex(nullid)
188 else:
188 else:
189 newrev = v
189 newrev = v
190 f.write("%s:%s\n" % (oldrev, newrev))
190 f.write("%s:%s\n" % (oldrev, newrev))
191 repo.ui.debug('rebase status stored\n')
191 repo.ui.debug('rebase status stored\n')
192
192
193 def restorestatus(self):
193 def restorestatus(self):
194 """Restore a previously stored status"""
194 """Restore a previously stored status"""
195 repo = self.repo
195 repo = self.repo
196 keepbranches = None
196 keepbranches = None
197 dest = None
197 dest = None
198 collapse = False
198 collapse = False
199 external = nullrev
199 external = nullrev
200 activebookmark = None
200 activebookmark = None
201 state = {}
201 state = {}
202
202
203 try:
203 try:
204 f = repo.vfs("rebasestate")
204 f = repo.vfs("rebasestate")
205 for i, l in enumerate(f.read().splitlines()):
205 for i, l in enumerate(f.read().splitlines()):
206 if i == 0:
206 if i == 0:
207 originalwd = repo[l].rev()
207 originalwd = repo[l].rev()
208 elif i == 1:
208 elif i == 1:
209 dest = repo[l].rev()
209 dest = repo[l].rev()
210 elif i == 2:
210 elif i == 2:
211 external = repo[l].rev()
211 external = repo[l].rev()
212 elif i == 3:
212 elif i == 3:
213 collapse = bool(int(l))
213 collapse = bool(int(l))
214 elif i == 4:
214 elif i == 4:
215 keep = bool(int(l))
215 keep = bool(int(l))
216 elif i == 5:
216 elif i == 5:
217 keepbranches = bool(int(l))
217 keepbranches = bool(int(l))
218 elif i == 6 and not (len(l) == 81 and ':' in l):
218 elif i == 6 and not (len(l) == 81 and ':' in l):
219 # line 6 is a recent addition, so for backwards
219 # line 6 is a recent addition, so for backwards
220 # compatibility check that the line doesn't look like the
220 # compatibility check that the line doesn't look like the
221 # oldrev:newrev lines
221 # oldrev:newrev lines
222 activebookmark = l
222 activebookmark = l
223 else:
223 else:
224 oldrev, newrev = l.split(':')
224 oldrev, newrev = l.split(':')
225 if newrev in (str(nullmerge), str(revignored),
225 if newrev in (str(nullmerge), str(revignored),
226 str(revprecursor), str(revpruned)):
226 str(revprecursor), str(revpruned)):
227 state[repo[oldrev].rev()] = int(newrev)
227 state[repo[oldrev].rev()] = int(newrev)
228 elif newrev == nullid:
228 elif newrev == nullid:
229 state[repo[oldrev].rev()] = revtodo
229 state[repo[oldrev].rev()] = revtodo
230 # Legacy compat special case
230 # Legacy compat special case
231 else:
231 else:
232 state[repo[oldrev].rev()] = repo[newrev].rev()
232 state[repo[oldrev].rev()] = repo[newrev].rev()
233
233
234 except IOError as err:
234 except IOError as err:
235 if err.errno != errno.ENOENT:
235 if err.errno != errno.ENOENT:
236 raise
236 raise
237 cmdutil.wrongtooltocontinue(repo, _('rebase'))
237 cmdutil.wrongtooltocontinue(repo, _('rebase'))
238
238
239 if keepbranches is None:
239 if keepbranches is None:
240 raise error.Abort(_('.hg/rebasestate is incomplete'))
240 raise error.Abort(_('.hg/rebasestate is incomplete'))
241
241
242 skipped = set()
242 skipped = set()
243 # recompute the set of skipped revs
243 # recompute the set of skipped revs
244 if not collapse:
244 if not collapse:
245 seen = {dest}
245 seen = {dest}
246 for old, new in sorted(state.items()):
246 for old, new in sorted(state.items()):
247 if new != revtodo and new in seen:
247 if new != revtodo and new in seen:
248 skipped.add(old)
248 skipped.add(old)
249 seen.add(new)
249 seen.add(new)
250 repo.ui.debug('computed skipped revs: %s\n' %
250 repo.ui.debug('computed skipped revs: %s\n' %
251 (' '.join(str(r) for r in sorted(skipped)) or None))
251 (' '.join(str(r) for r in sorted(skipped)) or None))
252 repo.ui.debug('rebase status resumed\n')
252 repo.ui.debug('rebase status resumed\n')
253 _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})
253 _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})
254
254
255 self.originalwd = originalwd
255 self.originalwd = originalwd
256 self.dest = dest
256 self.dest = dest
257 self.state = state
257 self.state = state
258 self.skipped = skipped
258 self.skipped = skipped
259 self.collapsef = collapse
259 self.collapsef = collapse
260 self.keepf = keep
260 self.keepf = keep
261 self.keepbranchesf = keepbranches
261 self.keepbranchesf = keepbranches
262 self.external = external
262 self.external = external
263 self.activebookmark = activebookmark
263 self.activebookmark = activebookmark
264
264
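_writestatus and restorestatus above define the on-disk layout of .hg/rebasestate: three hex nodes (original working directory parent, destination, external parent), three integer flags (collapse, keep, keepbranches), the active bookmark name (possibly empty; absent in older formats), then one oldrev:newrev line per changeset. A standalone sketch of a reader for that layout; the parserebasestate name is invented and error handling is minimal, the real reader being restorestatus above.

def parserebasestate(data):
    '''Parse the text of .hg/rebasestate into a dict (illustrative only).'''
    lines = data.splitlines()
    st = {
        'originalwd': lines[0],
        'dest': lines[1],
        'external': lines[2],
        'collapse': bool(int(lines[3])),
        'keep': bool(int(lines[4])),
        'keepbranches': bool(int(lines[5])),
    }
    rest = lines[6:]
    # line 6 is the active bookmark unless it looks like an
    # oldrev:newrev entry (40 hex chars, ':', 40 hex chars)
    if rest and not (len(rest[0]) == 81 and ':' in rest[0]):
        st['activebookmark'] = rest.pop(0)
    st['state'] = dict(l.split(':', 1) for l in rest)
    return st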
265 def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
265 def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
266 """Compute structures necessary for skipping obsolete revisions
266 """Compute structures necessary for skipping obsolete revisions
267
267
268 rebaserevs: iterable of all revisions that are to be rebased
268 rebaserevs: iterable of all revisions that are to be rebased
269 obsoleterevs: iterable of all obsolete revisions in rebaseset
269 obsoleterevs: iterable of all obsolete revisions in rebaseset
270 dest: a destination revision for the rebase operation
270 dest: a destination revision for the rebase operation
271 """
271 """
272 self.obsoletenotrebased = {}
272 self.obsoletenotrebased = {}
273 if not self.ui.configbool('experimental', 'rebaseskipobsolete',
273 if not self.ui.configbool('experimental', 'rebaseskipobsolete',
274 default=True):
274 default=True):
275 return
275 return
276 rebaseset = set(rebaserevs)
276 rebaseset = set(rebaserevs)
277 obsoleteset = set(obsoleterevs)
277 obsoleteset = set(obsoleterevs)
278 self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
278 self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
279 obsoleteset, dest)
279 obsoleteset, dest)
280 skippedset = set(self.obsoletenotrebased)
280 skippedset = set(self.obsoletenotrebased)
281 _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
281 _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
282
282
283 def _prepareabortorcontinue(self, isabort):
283 def _prepareabortorcontinue(self, isabort):
284 try:
284 try:
285 self.restorestatus()
285 self.restorestatus()
286 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
286 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
287 except error.RepoLookupError:
287 except error.RepoLookupError:
288 if isabort:
288 if isabort:
289 clearstatus(self.repo)
289 clearstatus(self.repo)
290 clearcollapsemsg(self.repo)
290 clearcollapsemsg(self.repo)
291 self.repo.ui.warn(_('rebase aborted (no revision is removed,'
291 self.repo.ui.warn(_('rebase aborted (no revision is removed,'
292 ' only broken state is cleared)\n'))
292 ' only broken state is cleared)\n'))
293 return 0
293 return 0
294 else:
294 else:
295 msg = _('cannot continue inconsistent rebase')
295 msg = _('cannot continue inconsistent rebase')
296 hint = _('use "hg rebase --abort" to clear broken state')
296 hint = _('use "hg rebase --abort" to clear broken state')
297 raise error.Abort(msg, hint=hint)
297 raise error.Abort(msg, hint=hint)
298 if isabort:
298 if isabort:
299 return abort(self.repo, self.originalwd, self.dest,
299 return abort(self.repo, self.originalwd, self.dest,
300 self.state, activebookmark=self.activebookmark)
300 self.state, activebookmark=self.activebookmark)
301
301
302 obsrevs = (r for r, st in self.state.items() if st == revprecursor)
302 obsrevs = (r for r, st in self.state.items() if st == revprecursor)
303 self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)
303 self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)
304
304
305 def _preparenewrebase(self, dest, rebaseset):
305 def _preparenewrebase(self, dest, rebaseset):
306 if dest is None:
306 if dest is None:
307 return _nothingtorebase()
307 return _nothingtorebase()
308
308
309 allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
309 allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
310 if (not (self.keepf or allowunstable)
310 if (not (self.keepf or allowunstable)
311 and self.repo.revs('first(children(%ld) - %ld)',
311 and self.repo.revs('first(children(%ld) - %ld)',
312 rebaseset, rebaseset)):
312 rebaseset, rebaseset)):
313 raise error.Abort(
313 raise error.Abort(
314 _("can't remove original changesets with"
314 _("can't remove original changesets with"
315 " unrebased descendants"),
315 " unrebased descendants"),
316 hint=_('use --keep to keep original changesets'))
316 hint=_('use --keep to keep original changesets'))
317
317
318 obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
318 obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
319 self._handleskippingobsolete(rebaseset, obsrevs, dest)
319 self._handleskippingobsolete(rebaseset, obsrevs, dest)
320
320
321 result = buildstate(self.repo, dest, rebaseset, self.collapsef,
321 result = buildstate(self.repo, dest, rebaseset, self.collapsef,
322 self.obsoletenotrebased)
322 self.obsoletenotrebased)
323
323
324 if not result:
324 if not result:
325 # Empty state built, nothing to rebase
325 # Empty state built, nothing to rebase
326 self.ui.status(_('nothing to rebase\n'))
326 self.ui.status(_('nothing to rebase\n'))
327 return _nothingtorebase()
327 return _nothingtorebase()
328
328
329 for root in self.repo.set('roots(%ld)', rebaseset):
329 for root in self.repo.set('roots(%ld)', rebaseset):
330 if not self.keepf and not root.mutable():
330 if not self.keepf and not root.mutable():
331 raise error.Abort(_("can't rebase public changeset %s")
331 raise error.Abort(_("can't rebase public changeset %s")
332 % root,
332 % root,
333 hint=_("see 'hg help phases' for details"))
333 hint=_("see 'hg help phases' for details"))
334
334
335 (self.originalwd, self.dest, self.state) = result
335 (self.originalwd, self.dest, self.state) = result
336 if self.collapsef:
336 if self.collapsef:
337 self.destancestors = self.repo.changelog.ancestors(
337 self.destancestors = self.repo.changelog.ancestors(
338 [self.dest],
338 [self.dest],
339 inclusive=True)
339 inclusive=True)
340 self.external = externalparent(self.repo, self.state,
340 self.external = externalparent(self.repo, self.state,
341 self.destancestors)
341 self.destancestors)
342
342
343 if dest.closesbranch() and not self.keepbranchesf:
343 if dest.closesbranch() and not self.keepbranchesf:
344 self.ui.status(_('reopening closed branch head %s\n') % dest)
344 self.ui.status(_('reopening closed branch head %s\n') % dest)
345
345
346 def _performrebase(self, tr):
346 def _performrebase(self, tr):
347 repo, ui, opts = self.repo, self.ui, self.opts
347 repo, ui, opts = self.repo, self.ui, self.opts
348 if self.keepbranchesf:
348 if self.keepbranchesf:
349 # insert _savebranch at the start of extrafns so if
349 # insert _savebranch at the start of extrafns so if
350 # there's a user-provided extrafn it can clobber branch if
350 # there's a user-provided extrafn it can clobber branch if
351 # desired
351 # desired
352 self.extrafns.insert(0, _savebranch)
352 self.extrafns.insert(0, _savebranch)
353 if self.collapsef:
353 if self.collapsef:
354 branches = set()
354 branches = set()
355 for rev in self.state:
355 for rev in self.state:
356 branches.add(repo[rev].branch())
356 branches.add(repo[rev].branch())
357 if len(branches) > 1:
357 if len(branches) > 1:
358 raise error.Abort(_('cannot collapse multiple named '
358 raise error.Abort(_('cannot collapse multiple named '
359 'branches'))
359 'branches'))
360
360
361 # Rebase
361 # Rebase
362 if not self.destancestors:
362 if not self.destancestors:
363 self.destancestors = repo.changelog.ancestors([self.dest],
363 self.destancestors = repo.changelog.ancestors([self.dest],
364 inclusive=True)
364 inclusive=True)
365
365
366 # Keep track of the current bookmarks in order to reset them later
366 # Keep track of the current bookmarks in order to reset them later
367 self.currentbookmarks = repo._bookmarks.copy()
367 self.currentbookmarks = repo._bookmarks.copy()
368 self.activebookmark = self.activebookmark or repo._activebookmark
368 self.activebookmark = self.activebookmark or repo._activebookmark
369 if self.activebookmark:
369 if self.activebookmark:
370 bookmarks.deactivate(repo)
370 bookmarks.deactivate(repo)
371
371
372 # Store the state before we begin so users can run 'hg rebase --abort'
372 # Store the state before we begin so users can run 'hg rebase --abort'
373 # if we fail before the transaction closes.
373 # if we fail before the transaction closes.
374 self.storestatus()
374 self.storestatus()
375
375
376 sortedrevs = repo.revs('sort(%ld, -topo)', self.state)
376 sortedrevs = repo.revs('sort(%ld, -topo)', self.state)
377 cands = [k for k, v in self.state.iteritems() if v == revtodo]
377 cands = [k for k, v in self.state.iteritems() if v == revtodo]
378 total = len(cands)
378 total = len(cands)
379 pos = 0
379 pos = 0
380 for rev in sortedrevs:
380 for rev in sortedrevs:
381 ctx = repo[rev]
381 ctx = repo[rev]
382 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
382 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
383 ctx.description().split('\n', 1)[0])
383 ctx.description().split('\n', 1)[0])
384 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
384 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
385 if names:
385 if names:
386 desc += ' (%s)' % ' '.join(names)
386 desc += ' (%s)' % ' '.join(names)
387 if self.state[rev] == rev:
387 if self.state[rev] == rev:
388 ui.status(_('already rebased %s\n') % desc)
388 ui.status(_('already rebased %s\n') % desc)
389 elif self.state[rev] == revtodo:
389 elif self.state[rev] == revtodo:
390 pos += 1
390 pos += 1
391 ui.status(_('rebasing %s\n') % desc)
391 ui.status(_('rebasing %s\n') % desc)
392 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
392 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
393 _('changesets'), total)
393 _('changesets'), total)
394 p1, p2, base = defineparents(repo, rev, self.dest,
394 p1, p2, base = defineparents(repo, rev, self.dest,
395 self.state,
395 self.state,
396 self.destancestors,
396 self.destancestors,
397 self.obsoletenotrebased)
397 self.obsoletenotrebased)
398 self.storestatus(tr=tr)
398 self.storestatus(tr=tr)
399 storecollapsemsg(repo, self.collapsemsg)
399 storecollapsemsg(repo, self.collapsemsg)
400 if len(repo[None].parents()) == 2:
400 if len(repo[None].parents()) == 2:
401 repo.ui.debug('resuming interrupted rebase\n')
401 repo.ui.debug('resuming interrupted rebase\n')
402 else:
402 else:
403 try:
403 try:
404 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
404 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
405 'rebase')
405 'rebase')
406 stats = rebasenode(repo, rev, p1, base, self.state,
406 stats = rebasenode(repo, rev, p1, base, self.state,
407 self.collapsef, self.dest)
407 self.collapsef, self.dest)
408 if stats and stats[3] > 0:
408 if stats and stats[3] > 0:
409 raise error.InterventionRequired(
409 raise error.InterventionRequired(
410 _('unresolved conflicts (see hg '
410 _('unresolved conflicts (see hg '
411 'resolve, then hg rebase --continue)'))
411 'resolve, then hg rebase --continue)'))
412 finally:
412 finally:
413 ui.setconfig('ui', 'forcemerge', '', 'rebase')
413 ui.setconfig('ui', 'forcemerge', '', 'rebase')
414 if not self.collapsef:
414 if not self.collapsef:
415 merging = p2 != nullrev
415 merging = p2 != nullrev
416 editform = cmdutil.mergeeditform(merging, 'rebase')
416 editform = cmdutil.mergeeditform(merging, 'rebase')
417 editor = cmdutil.getcommiteditor(editform=editform, **opts)
417 editor = cmdutil.getcommiteditor(editform=editform, **opts)
418 newnode = concludenode(repo, rev, p1, p2,
418 newnode = concludenode(repo, rev, p1, p2,
419 extrafn=_makeextrafn(self.extrafns),
419 extrafn=_makeextrafn(self.extrafns),
420 editor=editor,
420 editor=editor,
421 keepbranches=self.keepbranchesf,
421 keepbranches=self.keepbranchesf,
422 date=self.date)
422 date=self.date)
423 if newnode is None:
423 if newnode is None:
424 # If it ended up being a no-op commit, then the normal
424 # If it ended up being a no-op commit, then the normal
425 # merge state clean-up path doesn't happen, so do it
425 # merge state clean-up path doesn't happen, so do it
426 # here. Fix issue5494
426 # here. Fix issue5494
427 mergemod.mergestate.clean(repo)
427 mergemod.mergestate.clean(repo)
428 else:
428 else:
429 # Skip commit if we are collapsing
429 # Skip commit if we are collapsing
430 with repo.dirstate.parentchange():
430 with repo.dirstate.parentchange():
431 repo.setparents(repo[p1].node())
431 repo.setparents(repo[p1].node())
432 newnode = None
432 newnode = None
433 # Update the state
433 # Update the state
434 if newnode is not None:
434 if newnode is not None:
435 self.state[rev] = repo[newnode].rev()
435 self.state[rev] = repo[newnode].rev()
436 ui.debug('rebased as %s\n' % short(newnode))
436 ui.debug('rebased as %s\n' % short(newnode))
437 else:
437 else:
438 if not self.collapsef:
438 if not self.collapsef:
439 ui.warn(_('note: rebase of %d:%s created no changes '
439 ui.warn(_('note: rebase of %d:%s created no changes '
440 'to commit\n') % (rev, ctx))
440 'to commit\n') % (rev, ctx))
441 self.skipped.add(rev)
441 self.skipped.add(rev)
442 self.state[rev] = p1
442 self.state[rev] = p1
443 ui.debug('next revision set to %s\n' % p1)
443 ui.debug('next revision set to %s\n' % p1)
444 elif self.state[rev] == nullmerge:
444 elif self.state[rev] == nullmerge:
445 ui.debug('ignoring null merge rebase of %s\n' % rev)
445 ui.debug('ignoring null merge rebase of %s\n' % rev)
446 elif self.state[rev] == revignored:
446 elif self.state[rev] == revignored:
447 ui.status(_('not rebasing ignored %s\n') % desc)
447 ui.status(_('not rebasing ignored %s\n') % desc)
448 elif self.state[rev] == revprecursor:
448 elif self.state[rev] == revprecursor:
449 destctx = repo[self.obsoletenotrebased[rev]]
449 destctx = repo[self.obsoletenotrebased[rev]]
450 descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
450 descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
451 destctx.description().split('\n', 1)[0])
451 destctx.description().split('\n', 1)[0])
452 msg = _('note: not rebasing %s, already in destination as %s\n')
452 msg = _('note: not rebasing %s, already in destination as %s\n')
453 ui.status(msg % (desc, descdest))
453 ui.status(msg % (desc, descdest))
454 elif self.state[rev] == revpruned:
454 elif self.state[rev] == revpruned:
455 msg = _('note: not rebasing %s, it has no successor\n')
455 msg = _('note: not rebasing %s, it has no successor\n')
456 ui.status(msg % desc)
456 ui.status(msg % desc)
457 else:
457 else:
458 ui.status(_('already rebased %s as %s\n') %
458 ui.status(_('already rebased %s as %s\n') %
459 (desc, repo[self.state[rev]]))
459 (desc, repo[self.state[rev]]))
460
460
461 ui.progress(_('rebasing'), None)
461 ui.progress(_('rebasing'), None)
462 ui.note(_('rebase merging completed\n'))
462 ui.note(_('rebase merging completed\n'))
463
463
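The loop above reports progress the same way the patchbomb sending loop does: ui.progress() is called with a position, item, unit and total while work is in flight, then once more with None to clear the topic. A hedged minimal example of that pattern; the 'slowcount' command is invented and assumes the registrar.command setup at the top of this file.

@command('slowcount', [], _('hg slowcount'))
def slowcount(ui, repo):
    '''illustrative only: demonstrate the ui.progress() reporting pattern'''
    revs = list(repo)
    total = len(revs)
    for pos, rev in enumerate(revs):
        ui.progress(_('counting'), pos, item=str(rev),
                    unit=_('revisions'), total=total)
    # a position of None clears the progress topic
    ui.progress(_('counting'), None)
    ui.status(_('counted %d revisions\n') % total)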
464 def _finishrebase(self):
464 def _finishrebase(self):
465 repo, ui, opts = self.repo, self.ui, self.opts
465 repo, ui, opts = self.repo, self.ui, self.opts
466 if self.collapsef and not self.keepopen:
466 if self.collapsef and not self.keepopen:
467 p1, p2, _base = defineparents(repo, min(self.state),
467 p1, p2, _base = defineparents(repo, min(self.state),
468 self.dest, self.state,
468 self.dest, self.state,
469 self.destancestors,
469 self.destancestors,
470 self.obsoletenotrebased)
470 self.obsoletenotrebased)
471 editopt = opts.get('edit')
471 editopt = opts.get('edit')
472 editform = 'rebase.collapse'
472 editform = 'rebase.collapse'
473 if self.collapsemsg:
473 if self.collapsemsg:
474 commitmsg = self.collapsemsg
474 commitmsg = self.collapsemsg
475 else:
475 else:
476 commitmsg = 'Collapsed revision'
476 commitmsg = 'Collapsed revision'
477 for rebased in self.state:
477 for rebased in self.state:
478 if rebased not in self.skipped and\
478 if rebased not in self.skipped and\
479 self.state[rebased] > nullmerge:
479 self.state[rebased] > nullmerge:
480 commitmsg += '\n* %s' % repo[rebased].description()
480 commitmsg += '\n* %s' % repo[rebased].description()
481 editopt = True
481 editopt = True
482 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
482 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
483 revtoreuse = max(self.state)
483 revtoreuse = max(self.state)
484 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
484 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
485 try:
485 try:
486 newnode = concludenode(repo, revtoreuse, p1, self.external,
486 newnode = concludenode(repo, revtoreuse, p1, self.external,
487 commitmsg=commitmsg,
487 commitmsg=commitmsg,
488 extrafn=_makeextrafn(self.extrafns),
488 extrafn=_makeextrafn(self.extrafns),
489 editor=editor,
489 editor=editor,
490 keepbranches=self.keepbranchesf,
490 keepbranches=self.keepbranchesf,
491 date=self.date)
491 date=self.date)
492 dsguard.close()
492 dsguard.close()
493 release(dsguard)
493 release(dsguard)
494 except error.InterventionRequired:
494 except error.InterventionRequired:
495 dsguard.close()
495 dsguard.close()
496 release(dsguard)
496 release(dsguard)
497 raise
497 raise
498 except Exception:
498 except Exception:
499 release(dsguard)
499 release(dsguard)
500 raise
500 raise
501
501
502 if newnode is None:
502 if newnode is None:
503 newrev = self.dest
503 newrev = self.dest
504 else:
504 else:
505 newrev = repo[newnode].rev()
505 newrev = repo[newnode].rev()
506 for oldrev in self.state.iterkeys():
506 for oldrev in self.state.iterkeys():
507 if self.state[oldrev] > nullmerge:
507 if self.state[oldrev] > nullmerge:
508 self.state[oldrev] = newrev
508 self.state[oldrev] = newrev
509
509
510 if 'qtip' in repo.tags():
510 if 'qtip' in repo.tags():
511 updatemq(repo, self.state, self.skipped, **opts)
511 updatemq(repo, self.state, self.skipped, **opts)
512
512
513 if self.currentbookmarks:
513 if self.currentbookmarks:
514 # Nodeids are needed to reset bookmarks
514 # Nodeids are needed to reset bookmarks
515 nstate = {}
515 nstate = {}
516 for k, v in self.state.iteritems():
516 for k, v in self.state.iteritems():
517 if v > nullmerge and v != k:
517 if v > nullmerge and v != k:
518 nstate[repo[k].node()] = repo[v].node()
518 nstate[repo[k].node()] = repo[v].node()
519 elif v == revprecursor:
519 elif v == revprecursor:
520 succ = self.obsoletenotrebased[k]
520 succ = self.obsoletenotrebased[k]
521 nstate[repo[k].node()] = repo[succ].node()
521 nstate[repo[k].node()] = repo[succ].node()
522 # XXX this is the same as dest.node() for the non-continue path --
522 # XXX this is the same as dest.node() for the non-continue path --
523 # this should probably be cleaned up
523 # this should probably be cleaned up
524 destnode = repo[self.dest].node()
524 destnode = repo[self.dest].node()
525
525
526 # restore original working directory
526 # restore original working directory
527 # (we do this before stripping)
527 # (we do this before stripping)
528 newwd = self.state.get(self.originalwd, self.originalwd)
528 newwd = self.state.get(self.originalwd, self.originalwd)
529 if newwd == revprecursor:
529 if newwd == revprecursor:
530 newwd = self.obsoletenotrebased[self.originalwd]
530 newwd = self.obsoletenotrebased[self.originalwd]
531 elif newwd < 0:
531 elif newwd < 0:
532 # original directory is a parent of rebase set root or ignored
532 # original directory is a parent of rebase set root or ignored
533 newwd = self.originalwd
533 newwd = self.originalwd
534 if newwd not in [c.rev() for c in repo[None].parents()]:
534 if newwd not in [c.rev() for c in repo[None].parents()]:
535 ui.note(_("update back to initial working directory parent\n"))
535 ui.note(_("update back to initial working directory parent\n"))
536 hg.updaterepo(repo, newwd, False)
536 hg.updaterepo(repo, newwd, False)
537
537
538 if self.currentbookmarks:
538 if self.currentbookmarks:
539 with repo.transaction('bookmark') as tr:
539 with repo.transaction('bookmark') as tr:
540 updatebookmarks(repo, destnode, nstate,
540 updatebookmarks(repo, destnode, nstate,
541 self.currentbookmarks, tr)
541 self.currentbookmarks, tr)
542 if self.activebookmark not in repo._bookmarks:
542 if self.activebookmark not in repo._bookmarks:
543 # active bookmark was divergent one and has been deleted
543 # active bookmark was divergent one and has been deleted
544 self.activebookmark = None
544 self.activebookmark = None
545
545
546 if not self.keepf:
546 if not self.keepf:
547 collapsedas = None
547 collapsedas = None
548 if self.collapsef:
548 if self.collapsef:
549 collapsedas = newnode
549 collapsedas = newnode
550 clearrebased(ui, repo, self.state, self.skipped, collapsedas)
550 clearrebased(ui, repo, self.state, self.skipped, collapsedas)
551
551
552 clearstatus(repo)
552 clearstatus(repo)
553 clearcollapsemsg(repo)
553 clearcollapsemsg(repo)
554
554
555 ui.note(_("rebase completed\n"))
555 ui.note(_("rebase completed\n"))
556 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
556 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
557 if self.skipped:
557 if self.skipped:
558 skippedlen = len(self.skipped)
558 skippedlen = len(self.skipped)
559 ui.note(_("%d revisions have been skipped\n") % skippedlen)
559 ui.note(_("%d revisions have been skipped\n") % skippedlen)
560
560
561 if (self.activebookmark and
561 if (self.activebookmark and
562 repo['.'].node() == repo._bookmarks[self.activebookmark]):
562 repo['.'].node() == repo._bookmarks[self.activebookmark]):
563 bookmarks.activate(repo, self.activebookmark)
563 bookmarks.activate(repo, self.activebookmark)
564
564
565 @command('rebase',
565 @command('rebase',
566 [('s', 'source', '',
566 [('s', 'source', '',
567 _('rebase the specified changeset and descendants'), _('REV')),
567 _('rebase the specified changeset and descendants'), _('REV')),
568 ('b', 'base', '',
568 ('b', 'base', '',
569 _('rebase everything from branching point of specified changeset'),
569 _('rebase everything from branching point of specified changeset'),
570 _('REV')),
570 _('REV')),
571 ('r', 'rev', [],
571 ('r', 'rev', [],
572 _('rebase these revisions'),
572 _('rebase these revisions'),
573 _('REV')),
573 _('REV')),
574 ('d', 'dest', '',
574 ('d', 'dest', '',
575 _('rebase onto the specified changeset'), _('REV')),
575 _('rebase onto the specified changeset'), _('REV')),
576 ('', 'collapse', False, _('collapse the rebased changesets')),
576 ('', 'collapse', False, _('collapse the rebased changesets')),
577 ('m', 'message', '',
577 ('m', 'message', '',
578 _('use text as collapse commit message'), _('TEXT')),
578 _('use text as collapse commit message'), _('TEXT')),
579 ('e', 'edit', False, _('invoke editor on commit messages')),
579 ('e', 'edit', False, _('invoke editor on commit messages')),
580 ('l', 'logfile', '',
580 ('l', 'logfile', '',
581 _('read collapse commit message from file'), _('FILE')),
581 _('read collapse commit message from file'), _('FILE')),
582 ('k', 'keep', False, _('keep original changesets')),
582 ('k', 'keep', False, _('keep original changesets')),
583 ('', 'keepbranches', False, _('keep original branch names')),
583 ('', 'keepbranches', False, _('keep original branch names')),
584 ('D', 'detach', False, _('(DEPRECATED)')),
584 ('D', 'detach', False, _('(DEPRECATED)')),
585 ('i', 'interactive', False, _('(DEPRECATED)')),
585 ('i', 'interactive', False, _('(DEPRECATED)')),
586 ('t', 'tool', '', _('specify merge tool')),
586 ('t', 'tool', '', _('specify merge tool')),
587 ('c', 'continue', False, _('continue an interrupted rebase')),
587 ('c', 'continue', False, _('continue an interrupted rebase')),
588 ('a', 'abort', False, _('abort an interrupted rebase'))] +
588 ('a', 'abort', False, _('abort an interrupted rebase'))] +
589 templateopts,
589 templateopts,
590 _('[-s REV | -b REV] [-d REV] [OPTION]'))
590 _('[-s REV | -b REV] [-d REV] [OPTION]'))
591 def rebase(ui, repo, **opts):
591 def rebase(ui, repo, **opts):
592 """move changeset (and descendants) to a different branch
592 """move changeset (and descendants) to a different branch
593
593
594 Rebase uses repeated merging to graft changesets from one part of
594 Rebase uses repeated merging to graft changesets from one part of
595 history (the source) onto another (the destination). This can be
595 history (the source) onto another (the destination). This can be
596 useful for linearizing *local* changes relative to a master
596 useful for linearizing *local* changes relative to a master
597 development tree.
597 development tree.
598
598
599 Published commits cannot be rebased (see :hg:`help phases`).
599 Published commits cannot be rebased (see :hg:`help phases`).
600 To copy commits, see :hg:`help graft`.
600 To copy commits, see :hg:`help graft`.
601
601
602 If you don't specify a destination changeset (``-d/--dest``), rebase
602 If you don't specify a destination changeset (``-d/--dest``), rebase
603 will use the same logic as :hg:`merge` to pick a destination. If
603 will use the same logic as :hg:`merge` to pick a destination. If
604 the current branch contains exactly one other head, that head is
604 the current branch contains exactly one other head, that head is
605 merged with by default. Otherwise, an explicit revision with
605 merged with by default. Otherwise, an explicit revision with
606 which to merge must be provided. (The destination changeset is not
606 which to merge must be provided. (The destination changeset is not
607 modified by rebasing, but new changesets are added as its
607 modified by rebasing, but new changesets are added as its
608 descendants.)
608 descendants.)
609
609
610 Here are the ways to select changesets:
610 Here are the ways to select changesets:
611
611
612 1. Explicitly select them using ``--rev``.
612 1. Explicitly select them using ``--rev``.
613
613
614 2. Use ``--source`` to select a root changeset and include all of its
614 2. Use ``--source`` to select a root changeset and include all of its
615 descendants.
615 descendants.
616
616
617 3. Use ``--base`` to select a changeset; rebase will find ancestors
617 3. Use ``--base`` to select a changeset; rebase will find ancestors
618 and their descendants which are not also ancestors of the destination.
618 and their descendants which are not also ancestors of the destination.
619
619
620 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
620 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
621 rebase will use ``--base .`` as above.
621 rebase will use ``--base .`` as above.
622
622
623 Rebase will destroy original changesets unless you use ``--keep``.
623 Rebase will destroy original changesets unless you use ``--keep``.
624 It will also move your bookmarks (even if you do).
624 It will also move your bookmarks (even if you do).
625
625
626 Some changesets may be dropped if they do not contribute changes
626 Some changesets may be dropped if they do not contribute changes
627 (e.g. merges from the destination branch).
627 (e.g. merges from the destination branch).
628
628
629 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
629 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
630 a named branch with two heads. You will need to explicitly specify source
630 a named branch with two heads. You will need to explicitly specify source
631 and/or destination.
631 and/or destination.
632
632
633 If you need to use a tool to automate merge/conflict decisions, you
633 If you need to use a tool to automate merge/conflict decisions, you
634 can specify one with ``--tool``, see :hg:`help merge-tools`.
634 can specify one with ``--tool``, see :hg:`help merge-tools`.
635 As a caveat: the tool will not be used to mediate when a file was
635 As a caveat: the tool will not be used to mediate when a file was
636 deleted; there is no hook presently available for this.
636 deleted; there is no hook presently available for this.
637
637
638 If a rebase is interrupted to manually resolve a conflict, it can be
638 If a rebase is interrupted to manually resolve a conflict, it can be
639 continued with --continue/-c or aborted with --abort/-a.
639 continued with --continue/-c or aborted with --abort/-a.
640
640
641 .. container:: verbose
641 .. container:: verbose
642
642
643 Examples:
643 Examples:
644
644
645 - move "local changes" (current commit back to branching point)
645 - move "local changes" (current commit back to branching point)
646 to the current branch tip after a pull::
646 to the current branch tip after a pull::
647
647
648 hg rebase
648 hg rebase
649
649
650 - move a single changeset to the stable branch::
650 - move a single changeset to the stable branch::
651
651
652 hg rebase -r 5f493448 -d stable
652 hg rebase -r 5f493448 -d stable
653
653
654 - splice a commit and all its descendants onto another part of history::
654 - splice a commit and all its descendants onto another part of history::
655
655
656 hg rebase --source c0c3 --dest 4cf9
656 hg rebase --source c0c3 --dest 4cf9
657
657
658 - rebase everything on a branch marked by a bookmark onto the
658 - rebase everything on a branch marked by a bookmark onto the
659 default branch::
659 default branch::
660
660
661 hg rebase --base myfeature --dest default
661 hg rebase --base myfeature --dest default
662
662
663 - collapse a sequence of changes into a single commit::
663 - collapse a sequence of changes into a single commit::
664
664
665 hg rebase --collapse -r 1520:1525 -d .
665 hg rebase --collapse -r 1520:1525 -d .
666
666
667 - move a named branch while preserving its name::
667 - move a named branch while preserving its name::
668
668
669 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
669 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
670
670
671 Configuration Options:
671 Configuration Options:
672
672
673 You can make rebase require a destination if you set the following config
673 You can make rebase require a destination if you set the following config
674 option::
674 option::
675
675
676 [commands]
676 [commands]
677 rebase.requiredest = True
677 rebase.requiredest = True
678
678
679 Return Values:
679 Return Values:
680
680
681 Returns 0 on success, 1 if nothing to rebase or there are
681 Returns 0 on success, 1 if nothing to rebase or there are
682 unresolved conflicts.
682 unresolved conflicts.
683
683
684 """
684 """
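The Configuration Options section above documents commands.rebase.requiredest. A hedged sketch of how that knob can be read with ui.configbool before falling back to automatic destination selection; the checkrequiredest helper is invented, and the hunk shown here does not include the place where rebase itself enforces it.

def checkrequiredest(ui, destf):
    '''Abort if a destination is required but none was given (illustrative).'''
    if ui.configbool('commands', 'rebase.requiredest') and not destf:
        raise error.Abort(_('you must specify a destination'),
                          hint=_('use: hg rebase -d REV'))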
685 rbsrt = rebaseruntime(repo, ui, opts)
685 rbsrt = rebaseruntime(repo, ui, opts)
686
686
687 lock = wlock = None
687 lock = wlock = None
688 try:
688 try:
689 wlock = repo.wlock()
689 wlock = repo.wlock()
690 lock = repo.lock()
690 lock = repo.lock()
691
691
692 # Validate input and define rebasing points
692 # Validate input and define rebasing points
693 destf = opts.get('dest', None)
693 destf = opts.get('dest', None)
694 srcf = opts.get('source', None)
694 srcf = opts.get('source', None)
695 basef = opts.get('base', None)
695 basef = opts.get('base', None)
696 revf = opts.get('rev', [])
696 revf = opts.get('rev', [])
697 # search default destination in this space
697 # search default destination in this space
698 # used in the 'hg pull --rebase' case, see issue 5214.
698 # used in the 'hg pull --rebase' case, see issue 5214.
699 destspace = opts.get('_destspace')
699 destspace = opts.get('_destspace')
700 contf = opts.get('continue')
700 contf = opts.get('continue')
701 abortf = opts.get('abort')
701 abortf = opts.get('abort')
702 if opts.get('interactive'):
702 if opts.get('interactive'):
703 try:
703 try:
704 if extensions.find('histedit'):
704 if extensions.find('histedit'):
705 enablehistedit = ''
705 enablehistedit = ''
706 except KeyError:
706 except KeyError:
707 enablehistedit = " --config extensions.histedit="
707 enablehistedit = " --config extensions.histedit="
708 help = "hg%s help -e histedit" % enablehistedit
708 help = "hg%s help -e histedit" % enablehistedit
709 msg = _("interactive history editing is supported by the "
709 msg = _("interactive history editing is supported by the "
710 "'histedit' extension (see \"%s\")") % help
710 "'histedit' extension (see \"%s\")") % help
711 raise error.Abort(msg)
711 raise error.Abort(msg)
712
712
713 if rbsrt.collapsemsg and not rbsrt.collapsef:
713 if rbsrt.collapsemsg and not rbsrt.collapsef:
714 raise error.Abort(
714 raise error.Abort(
715 _('message can only be specified with collapse'))
715 _('message can only be specified with collapse'))
716
716
717 if contf or abortf:
717 if contf or abortf:
718 if contf and abortf:
718 if contf and abortf:
719 raise error.Abort(_('cannot use both abort and continue'))
719 raise error.Abort(_('cannot use both abort and continue'))
720 if rbsrt.collapsef:
720 if rbsrt.collapsef:
721 raise error.Abort(
721 raise error.Abort(
722 _('cannot use collapse with continue or abort'))
722 _('cannot use collapse with continue or abort'))
723 if srcf or basef or destf:
723 if srcf or basef or destf:
724 raise error.Abort(
724 raise error.Abort(
725 _('abort and continue do not allow specifying revisions'))
725 _('abort and continue do not allow specifying revisions'))
726 if abortf and opts.get('tool', False):
726 if abortf and opts.get('tool', False):
727 ui.warn(_('tool option will be ignored\n'))
727 ui.warn(_('tool option will be ignored\n'))
728 if contf:
728 if contf:
729 ms = mergemod.mergestate.read(repo)
729 ms = mergemod.mergestate.read(repo)
730 mergeutil.checkunresolved(ms)
730 mergeutil.checkunresolved(ms)
731
731
732 retcode = rbsrt._prepareabortorcontinue(abortf)
732 retcode = rbsrt._prepareabortorcontinue(abortf)
733 if retcode is not None:
733 if retcode is not None:
734 return retcode
734 return retcode
735 else:
735 else:
736 dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
736 dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
737 destspace=destspace)
737 destspace=destspace)
738 retcode = rbsrt._preparenewrebase(dest, rebaseset)
738 retcode = rbsrt._preparenewrebase(dest, rebaseset)
739 if retcode is not None:
739 if retcode is not None:
740 return retcode
740 return retcode
741
741
742 with repo.transaction('rebase') as tr:
742 with repo.transaction('rebase') as tr:
743 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
743 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
744 try:
744 try:
745 rbsrt._performrebase(tr)
745 rbsrt._performrebase(tr)
746 dsguard.close()
746 dsguard.close()
747 release(dsguard)
747 release(dsguard)
748 except error.InterventionRequired:
748 except error.InterventionRequired:
749 dsguard.close()
749 dsguard.close()
750 release(dsguard)
750 release(dsguard)
751 tr.close()
751 tr.close()
752 raise
752 raise
753 except Exception:
753 except Exception:
754 release(dsguard)
754 release(dsguard)
755 raise
755 raise
756 rbsrt._finishrebase()
756 rbsrt._finishrebase()
757 finally:
757 finally:
758 release(lock, wlock)
758 release(lock, wlock)
759
759
760 def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
760 def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
761 destspace=None):
761 destspace=None):
    """use the revisions argument to define the destination and rebase set
    """
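    # Illustrative summary of how the three selection modes below translate
    # into revsets (a sketch of the code that follows, not an extra feature):
    #   --rev R1 R2 ...  -> exactly scmutil.revrange(repo, [R1, R2, ...])
    #   --source SRC     -> '(SRC)::', i.e. SRC and all of its descendants
    #   --base BASE      -> the children of ancestor(BASE, destination) that
    #                       are also ancestors of BASE, plus their descendants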
764 if revf is None:
764 if revf is None:
765 revf = []
765 revf = []
766
766
767 # destspace is here to work around issues with `hg pull --rebase` see
767 # destspace is here to work around issues with `hg pull --rebase` see
768 # issue5214 for details
768 # issue5214 for details
769 if srcf and basef:
769 if srcf and basef:
770 raise error.Abort(_('cannot specify both a source and a base'))
770 raise error.Abort(_('cannot specify both a source and a base'))
771 if revf and basef:
771 if revf and basef:
772 raise error.Abort(_('cannot specify both a revision and a base'))
772 raise error.Abort(_('cannot specify both a revision and a base'))
773 if revf and srcf:
773 if revf and srcf:
774 raise error.Abort(_('cannot specify both a revision and a source'))
774 raise error.Abort(_('cannot specify both a revision and a source'))
775
775
776 cmdutil.checkunfinished(repo)
776 cmdutil.checkunfinished(repo)
777 cmdutil.bailifchanged(repo)
777 cmdutil.bailifchanged(repo)
778
778
779 if ui.configbool('commands', 'rebase.requiredest') and not destf:
779 if ui.configbool('commands', 'rebase.requiredest') and not destf:
780 raise error.Abort(_('you must specify a destination'),
780 raise error.Abort(_('you must specify a destination'),
781 hint=_('use: hg rebase -d REV'))
781 hint=_('use: hg rebase -d REV'))
782
782
783 if destf:
783 if destf:
784 dest = scmutil.revsingle(repo, destf)
784 dest = scmutil.revsingle(repo, destf)
785
785
786 if revf:
786 if revf:
787 rebaseset = scmutil.revrange(repo, revf)
787 rebaseset = scmutil.revrange(repo, revf)
788 if not rebaseset:
788 if not rebaseset:
789 ui.status(_('empty "rev" revision set - nothing to rebase\n'))
789 ui.status(_('empty "rev" revision set - nothing to rebase\n'))
790 return None, None
790 return None, None
791 elif srcf:
791 elif srcf:
792 src = scmutil.revrange(repo, [srcf])
792 src = scmutil.revrange(repo, [srcf])
793 if not src:
793 if not src:
794 ui.status(_('empty "source" revision set - nothing to rebase\n'))
794 ui.status(_('empty "source" revision set - nothing to rebase\n'))
795 return None, None
795 return None, None
796 rebaseset = repo.revs('(%ld)::', src)
796 rebaseset = repo.revs('(%ld)::', src)
797 assert rebaseset
797 assert rebaseset
798 else:
798 else:
799 base = scmutil.revrange(repo, [basef or '.'])
799 base = scmutil.revrange(repo, [basef or '.'])
800 if not base:
800 if not base:
801 ui.status(_('empty "base" revision set - '
801 ui.status(_('empty "base" revision set - '
802 "can't compute rebase set\n"))
802 "can't compute rebase set\n"))
803 return None, None
803 return None, None
804 if not destf:
804 if not destf:
805 dest = repo[_destrebase(repo, base, destspace=destspace)]
805 dest = repo[_destrebase(repo, base, destspace=destspace)]
806 destf = str(dest)
806 destf = str(dest)
807
807
808 roots = [] # selected children of branching points
808 roots = [] # selected children of branching points
809 bpbase = {} # {branchingpoint: [origbase]}
809 bpbase = {} # {branchingpoint: [origbase]}
810 for b in base: # group bases by branching points
810 for b in base: # group bases by branching points
811 bp = repo.revs('ancestor(%d, %d)', b, dest).first()
811 bp = repo.revs('ancestor(%d, %d)', b, dest).first()
812 bpbase[bp] = bpbase.get(bp, []) + [b]
812 bpbase[bp] = bpbase.get(bp, []) + [b]
813 if None in bpbase:
813 if None in bpbase:
            # emulate the old behavior, showing "nothing to rebase" (a better
            # behavior may be to abort with a "cannot find branching point"
            # error)
816 bpbase.clear()
816 bpbase.clear()
817 for bp, bs in bpbase.iteritems(): # calculate roots
817 for bp, bs in bpbase.iteritems(): # calculate roots
818 roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))
818 roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))
819
819
820 rebaseset = repo.revs('%ld::', roots)
820 rebaseset = repo.revs('%ld::', roots)
821
821
822 if not rebaseset:
822 if not rebaseset:
823 # transform to list because smartsets are not comparable to
823 # transform to list because smartsets are not comparable to
824 # lists. This should be improved to honor laziness of
824 # lists. This should be improved to honor laziness of
825 # smartset.
825 # smartset.
826 if list(base) == [dest.rev()]:
826 if list(base) == [dest.rev()]:
827 if basef:
827 if basef:
828 ui.status(_('nothing to rebase - %s is both "base"'
828 ui.status(_('nothing to rebase - %s is both "base"'
829 ' and destination\n') % dest)
829 ' and destination\n') % dest)
830 else:
830 else:
831 ui.status(_('nothing to rebase - working directory '
831 ui.status(_('nothing to rebase - working directory '
832 'parent is also destination\n'))
832 'parent is also destination\n'))
833 elif not repo.revs('%ld - ::%d', base, dest):
833 elif not repo.revs('%ld - ::%d', base, dest):
834 if basef:
834 if basef:
835 ui.status(_('nothing to rebase - "base" %s is '
835 ui.status(_('nothing to rebase - "base" %s is '
836 'already an ancestor of destination '
836 'already an ancestor of destination '
837 '%s\n') %
837 '%s\n') %
838 ('+'.join(str(repo[r]) for r in base),
838 ('+'.join(str(repo[r]) for r in base),
839 dest))
839 dest))
840 else:
840 else:
841 ui.status(_('nothing to rebase - working '
841 ui.status(_('nothing to rebase - working '
842 'directory parent is already an '
842 'directory parent is already an '
843 'ancestor of destination %s\n') % dest)
843 'ancestor of destination %s\n') % dest)
844 else: # can it happen?
844 else: # can it happen?
845 ui.status(_('nothing to rebase from %s to %s\n') %
845 ui.status(_('nothing to rebase from %s to %s\n') %
846 ('+'.join(str(repo[r]) for r in base), dest))
846 ('+'.join(str(repo[r]) for r in base), dest))
847 return None, None
847 return None, None
848
848
849 if not destf:
849 if not destf:
850 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
850 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
851 destf = str(dest)
851 destf = str(dest)
852
852
853 return dest, rebaseset
853 return dest, rebaseset
854
854
855 def externalparent(repo, state, destancestors):
855 def externalparent(repo, state, destancestors):
    """Return the revision that should be used as the second parent
    when the revisions in state are collapsed on top of destancestors.
    Abort if there is more than one parent.
    """
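    # Hypothetical example: when collapsing revisions 10::12 onto some
    # destination, if 11 is a merge whose second parent 5 is neither in the
    # collapsed set nor an ancestor of the destination, 5 becomes the second
    # parent of the collapsed commit; two such external parents would abort.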
860 parents = set()
860 parents = set()
861 source = min(state)
861 source = min(state)
862 for rev in state:
862 for rev in state:
863 if rev == source:
863 if rev == source:
864 continue
864 continue
865 for p in repo[rev].parents():
865 for p in repo[rev].parents():
866 if (p.rev() not in state
866 if (p.rev() not in state
867 and p.rev() not in destancestors):
867 and p.rev() not in destancestors):
868 parents.add(p.rev())
868 parents.add(p.rev())
869 if not parents:
869 if not parents:
870 return nullrev
870 return nullrev
871 if len(parents) == 1:
871 if len(parents) == 1:
872 return parents.pop()
872 return parents.pop()
873 raise error.Abort(_('unable to collapse on top of %s, there is more '
873 raise error.Abort(_('unable to collapse on top of %s, there is more '
874 'than one external parent: %s') %
874 'than one external parent: %s') %
875 (max(destancestors),
875 (max(destancestors),
876 ', '.join(str(p) for p in sorted(parents))))
876 ', '.join(str(p) for p in sorted(parents))))
877
877
878 def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
878 def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
879 keepbranches=False, date=None):
879 keepbranches=False, date=None):
    '''Commit the working directory changes with parents p1 and p2. Reuse
    commit info from rev but also store useful information in extra.
    Return the node of the committed revision.'''
883 repo.setparents(repo[p1].node(), repo[p2].node())
883 repo.setparents(repo[p1].node(), repo[p2].node())
884 ctx = repo[rev]
884 ctx = repo[rev]
885 if commitmsg is None:
885 if commitmsg is None:
886 commitmsg = ctx.description()
886 commitmsg = ctx.description()
887 keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
887 keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
888 extra = {'rebase_source': ctx.hex()}
888 extra = {'rebase_source': ctx.hex()}
889 if extrafn:
889 if extrafn:
890 extrafn(ctx, extra)
890 extrafn(ctx, extra)
891
891
892 destphase = max(ctx.phase(), phases.draft)
892 destphase = max(ctx.phase(), phases.draft)
893 overrides = {('phases', 'new-commit'): destphase}
893 overrides = {('phases', 'new-commit'): destphase}
894 with repo.ui.configoverride(overrides, 'rebase'):
894 with repo.ui.configoverride(overrides, 'rebase'):
895 if keepbranch:
895 if keepbranch:
896 repo.ui.setconfig('ui', 'allowemptycommit', True)
896 repo.ui.setconfig('ui', 'allowemptycommit', True)
897 # Commit might fail if unresolved files exist
897 # Commit might fail if unresolved files exist
898 if date is None:
898 if date is None:
899 date = ctx.date()
899 date = ctx.date()
900 newnode = repo.commit(text=commitmsg, user=ctx.user(),
900 newnode = repo.commit(text=commitmsg, user=ctx.user(),
901 date=date, extra=extra, editor=editor)
901 date=date, extra=extra, editor=editor)
902
902
903 repo.dirstate.setbranch(repo[newnode].branch())
903 repo.dirstate.setbranch(repo[newnode].branch())
904 return newnode
904 return newnode
905
905
906 def rebasenode(repo, rev, p1, base, state, collapse, dest):
906 def rebasenode(repo, rev, p1, base, state, collapse, dest):
907 'Rebase a single revision rev on top of p1 using base as merge ancestor'
907 'Rebase a single revision rev on top of p1 using base as merge ancestor'
908 # Merge phase
908 # Merge phase
909 # Update to destination and merge it with local
909 # Update to destination and merge it with local
910 if repo['.'].rev() != p1:
910 if repo['.'].rev() != p1:
911 repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
911 repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
912 mergemod.update(repo, p1, False, True)
912 mergemod.update(repo, p1, False, True)
913 else:
913 else:
914 repo.ui.debug(" already in destination\n")
914 repo.ui.debug(" already in destination\n")
915 repo.dirstate.write(repo.currenttransaction())
915 repo.dirstate.write(repo.currenttransaction())
916 repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
916 repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
917 if base is not None:
917 if base is not None:
918 repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
918 repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
919 # When collapsing in-place, the parent is the common ancestor, we
919 # When collapsing in-place, the parent is the common ancestor, we
920 # have to allow merging with it.
920 # have to allow merging with it.
921 stats = mergemod.update(repo, rev, True, True, base, collapse,
921 stats = mergemod.update(repo, rev, True, True, base, collapse,
922 labels=['dest', 'source'])
922 labels=['dest', 'source'])
923 if collapse:
923 if collapse:
924 copies.duplicatecopies(repo, rev, dest)
924 copies.duplicatecopies(repo, rev, dest)
925 else:
925 else:
926 # If we're not using --collapse, we need to
926 # If we're not using --collapse, we need to
927 # duplicate copies between the revision we're
927 # duplicate copies between the revision we're
928 # rebasing and its first parent, but *not*
928 # rebasing and its first parent, but *not*
929 # duplicate any copies that have already been
929 # duplicate any copies that have already been
930 # performed in the destination.
930 # performed in the destination.
931 p1rev = repo[rev].p1().rev()
931 p1rev = repo[rev].p1().rev()
932 copies.duplicatecopies(repo, rev, p1rev, skiprev=dest)
932 copies.duplicatecopies(repo, rev, p1rev, skiprev=dest)
933 return stats
933 return stats
934
934
935 def nearestrebased(repo, rev, state):
935 def nearestrebased(repo, rev, state):
    """return the rebased copy of the nearest already-rebased ancestor of rev"""
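    # Hypothetical example: if ancestors 3 and 4 of rev were rebased to 8 and
    # 9 respectively, the nearest rebased ancestor is 4, so state[4] == 9 is
    # returned; if no rebased ancestor exists, None is returned.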
937 rebased = [r for r in state if state[r] > nullmerge]
937 rebased = [r for r in state if state[r] > nullmerge]
938 candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
938 candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
939 if candidates:
939 if candidates:
940 return state[candidates.first()]
940 return state[candidates.first()]
941 else:
941 else:
942 return None
942 return None
943
943
944 def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped):
944 def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped):
    """
    Abort if the rebase will create divergence or is a no-op because of
    obsolescence markers

    `rebaseobsrevs`: set of obsolete revisions in the source
    `rebasesetrevs`: set of revisions to be rebased from the source
    `rebaseobsskipped`: set of revisions from the source skipped because they
                        have successors in the destination
    """
953 # Obsolete node with successors not in dest leads to divergence
953 # Obsolete node with successors not in dest leads to divergence
954 divergenceok = ui.configbool('experimental',
954 divergenceok = ui.configbool('experimental',
955 'allowdivergence')
955 'allowdivergence')
956 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
956 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
957
957
958 if divergencebasecandidates and not divergenceok:
958 if divergencebasecandidates and not divergenceok:
959 divhashes = (str(repo[r])
959 divhashes = (str(repo[r])
960 for r in divergencebasecandidates)
960 for r in divergencebasecandidates)
961 msg = _("this rebase will cause "
961 msg = _("this rebase will cause "
962 "divergences from: %s")
962 "divergences from: %s")
963 h = _("to force the rebase please set "
963 h = _("to force the rebase please set "
964 "experimental.allowdivergence=True")
964 "experimental.allowdivergence=True")
965 raise error.Abort(msg % (",".join(divhashes),), hint=h)
965 raise error.Abort(msg % (",".join(divhashes),), hint=h)
966
966
967 def defineparents(repo, rev, dest, state, destancestors,
967 def defineparents(repo, rev, dest, state, destancestors,
968 obsoletenotrebased):
968 obsoletenotrebased):
969 'Return the new parent relationship of the revision that will be rebased'
969 'Return the new parent relationship of the revision that will be rebased'
970 parents = repo[rev].parents()
970 parents = repo[rev].parents()
971 p1 = p2 = nullrev
971 p1 = p2 = nullrev
972 rp1 = None
972 rp1 = None
973
973
974 p1n = parents[0].rev()
974 p1n = parents[0].rev()
975 if p1n in destancestors:
975 if p1n in destancestors:
976 p1 = dest
976 p1 = dest
977 elif p1n in state:
977 elif p1n in state:
978 if state[p1n] == nullmerge:
978 if state[p1n] == nullmerge:
979 p1 = dest
979 p1 = dest
980 elif state[p1n] in revskipped:
980 elif state[p1n] in revskipped:
981 p1 = nearestrebased(repo, p1n, state)
981 p1 = nearestrebased(repo, p1n, state)
982 if p1 is None:
982 if p1 is None:
983 p1 = dest
983 p1 = dest
984 else:
984 else:
985 p1 = state[p1n]
985 p1 = state[p1n]
986 else: # p1n external
986 else: # p1n external
987 p1 = dest
987 p1 = dest
988 p2 = p1n
988 p2 = p1n
989
989
990 if len(parents) == 2 and parents[1].rev() not in destancestors:
990 if len(parents) == 2 and parents[1].rev() not in destancestors:
991 p2n = parents[1].rev()
991 p2n = parents[1].rev()
992 # interesting second parent
992 # interesting second parent
993 if p2n in state:
993 if p2n in state:
994 if p1 == dest: # p1n in destancestors or external
994 if p1 == dest: # p1n in destancestors or external
995 p1 = state[p2n]
995 p1 = state[p2n]
996 if p1 == revprecursor:
996 if p1 == revprecursor:
997 rp1 = obsoletenotrebased[p2n]
997 rp1 = obsoletenotrebased[p2n]
998 elif state[p2n] in revskipped:
998 elif state[p2n] in revskipped:
999 p2 = nearestrebased(repo, p2n, state)
999 p2 = nearestrebased(repo, p2n, state)
1000 if p2 is None:
1000 if p2 is None:
1001 # no ancestors rebased yet, detach
1001 # no ancestors rebased yet, detach
1002 p2 = dest
1002 p2 = dest
1003 else:
1003 else:
1004 p2 = state[p2n]
1004 p2 = state[p2n]
1005 else: # p2n external
1005 else: # p2n external
1006 if p2 != nullrev: # p1n external too => rev is a merged revision
1006 if p2 != nullrev: # p1n external too => rev is a merged revision
1007 raise error.Abort(_('cannot use revision %d as base, result '
1007 raise error.Abort(_('cannot use revision %d as base, result '
1008 'would have 3 parents') % rev)
1008 'would have 3 parents') % rev)
1009 p2 = p2n
1009 p2 = p2n
1010 repo.ui.debug(" future parents are %d and %d\n" %
1010 repo.ui.debug(" future parents are %d and %d\n" %
1011 (repo[rp1 or p1].rev(), repo[p2].rev()))
1011 (repo[rp1 or p1].rev(), repo[p2].rev()))
1012
1012
1013 if not any(p.rev() in state for p in parents):
1013 if not any(p.rev() in state for p in parents):
1014 # Case (1) root changeset of a non-detaching rebase set.
1014 # Case (1) root changeset of a non-detaching rebase set.
1015 # Let the merge mechanism find the base itself.
1015 # Let the merge mechanism find the base itself.
1016 base = None
1016 base = None
1017 elif not repo[rev].p2():
1017 elif not repo[rev].p2():
1018 # Case (2) detaching the node with a single parent, use this parent
1018 # Case (2) detaching the node with a single parent, use this parent
1019 base = repo[rev].p1().rev()
1019 base = repo[rev].p1().rev()
1020 else:
1020 else:
1021 # Assuming there is a p1, this is the case where there also is a p2.
1021 # Assuming there is a p1, this is the case where there also is a p2.
1022 # We are thus rebasing a merge and need to pick the right merge base.
1022 # We are thus rebasing a merge and need to pick the right merge base.
1023 #
1023 #
1024 # Imagine we have:
1024 # Imagine we have:
1025 # - M: current rebase revision in this step
1025 # - M: current rebase revision in this step
1026 # - A: one parent of M
1026 # - A: one parent of M
1027 # - B: other parent of M
1027 # - B: other parent of M
1028 # - D: destination of this merge step (p1 var)
1028 # - D: destination of this merge step (p1 var)
1029 #
1029 #
1030 # Consider the case where D is a descendant of A or B and the other is
1030 # Consider the case where D is a descendant of A or B and the other is
1031 # 'outside'. In this case, the right merge base is the D ancestor.
1031 # 'outside'. In this case, the right merge base is the D ancestor.
1032 #
1032 #
        # An informal proof, assuming A is 'outside' and B is the D ancestor:
        #
        # If we pick B as the base, the merge involves:
        # - changes from B to M (actual changeset payload)
        # - changes from B to D (induced by rebase, as D is a rebased
        #   version of B)
        # Which exactly represents the rebase operation.
        #
        # If we pick A as the base, the merge involves:
        # - changes from A to M (actual changeset payload)
        # - changes from A to D (which include changes between the unrelated
        #   A and B, plus changes induced by rebase)
        # Which does not represent anything sensible and creates a lot of
        # conflicts. A is thus not the right choice - B is.
        #
        # Note: The base found in this 'proof' is only correct in the
        # specified case. This base does not make sense if D is not a
        # descendant of A or B, or if the other parent is not 'outside'
        # (especially not if the other parent has been rebased). The current
        # implementation does not make it feasible to consider different
        # cases separately. In these other cases we currently just leave it
        # to the user to correctly resolve an impossible merge using a wrong
        # ancestor.
        #
        # xx, p1 could be -4, and both parents could probably be -4...
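        # Concretely (hypothetical scenario): M has parents A and B, and the
        # destination D already contains a rebased version of B but nothing
        # from A. Using B as the base replays only M's own changes onto D;
        # using A as the base would also replay the differences between the
        # unrelated A and B, which is meaningless and conflict-prone.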
1057 for p in repo[rev].parents():
1057 for p in repo[rev].parents():
1058 if state.get(p.rev()) == p1:
1058 if state.get(p.rev()) == p1:
1059 base = p.rev()
1059 base = p.rev()
1060 break
1060 break
1061 else: # fallback when base not found
1061 else: # fallback when base not found
1062 base = None
1062 base = None
1063
1063
1064 # Raise because this function is called wrong (see issue 4106)
1064 # Raise because this function is called wrong (see issue 4106)
1065 raise AssertionError('no base found to rebase on '
1065 raise AssertionError('no base found to rebase on '
1066 '(defineparents called wrong)')
1066 '(defineparents called wrong)')
1067 return rp1 or p1, p2, base
1067 return rp1 or p1, p2, base
1068
1068
1069 def isagitpatch(repo, patchname):
1069 def isagitpatch(repo, patchname):
1070 'Return true if the given patch is in git format'
1070 'Return true if the given patch is in git format'
1071 mqpatch = os.path.join(repo.mq.path, patchname)
1071 mqpatch = os.path.join(repo.mq.path, patchname)
1072 for line in patch.linereader(file(mqpatch, 'rb')):
1072 for line in patch.linereader(file(mqpatch, 'rb')):
1073 if line.startswith('diff --git'):
1073 if line.startswith('diff --git'):
1074 return True
1074 return True
1075 return False
1075 return False
1076
1076
1077 def updatemq(repo, state, skipped, **opts):
1077 def updatemq(repo, state, skipped, **opts):
1078 'Update rebased mq patches - finalize and then import them'
1078 'Update rebased mq patches - finalize and then import them'
1079 mqrebase = {}
1079 mqrebase = {}
1080 mq = repo.mq
1080 mq = repo.mq
1081 original_series = mq.fullseries[:]
1081 original_series = mq.fullseries[:]
1082 skippedpatches = set()
1082 skippedpatches = set()
1083
1083
1084 for p in mq.applied:
1084 for p in mq.applied:
1085 rev = repo[p.node].rev()
1085 rev = repo[p.node].rev()
1086 if rev in state:
1086 if rev in state:
1087 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
1087 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
1088 (rev, p.name))
1088 (rev, p.name))
1089 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1089 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1090 else:
1090 else:
1091 # Applied but not rebased, not sure this should happen
1091 # Applied but not rebased, not sure this should happen
1092 skippedpatches.add(p.name)
1092 skippedpatches.add(p.name)
1093
1093
1094 if mqrebase:
1094 if mqrebase:
1095 mq.finish(repo, mqrebase.keys())
1095 mq.finish(repo, mqrebase.keys())
1096
1096
1097 # We must start import from the newest revision
1097 # We must start import from the newest revision
1098 for rev in sorted(mqrebase, reverse=True):
1098 for rev in sorted(mqrebase, reverse=True):
1099 if rev not in skipped:
1099 if rev not in skipped:
1100 name, isgit = mqrebase[rev]
1100 name, isgit = mqrebase[rev]
1101 repo.ui.note(_('updating mq patch %s to %s:%s\n') %
1101 repo.ui.note(_('updating mq patch %s to %s:%s\n') %
1102 (name, state[rev], repo[state[rev]]))
1102 (name, state[rev], repo[state[rev]]))
1103 mq.qimport(repo, (), patchname=name, git=isgit,
1103 mq.qimport(repo, (), patchname=name, git=isgit,
1104 rev=[str(state[rev])])
1104 rev=[str(state[rev])])
1105 else:
1105 else:
1106 # Rebased and skipped
1106 # Rebased and skipped
1107 skippedpatches.add(mqrebase[rev][0])
1107 skippedpatches.add(mqrebase[rev][0])
1108
1108
1109 # Patches were either applied and rebased and imported in
1109 # Patches were either applied and rebased and imported in
1110 # order, applied and removed or unapplied. Discard the removed
1110 # order, applied and removed or unapplied. Discard the removed
1111 # ones while preserving the original series order and guards.
1111 # ones while preserving the original series order and guards.
1112 newseries = [s for s in original_series
1112 newseries = [s for s in original_series
1113 if mq.guard_re.split(s, 1)[0] not in skippedpatches]
1113 if mq.guard_re.split(s, 1)[0] not in skippedpatches]
1114 mq.fullseries[:] = newseries
1114 mq.fullseries[:] = newseries
1115 mq.seriesdirty = True
1115 mq.seriesdirty = True
1116 mq.savedirty()
1116 mq.savedirty()
1117
1117
1118 def updatebookmarks(repo, destnode, nstate, originalbookmarks, tr):
1118 def updatebookmarks(repo, destnode, nstate, originalbookmarks, tr):
1119 'Move bookmarks to their correct changesets, and delete divergent ones'
1119 'Move bookmarks to their correct changesets, and delete divergent ones'
1120 marks = repo._bookmarks
1120 marks = repo._bookmarks
1121 for k, v in originalbookmarks.iteritems():
1121 for k, v in originalbookmarks.iteritems():
1122 if v in nstate:
1122 if v in nstate:
1123 # update the bookmarks for revs that have moved
1123 # update the bookmarks for revs that have moved
1124 marks[k] = nstate[v]
1124 marks[k] = nstate[v]
1125 bookmarks.deletedivergent(repo, [destnode], k)
1125 bookmarks.deletedivergent(repo, [destnode], k)
1126 marks.recordchange(tr)
1126 marks.recordchange(tr)
1127
1127
1128 def storecollapsemsg(repo, collapsemsg):
1128 def storecollapsemsg(repo, collapsemsg):
1129 'Store the collapse message to allow recovery'
1129 'Store the collapse message to allow recovery'
1130 collapsemsg = collapsemsg or ''
1130 collapsemsg = collapsemsg or ''
1131 f = repo.vfs("last-message.txt", "w")
1131 f = repo.vfs("last-message.txt", "w")
1132 f.write("%s\n" % collapsemsg)
1132 f.write("%s\n" % collapsemsg)
1133 f.close()
1133 f.close()
1134
1134
1135 def clearcollapsemsg(repo):
1135 def clearcollapsemsg(repo):
1136 'Remove collapse message file'
1136 'Remove collapse message file'
1137 repo.vfs.unlinkpath("last-message.txt", ignoremissing=True)
1137 repo.vfs.unlinkpath("last-message.txt", ignoremissing=True)
1138
1138
1139 def restorecollapsemsg(repo, isabort):
1139 def restorecollapsemsg(repo, isabort):
1140 'Restore previously stored collapse message'
1140 'Restore previously stored collapse message'
1141 try:
1141 try:
1142 f = repo.vfs("last-message.txt")
1142 f = repo.vfs("last-message.txt")
1143 collapsemsg = f.readline().strip()
1143 collapsemsg = f.readline().strip()
1144 f.close()
1144 f.close()
1145 except IOError as err:
1145 except IOError as err:
1146 if err.errno != errno.ENOENT:
1146 if err.errno != errno.ENOENT:
1147 raise
1147 raise
1148 if isabort:
1148 if isabort:
1149 # Oh well, just abort like normal
1149 # Oh well, just abort like normal
1150 collapsemsg = ''
1150 collapsemsg = ''
1151 else:
1151 else:
1152 raise error.Abort(_('missing .hg/last-message.txt for rebase'))
1152 raise error.Abort(_('missing .hg/last-message.txt for rebase'))
1153 return collapsemsg
1153 return collapsemsg
1154
1154
1155 def clearstatus(repo):
1155 def clearstatus(repo):
1156 'Remove the status files'
1156 'Remove the status files'
1157 _clearrebasesetvisibiliy(repo)
1157 _clearrebasesetvisibiliy(repo)
1158 repo.vfs.unlinkpath("rebasestate", ignoremissing=True)
1158 repo.vfs.unlinkpath("rebasestate", ignoremissing=True)
1159
1159
1160 def needupdate(repo, state):
1160 def needupdate(repo, state):
1161 '''check whether we should `update --clean` away from a merge, or if
1161 '''check whether we should `update --clean` away from a merge, or if
1162 somehow the working dir got forcibly updated, e.g. by older hg'''
1162 somehow the working dir got forcibly updated, e.g. by older hg'''
1163 parents = [p.rev() for p in repo[None].parents()]
1163 parents = [p.rev() for p in repo[None].parents()]
1164
1164
1165 # Are we in a merge state at all?
1165 # Are we in a merge state at all?
1166 if len(parents) < 2:
1166 if len(parents) < 2:
1167 return False
1167 return False
1168
1168
1169 # We should be standing on the first as-of-yet unrebased commit.
1169 # We should be standing on the first as-of-yet unrebased commit.
1170 firstunrebased = min([old for old, new in state.iteritems()
1170 firstunrebased = min([old for old, new in state.iteritems()
1171 if new == nullrev])
1171 if new == nullrev])
1172 if firstunrebased in parents:
1172 if firstunrebased in parents:
1173 return True
1173 return True
1174
1174
1175 return False
1175 return False
1176
1176
1177 def abort(repo, originalwd, dest, state, activebookmark=None):
1177 def abort(repo, originalwd, dest, state, activebookmark=None):
1178 '''Restore the repository to its original state. Additional args:
1178 '''Restore the repository to its original state. Additional args:
1179
1179
1180 activebookmark: the name of the bookmark that should be active after the
1180 activebookmark: the name of the bookmark that should be active after the
1181 restore'''
1181 restore'''
1182
1182
1183 try:
1183 try:
        # If the first commits in the rebased set get skipped during the
        # rebase, their values within the state mapping will be the dest rev
        # id. The dstates list must not contain the dest rev (issue4896)
1187 dstates = [s for s in state.values() if s >= 0 and s != dest]
1187 dstates = [s for s in state.values() if s >= 0 and s != dest]
1188 immutable = [d for d in dstates if not repo[d].mutable()]
1188 immutable = [d for d in dstates if not repo[d].mutable()]
1189 cleanup = True
1189 cleanup = True
1190 if immutable:
1190 if immutable:
1191 repo.ui.warn(_("warning: can't clean up public changesets %s\n")
1191 repo.ui.warn(_("warning: can't clean up public changesets %s\n")
1192 % ', '.join(str(repo[r]) for r in immutable),
1192 % ', '.join(str(repo[r]) for r in immutable),
1193 hint=_("see 'hg help phases' for details"))
1193 hint=_("see 'hg help phases' for details"))
1194 cleanup = False
1194 cleanup = False
1195
1195
1196 descendants = set()
1196 descendants = set()
1197 if dstates:
1197 if dstates:
1198 descendants = set(repo.changelog.descendants(dstates))
1198 descendants = set(repo.changelog.descendants(dstates))
1199 if descendants - set(dstates):
1199 if descendants - set(dstates):
1200 repo.ui.warn(_("warning: new changesets detected on destination "
1200 repo.ui.warn(_("warning: new changesets detected on destination "
1201 "branch, can't strip\n"))
1201 "branch, can't strip\n"))
1202 cleanup = False
1202 cleanup = False
1203
1203
1204 if cleanup:
1204 if cleanup:
1205 shouldupdate = False
1205 shouldupdate = False
1206 rebased = filter(lambda x: x >= 0 and x != dest, state.values())
1206 rebased = filter(lambda x: x >= 0 and x != dest, state.values())
1207 if rebased:
1207 if rebased:
1208 strippoints = [
1208 strippoints = [
1209 c.node() for c in repo.set('roots(%ld)', rebased)]
1209 c.node() for c in repo.set('roots(%ld)', rebased)]
1210
1210
1211 updateifonnodes = set(rebased)
1211 updateifonnodes = set(rebased)
1212 updateifonnodes.add(dest)
1212 updateifonnodes.add(dest)
1213 updateifonnodes.add(originalwd)
1213 updateifonnodes.add(originalwd)
1214 shouldupdate = repo['.'].rev() in updateifonnodes
1214 shouldupdate = repo['.'].rev() in updateifonnodes
1215
1215
1216 # Update away from the rebase if necessary
1216 # Update away from the rebase if necessary
1217 if shouldupdate or needupdate(repo, state):
1217 if shouldupdate or needupdate(repo, state):
1218 mergemod.update(repo, originalwd, False, True)
1218 mergemod.update(repo, originalwd, False, True)
1219
1219
1220 # Strip from the first rebased revision
1220 # Strip from the first rebased revision
1221 if rebased:
1221 if rebased:
1222 # no backup of rebased cset versions needed
1222 # no backup of rebased cset versions needed
1223 repair.strip(repo.ui, repo, strippoints)
1223 repair.strip(repo.ui, repo, strippoints)
1224
1224
1225 if activebookmark and activebookmark in repo._bookmarks:
1225 if activebookmark and activebookmark in repo._bookmarks:
1226 bookmarks.activate(repo, activebookmark)
1226 bookmarks.activate(repo, activebookmark)
1227
1227
1228 finally:
1228 finally:
1229 clearstatus(repo)
1229 clearstatus(repo)
1230 clearcollapsemsg(repo)
1230 clearcollapsemsg(repo)
1231 repo.ui.warn(_('rebase aborted\n'))
1231 repo.ui.warn(_('rebase aborted\n'))
1232 return 0
1232 return 0
1233
1233
1234 def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
1234 def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
1235 '''Define which revisions are going to be rebased and where
1235 '''Define which revisions are going to be rebased and where
1236
1236
1237 repo: repo
1237 repo: repo
1238 dest: context
1238 dest: context
1239 rebaseset: set of rev
1239 rebaseset: set of rev
1240 '''
1240 '''
1241 originalwd = repo['.'].rev()
1241 originalwd = repo['.'].rev()
1242 _setrebasesetvisibility(repo, set(rebaseset) | {originalwd})
1242 _setrebasesetvisibility(repo, set(rebaseset) | {originalwd})
1243
1243
1244 # This check isn't strictly necessary, since mq detects commits over an
1244 # This check isn't strictly necessary, since mq detects commits over an
1245 # applied patch. But it prevents messing up the working directory when
1245 # applied patch. But it prevents messing up the working directory when
1246 # a partially completed rebase is blocked by mq.
1246 # a partially completed rebase is blocked by mq.
1247 if 'qtip' in repo.tags() and (dest.node() in
1247 if 'qtip' in repo.tags() and (dest.node() in
1248 [s.node for s in repo.mq.applied]):
1248 [s.node for s in repo.mq.applied]):
1249 raise error.Abort(_('cannot rebase onto an applied mq patch'))
1249 raise error.Abort(_('cannot rebase onto an applied mq patch'))
1250
1250
1251 roots = list(repo.set('roots(%ld)', rebaseset))
1251 roots = list(repo.set('roots(%ld)', rebaseset))
1252 if not roots:
1252 if not roots:
1253 raise error.Abort(_('no matching revisions'))
1253 raise error.Abort(_('no matching revisions'))
1254 roots.sort()
1254 roots.sort()
1255 state = dict.fromkeys(rebaseset, revtodo)
1255 state = dict.fromkeys(rebaseset, revtodo)
1256 detachset = set()
1256 detachset = set()
1257 emptyrebase = True
1257 emptyrebase = True
1258 for root in roots:
1258 for root in roots:
1259 commonbase = root.ancestor(dest)
1259 commonbase = root.ancestor(dest)
1260 if commonbase == root:
1260 if commonbase == root:
1261 raise error.Abort(_('source is ancestor of destination'))
1261 raise error.Abort(_('source is ancestor of destination'))
1262 if commonbase == dest:
1262 if commonbase == dest:
1263 wctx = repo[None]
1263 wctx = repo[None]
1264 if dest == wctx.p1():
1264 if dest == wctx.p1():
1265 # when rebasing to '.', it will use the current wd branch name
1265 # when rebasing to '.', it will use the current wd branch name
1266 samebranch = root.branch() == wctx.branch()
1266 samebranch = root.branch() == wctx.branch()
1267 else:
1267 else:
1268 samebranch = root.branch() == dest.branch()
1268 samebranch = root.branch() == dest.branch()
1269 if not collapse and samebranch and root in dest.children():
1269 if not collapse and samebranch and root in dest.children():
1270 # mark the revision as done by setting its new revision
1270 # mark the revision as done by setting its new revision
1271 # equal to its old (current) revisions
1271 # equal to its old (current) revisions
1272 state[root.rev()] = root.rev()
1272 state[root.rev()] = root.rev()
1273 repo.ui.debug('source is a child of destination\n')
1273 repo.ui.debug('source is a child of destination\n')
1274 continue
1274 continue
1275
1275
1276 emptyrebase = False
1276 emptyrebase = False
1277 repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
1277 repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
1278 # Rebase tries to turn <dest> into a parent of <root> while
1278 # Rebase tries to turn <dest> into a parent of <root> while
1279 # preserving the number of parents of rebased changesets:
1279 # preserving the number of parents of rebased changesets:
1280 #
1280 #
1281 # - A changeset with a single parent will always be rebased as a
1281 # - A changeset with a single parent will always be rebased as a
1282 # changeset with a single parent.
1282 # changeset with a single parent.
1283 #
1283 #
1284 # - A merge will be rebased as merge unless its parents are both
1284 # - A merge will be rebased as merge unless its parents are both
1285 # ancestors of <dest> or are themselves in the rebased set and
1285 # ancestors of <dest> or are themselves in the rebased set and
1286 # pruned while rebased.
1286 # pruned while rebased.
1287 #
1287 #
1288 # If one parent of <root> is an ancestor of <dest>, the rebased
1288 # If one parent of <root> is an ancestor of <dest>, the rebased
1289 # version of this parent will be <dest>. This is always true with
1289 # version of this parent will be <dest>. This is always true with
1290 # --base option.
1290 # --base option.
1291 #
1291 #
1292 # Otherwise, we need to *replace* the original parents with
1292 # Otherwise, we need to *replace* the original parents with
1293 # <dest>. This "detaches" the rebased set from its former location
1293 # <dest>. This "detaches" the rebased set from its former location
1294 # and rebases it onto <dest>. Changes introduced by ancestors of
1294 # and rebases it onto <dest>. Changes introduced by ancestors of
1295 # <root> not common with <dest> (the detachset, marked as
1295 # <root> not common with <dest> (the detachset, marked as
1296 # nullmerge) are "removed" from the rebased changesets.
1296 # nullmerge) are "removed" from the rebased changesets.
1297 #
1297 #
1298 # - If <root> has a single parent, set it to <dest>.
1298 # - If <root> has a single parent, set it to <dest>.
1299 #
1299 #
1300 # - If <root> is a merge, we cannot decide which parent to
1300 # - If <root> is a merge, we cannot decide which parent to
1301 # replace, the rebase operation is not clearly defined.
1301 # replace, the rebase operation is not clearly defined.
1302 #
1302 #
        # The table below sums up this behavior:
        #
        # +------------------+----------------------+-------------------------+
        # |                  | one parent           | merge                   |
        # +------------------+----------------------+-------------------------+
        # | parent in        | new parent is <dest> | parents in ::<dest> are |
        # | ::<dest>         |                      | remapped to <dest>      |
        # +------------------+----------------------+-------------------------+
        # | unrelated source | new parent is <dest> | ambiguous, abort        |
        # +------------------+----------------------+-------------------------+
        #
        # The actual abort is handled by `defineparents`
1315 if len(root.parents()) <= 1:
1315 if len(root.parents()) <= 1:
1316 # ancestors of <root> not ancestors of <dest>
1316 # ancestors of <root> not ancestors of <dest>
1317 detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
1317 detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
1318 [root.rev()]))
1318 [root.rev()]))
1319 if emptyrebase:
1319 if emptyrebase:
1320 return None
1320 return None
1321 for rev in sorted(state):
1321 for rev in sorted(state):
1322 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
1322 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
1323 # if all parents of this revision are done, then so is this revision
1323 # if all parents of this revision are done, then so is this revision
1324 if parents and all((state.get(p) == p for p in parents)):
1324 if parents and all((state.get(p) == p for p in parents)):
1325 state[rev] = rev
1325 state[rev] = rev
1326 for r in detachset:
1326 for r in detachset:
1327 if r not in state:
1327 if r not in state:
1328 state[r] = nullmerge
1328 state[r] = nullmerge
1329 if len(roots) > 1:
1329 if len(roots) > 1:
        # If we have multiple roots, we may have "holes" in the rebase set.
        # Rebase roots that descend from those "holes" should not be detached
        # as the other roots are. We use the special `revignored` to inform
        # rebase that the revision should be ignored but that `defineparents`
        # should search for a rebase destination that makes sense regarding
        # the rebased topology.
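        # Hypothetical illustration: rebasing only revisions 2 and 4 out of
        # the chain 1-2-3-4 leaves a "hole" at 3; 3 is marked revignored so
        # that 4 still ends up rebased relative to the new copy of 2 instead
        # of being detached onto the destination directly.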
1335 rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
1335 rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
1336 for ignored in set(rebasedomain) - set(rebaseset):
1336 for ignored in set(rebasedomain) - set(rebaseset):
1337 state[ignored] = revignored
1337 state[ignored] = revignored
1338 for r in obsoletenotrebased:
1338 for r in obsoletenotrebased:
1339 if obsoletenotrebased[r] is None:
1339 if obsoletenotrebased[r] is None:
1340 state[r] = revpruned
1340 state[r] = revpruned
1341 else:
1341 else:
1342 state[r] = revprecursor
1342 state[r] = revprecursor
1343 return originalwd, dest.rev(), state
1343 return originalwd, dest.rev(), state
1344
1344
1345 def clearrebased(ui, repo, state, skipped, collapsedas=None):
1345 def clearrebased(ui, repo, state, skipped, collapsedas=None):
    """dispose of rebased revisions at the end of the rebase

    If `collapsedas` is not None, the rebase was a collapse whose result is
    the `collapsedas` node."""
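    # Sketch of the two strategies used below: with obsolescence markers
    # enabled, each rebased revision is recorded as succeeded by its rebased
    # copy (or by `collapsedas` for a collapse); otherwise the original
    # revisions are stripped from the repository.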
1350 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1350 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1351 markers = []
1351 markers = []
1352 for rev, newrev in sorted(state.items()):
1352 for rev, newrev in sorted(state.items()):
1353 if newrev >= 0 and newrev != rev:
1353 if newrev >= 0 and newrev != rev:
1354 if rev in skipped:
1354 if rev in skipped:
1355 succs = ()
1355 succs = ()
1356 elif collapsedas is not None:
1356 elif collapsedas is not None:
1357 succs = (repo[collapsedas],)
1357 succs = (repo[collapsedas],)
1358 else:
1358 else:
1359 succs = (repo[newrev],)
1359 succs = (repo[newrev],)
1360 markers.append((repo[rev], succs))
1360 markers.append((repo[rev], succs))
1361 if markers:
1361 if markers:
1362 obsolete.createmarkers(repo, markers, operation='rebase')
1362 obsolete.createmarkers(repo, markers, operation='rebase')
1363 else:
1363 else:
1364 rebased = [rev for rev in state
1364 rebased = [rev for rev in state
1365 if state[rev] > nullmerge and state[rev] != rev]
1365 if state[rev] > nullmerge and state[rev] != rev]
1366 if rebased:
1366 if rebased:
1367 stripped = []
1367 stripped = []
1368 for root in repo.set('roots(%ld)', rebased):
1368 for root in repo.set('roots(%ld)', rebased):
1369 if set(repo.changelog.descendants([root.rev()])) - set(state):
1369 if set(repo.changelog.descendants([root.rev()])) - set(state):
1370 ui.warn(_("warning: new changesets detected "
1370 ui.warn(_("warning: new changesets detected "
1371 "on source branch, not stripping\n"))
1371 "on source branch, not stripping\n"))
1372 else:
1372 else:
1373 stripped.append(root.node())
1373 stripped.append(root.node())
1374 if stripped:
1374 if stripped:
1375 # backup the old csets by default
1375 # backup the old csets by default
1376 repair.strip(ui, repo, stripped, "all")
1376 repair.strip(ui, repo, stripped, "all")
1377
1377
1378
1378
1379 def pullrebase(orig, ui, repo, *args, **opts):
1379 def pullrebase(orig, ui, repo, *args, **opts):
1380 'Call rebase after pull if the latter has been invoked with --rebase'
1380 'Call rebase after pull if the latter has been invoked with --rebase'
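    # Roughly: run the wrapped pull first, then (if new revisions came in)
    # run rebase with the freshly pulled revisions preferred as destination
    # space; this is an approximation of "hg pull" followed by
    # "hg rebase -d DEST".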
1381 ret = None
1381 ret = None
1382 if opts.get('rebase'):
1382 if opts.get('rebase'):
1383 if ui.configbool('commands', 'rebase.requiredest'):
1383 if ui.configbool('commands', 'rebase.requiredest'):
1384 msg = _('rebase destination required by configuration')
1384 msg = _('rebase destination required by configuration')
1385 hint = _('use hg pull followed by hg rebase -d DEST')
1385 hint = _('use hg pull followed by hg rebase -d DEST')
1386 raise error.Abort(msg, hint=hint)
1386 raise error.Abort(msg, hint=hint)
1387
1387
1388 wlock = lock = None
1388 wlock = lock = None
1389 try:
1389 try:
1390 wlock = repo.wlock()
1390 wlock = repo.wlock()
1391 lock = repo.lock()
1391 lock = repo.lock()
1392 if opts.get('update'):
1392 if opts.get('update'):
1393 del opts['update']
1393 del opts['update']
1394 ui.debug('--update and --rebase are not compatible, ignoring '
1394 ui.debug('--update and --rebase are not compatible, ignoring '
1395 'the update flag\n')
1395 'the update flag\n')
1396
1396
1397 cmdutil.checkunfinished(repo)
1397 cmdutil.checkunfinished(repo)
1398 cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
1398 cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
1399 'please commit or shelve your changes first'))
1399 'please commit or shelve your changes first'))
1400
1400
1401 revsprepull = len(repo)
1401 revsprepull = len(repo)
1402 origpostincoming = commands.postincoming
1402 origpostincoming = commands.postincoming
1403 def _dummy(*args, **kwargs):
1403 def _dummy(*args, **kwargs):
1404 pass
1404 pass
1405 commands.postincoming = _dummy
1405 commands.postincoming = _dummy
1406 try:
1406 try:
1407 ret = orig(ui, repo, *args, **opts)
1407 ret = orig(ui, repo, *args, **opts)
1408 finally:
1408 finally:
1409 commands.postincoming = origpostincoming
1409 commands.postincoming = origpostincoming
1410 revspostpull = len(repo)
1410 revspostpull = len(repo)
1411 if revspostpull > revsprepull:
1411 if revspostpull > revsprepull:
                # the --rev option from pull conflicts with rebase's own
                # --rev, so drop it
1414 if 'rev' in opts:
1414 if 'rev' in opts:
1415 del opts['rev']
1415 del opts['rev']
1416 # positional argument from pull conflicts with rebase's own
1416 # positional argument from pull conflicts with rebase's own
1417 # --source.
1417 # --source.
1418 if 'source' in opts:
1418 if 'source' in opts:
1419 del opts['source']
1419 del opts['source']
1420 # revsprepull is the len of the repo, not revnum of tip.
1420 # revsprepull is the len of the repo, not revnum of tip.
1421 destspace = list(repo.changelog.revs(start=revsprepull))
1421 destspace = list(repo.changelog.revs(start=revsprepull))
1422 opts['_destspace'] = destspace
1422 opts['_destspace'] = destspace
1423 try:
1423 try:
1424 rebase(ui, repo, **opts)
1424 rebase(ui, repo, **opts)
1425 except error.NoMergeDestAbort:
1425 except error.NoMergeDestAbort:
1426 # we can maybe update instead
1426 # we can maybe update instead
1427 rev, _a, _b = destutil.destupdate(repo)
1427 rev, _a, _b = destutil.destupdate(repo)
1428 if rev == repo['.'].rev():
1428 if rev == repo['.'].rev():
1429 ui.status(_('nothing to rebase\n'))
1429 ui.status(_('nothing to rebase\n'))
1430 else:
1430 else:
1431 ui.status(_('nothing to rebase - updating instead\n'))
1431 ui.status(_('nothing to rebase - updating instead\n'))
1432 # not passing argument to get the bare update behavior
1432 # not passing argument to get the bare update behavior
1433 # with warning and trumpets
1433 # with warning and trumpets
1434 commands.update(ui, repo)
1434 commands.update(ui, repo)
1435 finally:
1435 finally:
1436 release(lock, wlock)
1436 release(lock, wlock)
1437 else:
1437 else:
1438 if opts.get('tool'):
1438 if opts.get('tool'):
1439 raise error.Abort(_('--tool can only be used with --rebase'))
1439 raise error.Abort(_('--tool can only be used with --rebase'))
1440 ret = orig(ui, repo, *args, **opts)
1440 ret = orig(ui, repo, *args, **opts)
1441
1441
1442 return ret
1442 return ret
1443
1443
1444 def _setrebasesetvisibility(repo, revs):
1444 def _setrebasesetvisibility(repo, revs):
    """store the currently rebased set on the repo object

    This is used by another function to prevent rebased revisions from
    becoming hidden (see issue4504)"""
1449 repo = repo.unfiltered()
1449 repo = repo.unfiltered()
1450 repo._rebaseset = revs
1450 repo._rebaseset = revs
1451 # invalidate cache if visibility changes
1451 # invalidate cache if visibility changes
1452 hiddens = repo.filteredrevcache.get('visible', set())
1452 hiddens = repo.filteredrevcache.get('visible', set())
1453 if revs & hiddens:
1453 if revs & hiddens:
1454 repo.invalidatevolatilesets()
1454 repo.invalidatevolatilesets()
1455
1455
1456 def _clearrebasesetvisibiliy(repo):
1456 def _clearrebasesetvisibiliy(repo):
1457 """remove rebaseset data from the repo"""
1457 """remove rebaseset data from the repo"""
1458 repo = repo.unfiltered()
1458 repo = repo.unfiltered()
1459 if '_rebaseset' in vars(repo):
1459 if '_rebaseset' in vars(repo):
1460 del repo._rebaseset
1460 del repo._rebaseset
1461
1461
1462 def _rebasedvisible(orig, repo):
1462 def _rebasedvisible(orig, repo):
1463 """ensure rebased revs stay visible (see issue4504)"""
1463 """ensure rebased revs stay visible (see issue4504)"""
1464 blockers = orig(repo)
1464 blockers = orig(repo)
1465 blockers.update(getattr(repo, '_rebaseset', ()))
1465 blockers.update(getattr(repo, '_rebaseset', ()))
1466 return blockers
1466 return blockers
1467
1467
1468 def _filterobsoleterevs(repo, revs):
1468 def _filterobsoleterevs(repo, revs):
1469 """returns a set of the obsolete revisions in revs"""
1469 """returns a set of the obsolete revisions in revs"""
1470 return set(r for r in revs if repo[r].obsolete())
1470 return set(r for r in revs if repo[r].obsolete())
1471
1471
1472 def _computeobsoletenotrebased(repo, rebaseobsrevs, dest):
1472 def _computeobsoletenotrebased(repo, rebaseobsrevs, dest):
    """return a mapping obsolete => successor for all obsolete nodes to be
    rebased that have a successor in the destination

    obsolete => None entries in the mapping indicate nodes with no successor"""
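    # Hypothetical shape of the result: {12: 30, 15: None} would mean that
    # obsolete revision 12 already has its successor 30 in the destination,
    # while 15 was pruned (no successor at all).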
1477 obsoletenotrebased = {}
1477 obsoletenotrebased = {}
1478
1478
1479 # Build a mapping successor => obsolete nodes for the obsolete
1479 # Build a mapping successor => obsolete nodes for the obsolete
1480 # nodes to be rebased
1480 # nodes to be rebased
1481 allsuccessors = {}
1481 allsuccessors = {}
1482 cl = repo.changelog
1482 cl = repo.changelog
1483 for r in rebaseobsrevs:
1483 for r in rebaseobsrevs:
1484 node = cl.node(r)
1484 node = cl.node(r)
1485 for s in obsolete.allsuccessors(repo.obsstore, [node]):
1485 for s in obsolete.allsuccessors(repo.obsstore, [node]):
1486 try:
1486 try:
1487 allsuccessors[cl.rev(s)] = cl.rev(node)
1487 allsuccessors[cl.rev(s)] = cl.rev(node)
1488 except LookupError:
1488 except LookupError:
1489 pass
1489 pass
1490
1490
1491 if allsuccessors:
1491 if allsuccessors:
1492 # Look for successors of obsolete nodes to be rebased among
1492 # Look for successors of obsolete nodes to be rebased among
1493 # the ancestors of dest
1493 # the ancestors of dest
1494 ancs = cl.ancestors([repo[dest].rev()],
1494 ancs = cl.ancestors([repo[dest].rev()],
1495 stoprev=min(allsuccessors),
1495 stoprev=min(allsuccessors),
1496 inclusive=True)
1496 inclusive=True)
1497 for s in allsuccessors:
1497 for s in allsuccessors:
1498 if s in ancs:
1498 if s in ancs:
1499 obsoletenotrebased[allsuccessors[s]] = s
1499 obsoletenotrebased[allsuccessors[s]] = s
1500 elif (s == allsuccessors[s] and
1500 elif (s == allsuccessors[s] and
1501 allsuccessors.values().count(s) == 1):
1501 allsuccessors.values().count(s) == 1):
1502 # plain prune
1502 # plain prune
1503 obsoletenotrebased[s] = None
1503 obsoletenotrebased[s] = None
1504
1504
1505 return obsoletenotrebased
1505 return obsoletenotrebased
1506
1506
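The decision rule in _computeobsoletenotrebased() is easier to see on plain data. A minimal standalone sketch (illustration only, not part of rebase.py; dicts and sets stand in for the changelog and obsstore):

    def compute_obsolete_not_rebased(successors, dest_ancestors):
        """successors: successor rev -> obsolete rev (like 'allsuccessors' above)
        dest_ancestors: revs already reachable from the rebase destination"""
        mapping = {}
        for succ, obs in successors.items():
            if succ in dest_ancestors:
                # the successor already lives in the destination
                mapping[obs] = succ
            elif succ == obs and list(successors.values()).count(succ) == 1:
                # plain prune: the node is its own only entry, no real successor
                mapping[succ] = None
        return mapping

    # rev 4 was rewritten as rev 7 (an ancestor of dest), rev 5 was pruned:
    print(compute_obsolete_not_rebased({7: 4, 5: 5}, {0, 1, 7}))
    # -> {4: 7, 5: None}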
1507 def summaryhook(ui, repo):
1507 def summaryhook(ui, repo):
1508 if not repo.vfs.exists('rebasestate'):
1508 if not repo.vfs.exists('rebasestate'):
1509 return
1509 return
1510 try:
1510 try:
1511 rbsrt = rebaseruntime(repo, ui, {})
1511 rbsrt = rebaseruntime(repo, ui, {})
1512 rbsrt.restorestatus()
1512 rbsrt.restorestatus()
1513 state = rbsrt.state
1513 state = rbsrt.state
1514 except error.RepoLookupError:
1514 except error.RepoLookupError:
1515 # i18n: column positioning for "hg summary"
1515 # i18n: column positioning for "hg summary"
1516 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1516 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1517 ui.write(msg)
1517 ui.write(msg)
1518 return
1518 return
1519 numrebased = len([i for i in state.itervalues() if i >= 0])
1519 numrebased = len([i for i in state.itervalues() if i >= 0])
1520 # i18n: column positioning for "hg summary"
1520 # i18n: column positioning for "hg summary"
1521 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1521 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1522 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1522 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1523 ui.label(_('%d remaining'), 'rebase.remaining') %
1523 ui.label(_('%d remaining'), 'rebase.remaining') %
1524 (len(state) - numrebased)))
1524 (len(state) - numrebased)))
1525
1525
1526 def uisetup(ui):
1526 def uisetup(ui):
1527 # Replace pull with a decorator to provide --rebase option
1527 # Replace pull with a decorator to provide --rebase option
1528 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1528 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1529 entry[1].append(('', 'rebase', None,
1529 entry[1].append(('', 'rebase', None,
1530 _("rebase working directory to branch head")))
1530 _("rebase working directory to branch head")))
1531 entry[1].append(('t', 'tool', '',
1531 entry[1].append(('t', 'tool', '',
1532 _("specify merge tool for rebase")))
1532 _("specify merge tool for rebase")))
1533 cmdutil.summaryhooks.add('rebase', summaryhook)
1533 cmdutil.summaryhooks.add('rebase', summaryhook)
1534 cmdutil.unfinishedstates.append(
1534 cmdutil.unfinishedstates.append(
1535 ['rebasestate', False, False, _('rebase in progress'),
1535 ['rebasestate', False, False, _('rebase in progress'),
1536 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1536 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1537 cmdutil.afterresolvedstates.append(
1537 cmdutil.afterresolvedstates.append(
1538 ['rebasestate', _('hg rebase --continue')])
1538 ['rebasestate', _('hg rebase --continue')])
1539 # ensure rebased rev are not hidden
1539 # ensure rebased rev are not hidden
1540 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
1540 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
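uisetup() above shows the standard way an extension grafts extra flags onto an existing command: wrap the entry in the command table and append (short, long, default, help) option tuples to it. A minimal sketch of the same pattern for a hypothetical third-party extension (the _verifyingpull wrapper and --verify flag are invented for illustration; only extensions.wrapcommand and the option-tuple format used above are assumed):

    from mercurial import commands, extensions
    from mercurial.i18n import _

    def _verifyingpull(orig, ui, repo, *args, **opts):
        # run the original pull, then optionally verify the repository
        ret = orig(ui, repo, *args, **opts)
        if opts.get('verify'):
            ui.status(_('running verify after pull\n'))
            commands.verify(ui, repo)
        return ret

    def uisetup(ui):
        entry = extensions.wrapcommand(commands.table, 'pull', _verifyingpull)
        entry[1].append(('', 'verify', None, _('run "hg verify" after pull')))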
@@ -1,148 +1,148 b''
1 # record.py
1 # record.py
2 #
2 #
3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''commands to interactively select changes for commit/qrefresh (DEPRECATED)
8 '''commands to interactively select changes for commit/qrefresh (DEPRECATED)
9
9
10 The feature provided by this extension has been moved into core Mercurial as
10 The feature provided by this extension has been moved into core Mercurial as
11 :hg:`commit --interactive`.'''
11 :hg:`commit --interactive`.'''
12
12
13 from __future__ import absolute_import
13 from __future__ import absolute_import
14
14
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16 from mercurial import (
16 from mercurial import (
17 cmdutil,
17 cmdutil,
18 commands,
18 commands,
19 error,
19 error,
20 extensions,
20 extensions,
21 registrar,
21 registrar,
22 )
22 )
23
23
24 cmdtable = {}
24 cmdtable = {}
25 command = registrar.command(cmdtable)
25 command = registrar.command(cmdtable)
26 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
26 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
27 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
27 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
28 # be specifying the version(s) of Mercurial they are tested with, or
28 # be specifying the version(s) of Mercurial they are tested with, or
29 # leave the attribute unspecified.
29 # leave the attribute unspecified.
30 testedwith = 'ships-with-hg-core'
30 testedwith = 'ships-with-hg-core'
31
31
32
32
33 @command("record",
33 @command("record",
34 # same options as commit + white space diff options
34 # same options as commit + white space diff options
35 [c for c in commands.table['^commit|ci'][1][:]
35 [c for c in commands.table['^commit|ci'][1][:]
36 if c[1] != "interactive"] + commands.diffwsopts,
36 if c[1] != "interactive"] + cmdutil.diffwsopts,
37 _('hg record [OPTION]... [FILE]...'))
37 _('hg record [OPTION]... [FILE]...'))
38 def record(ui, repo, *pats, **opts):
38 def record(ui, repo, *pats, **opts):
39 '''interactively select changes to commit
39 '''interactively select changes to commit
40
40
41 If a list of files is omitted, all changes reported by :hg:`status`
41 If a list of files is omitted, all changes reported by :hg:`status`
42 will be candidates for recording.
42 will be candidates for recording.
43
43
44 See :hg:`help dates` for a list of formats valid for -d/--date.
44 See :hg:`help dates` for a list of formats valid for -d/--date.
45
45
46 If using the text interface (see :hg:`help config`),
46 If using the text interface (see :hg:`help config`),
47 you will be prompted for whether to record changes to each
47 you will be prompted for whether to record changes to each
48 modified file, and for files with multiple changes, for each
48 modified file, and for files with multiple changes, for each
49 change to use. For each query, the following responses are
49 change to use. For each query, the following responses are
50 possible::
50 possible::
51
51
52 y - record this change
52 y - record this change
53 n - skip this change
53 n - skip this change
54 e - edit this change manually
54 e - edit this change manually
55
55
56 s - skip remaining changes to this file
56 s - skip remaining changes to this file
57 f - record remaining changes to this file
57 f - record remaining changes to this file
58
58
59 d - done, skip remaining changes and files
59 d - done, skip remaining changes and files
60 a - record all changes to all remaining files
60 a - record all changes to all remaining files
61 q - quit, recording no changes
61 q - quit, recording no changes
62
62
63 ? - display help
63 ? - display help
64
64
65 This command is not available when committing a merge.'''
65 This command is not available when committing a merge.'''
66
66
67 if not ui.interactive():
67 if not ui.interactive():
68 raise error.Abort(_('running non-interactively, use %s instead') %
68 raise error.Abort(_('running non-interactively, use %s instead') %
69 'commit')
69 'commit')
70
70
71 opts["interactive"] = True
71 opts["interactive"] = True
72 overrides = {('experimental', 'crecord'): False}
72 overrides = {('experimental', 'crecord'): False}
73 with ui.configoverride(overrides, 'record'):
73 with ui.configoverride(overrides, 'record'):
74 return commands.commit(ui, repo, *pats, **opts)
74 return commands.commit(ui, repo, *pats, **opts)
75
75
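record() above is essentially 'commit --interactive' with the curses interface forced off: it flips the interactive flag and delegates to the core commit command under a temporary config override. A hypothetical command reusing the same delegation shape (sketch only; 'textcommit' is an invented name, and only ui.configoverride and commands.commit as used above are assumed):

    from mercurial import commands

    def textcommit(ui, repo, *pats, **opts):
        # force the text-mode chunk selector, then hand off to core commit
        opts['interactive'] = True
        overrides = {('experimental', 'crecord'): False}
        with ui.configoverride(overrides, 'textcommit'):
            return commands.commit(ui, repo, *pats, **opts)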
76 def qrefresh(origfn, ui, repo, *pats, **opts):
76 def qrefresh(origfn, ui, repo, *pats, **opts):
77 if not opts['interactive']:
77 if not opts['interactive']:
78 return origfn(ui, repo, *pats, **opts)
78 return origfn(ui, repo, *pats, **opts)
79
79
80 mq = extensions.find('mq')
80 mq = extensions.find('mq')
81
81
82 def committomq(ui, repo, *pats, **opts):
82 def committomq(ui, repo, *pats, **opts):
83 # At this point the working copy contains only changes that
83 # At this point the working copy contains only changes that
84 # were accepted. All other changes were reverted.
84 # were accepted. All other changes were reverted.
85 # We can't pass *pats here since qrefresh will undo all other
85 # We can't pass *pats here since qrefresh will undo all other
86 # changed files in the patch that aren't in pats.
86 # changed files in the patch that aren't in pats.
87 mq.refresh(ui, repo, **opts)
87 mq.refresh(ui, repo, **opts)
88
88
89 # backup all changed files
89 # backup all changed files
90 cmdutil.dorecord(ui, repo, committomq, None, True,
90 cmdutil.dorecord(ui, repo, committomq, None, True,
91 cmdutil.recordfilter, *pats, **opts)
91 cmdutil.recordfilter, *pats, **opts)
92
92
93 # This command registration is replaced during uisetup().
93 # This command registration is replaced during uisetup().
94 @command('qrecord',
94 @command('qrecord',
95 [],
95 [],
96 _('hg qrecord [OPTION]... PATCH [FILE]...'),
96 _('hg qrecord [OPTION]... PATCH [FILE]...'),
97 inferrepo=True)
97 inferrepo=True)
98 def qrecord(ui, repo, patch, *pats, **opts):
98 def qrecord(ui, repo, patch, *pats, **opts):
99 '''interactively record a new patch
99 '''interactively record a new patch
100
100
101 See :hg:`help qnew` & :hg:`help record` for more information and
101 See :hg:`help qnew` & :hg:`help record` for more information and
102 usage.
102 usage.
103 '''
103 '''
104 return _qrecord('qnew', ui, repo, patch, *pats, **opts)
104 return _qrecord('qnew', ui, repo, patch, *pats, **opts)
105
105
106 def _qrecord(cmdsuggest, ui, repo, patch, *pats, **opts):
106 def _qrecord(cmdsuggest, ui, repo, patch, *pats, **opts):
107 try:
107 try:
108 mq = extensions.find('mq')
108 mq = extensions.find('mq')
109 except KeyError:
109 except KeyError:
110 raise error.Abort(_("'mq' extension not loaded"))
110 raise error.Abort(_("'mq' extension not loaded"))
111
111
112 repo.mq.checkpatchname(patch)
112 repo.mq.checkpatchname(patch)
113
113
114 def committomq(ui, repo, *pats, **opts):
114 def committomq(ui, repo, *pats, **opts):
115 opts['checkname'] = False
115 opts['checkname'] = False
116 mq.new(ui, repo, patch, *pats, **opts)
116 mq.new(ui, repo, patch, *pats, **opts)
117
117
118 overrides = {('experimental', 'crecord'): False}
118 overrides = {('experimental', 'crecord'): False}
119 with ui.configoverride(overrides, 'record'):
119 with ui.configoverride(overrides, 'record'):
120 cmdutil.dorecord(ui, repo, committomq, cmdsuggest, False,
120 cmdutil.dorecord(ui, repo, committomq, cmdsuggest, False,
121 cmdutil.recordfilter, *pats, **opts)
121 cmdutil.recordfilter, *pats, **opts)
122
122
123 def qnew(origfn, ui, repo, patch, *args, **opts):
123 def qnew(origfn, ui, repo, patch, *args, **opts):
124 if opts['interactive']:
124 if opts['interactive']:
125 return _qrecord(None, ui, repo, patch, *args, **opts)
125 return _qrecord(None, ui, repo, patch, *args, **opts)
126 return origfn(ui, repo, patch, *args, **opts)
126 return origfn(ui, repo, patch, *args, **opts)
127
127
128
128
129 def uisetup(ui):
129 def uisetup(ui):
130 try:
130 try:
131 mq = extensions.find('mq')
131 mq = extensions.find('mq')
132 except KeyError:
132 except KeyError:
133 return
133 return
134
134
135 cmdtable["qrecord"] = \
135 cmdtable["qrecord"] = \
136 (qrecord,
136 (qrecord,
137 # same options as qnew, but copy them so we don't get
137 # same options as qnew, but copy them so we don't get
138 # -i/--interactive for qrecord and add white space diff options
138 # -i/--interactive for qrecord and add white space diff options
139 mq.cmdtable['^qnew'][1][:] + commands.diffwsopts,
139 mq.cmdtable['^qnew'][1][:] + cmdutil.diffwsopts,
140 _('hg qrecord [OPTION]... PATCH [FILE]...'))
140 _('hg qrecord [OPTION]... PATCH [FILE]...'))
141
141
142 _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
142 _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
143 _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
143 _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
144 _("interactively select changes to refresh"))
144 _("interactively select changes to refresh"))
145
145
146 def _wrapcmd(cmd, table, wrapfn, msg):
146 def _wrapcmd(cmd, table, wrapfn, msg):
147 entry = extensions.wrapcommand(table, cmd, wrapfn)
147 entry = extensions.wrapcommand(table, cmd, wrapfn)
148 entry[1].append(('i', 'interactive', None, msg))
148 entry[1].append(('i', 'interactive', None, msg))
@@ -1,1050 +1,1049 b''
1 # shelve.py - save/restore working directory state
1 # shelve.py - save/restore working directory state
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """save and restore changes to the working directory
8 """save and restore changes to the working directory
9
9
10 The "hg shelve" command saves changes made to the working directory
10 The "hg shelve" command saves changes made to the working directory
11 and reverts those changes, resetting the working directory to a clean
11 and reverts those changes, resetting the working directory to a clean
12 state.
12 state.
13
13
14 Later on, the "hg unshelve" command restores the changes saved by "hg
14 Later on, the "hg unshelve" command restores the changes saved by "hg
15 shelve". Changes can be restored even after updating to a different
15 shelve". Changes can be restored even after updating to a different
16 parent, in which case Mercurial's merge machinery will resolve any
16 parent, in which case Mercurial's merge machinery will resolve any
17 conflicts if necessary.
17 conflicts if necessary.
18
18
19 You can have more than one shelved change outstanding at a time; each
19 You can have more than one shelved change outstanding at a time; each
20 shelved change has a distinct name. For details, see the help for "hg
20 shelved change has a distinct name. For details, see the help for "hg
21 shelve".
21 shelve".
22 """
22 """
23 from __future__ import absolute_import
23 from __future__ import absolute_import
24
24
25 import collections
25 import collections
26 import errno
26 import errno
27 import itertools
27 import itertools
28
28
29 from mercurial.i18n import _
29 from mercurial.i18n import _
30 from mercurial import (
30 from mercurial import (
31 bookmarks,
31 bookmarks,
32 bundle2,
32 bundle2,
33 bundlerepo,
33 bundlerepo,
34 changegroup,
34 changegroup,
35 cmdutil,
35 cmdutil,
36 commands,
37 error,
36 error,
38 exchange,
37 exchange,
39 hg,
38 hg,
40 lock as lockmod,
39 lock as lockmod,
41 mdiff,
40 mdiff,
42 merge,
41 merge,
43 node as nodemod,
42 node as nodemod,
44 patch,
43 patch,
45 phases,
44 phases,
46 registrar,
45 registrar,
47 repair,
46 repair,
48 scmutil,
47 scmutil,
49 templatefilters,
48 templatefilters,
50 util,
49 util,
51 vfs as vfsmod,
50 vfs as vfsmod,
52 )
51 )
53
52
54 from . import (
53 from . import (
55 rebase,
54 rebase,
56 )
55 )
57
56
58 cmdtable = {}
57 cmdtable = {}
59 command = registrar.command(cmdtable)
58 command = registrar.command(cmdtable)
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
59 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
60 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 # be specifying the version(s) of Mercurial they are tested with, or
61 # be specifying the version(s) of Mercurial they are tested with, or
63 # leave the attribute unspecified.
62 # leave the attribute unspecified.
64 testedwith = 'ships-with-hg-core'
63 testedwith = 'ships-with-hg-core'
65
64
66 backupdir = 'shelve-backup'
65 backupdir = 'shelve-backup'
67 shelvedir = 'shelved'
66 shelvedir = 'shelved'
68 shelvefileextensions = ['hg', 'patch', 'oshelve']
67 shelvefileextensions = ['hg', 'patch', 'oshelve']
69 # universal extension is present in all types of shelves
68 # universal extension is present in all types of shelves
70 patchextension = 'patch'
69 patchextension = 'patch'
71
70
72 # we never need the user, so we use a
71 # we never need the user, so we use a
73 # generic user for all shelve operations
72 # generic user for all shelve operations
74 shelveuser = 'shelve@localhost'
73 shelveuser = 'shelve@localhost'
75
74
76 class shelvedfile(object):
75 class shelvedfile(object):
77 """Helper for the file storing a single shelve
76 """Helper for the file storing a single shelve
78
77
79 Handles common functions on shelve files (.hg/.patch) using
78 Handles common functions on shelve files (.hg/.patch) using
80 the vfs layer"""
79 the vfs layer"""
81 def __init__(self, repo, name, filetype=None):
80 def __init__(self, repo, name, filetype=None):
82 self.repo = repo
81 self.repo = repo
83 self.name = name
82 self.name = name
84 self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
83 self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
85 self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
84 self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
86 self.ui = self.repo.ui
85 self.ui = self.repo.ui
87 if filetype:
86 if filetype:
88 self.fname = name + '.' + filetype
87 self.fname = name + '.' + filetype
89 else:
88 else:
90 self.fname = name
89 self.fname = name
91
90
92 def exists(self):
91 def exists(self):
93 return self.vfs.exists(self.fname)
92 return self.vfs.exists(self.fname)
94
93
95 def filename(self):
94 def filename(self):
96 return self.vfs.join(self.fname)
95 return self.vfs.join(self.fname)
97
96
98 def backupfilename(self):
97 def backupfilename(self):
99 def gennames(base):
98 def gennames(base):
100 yield base
99 yield base
101 base, ext = base.rsplit('.', 1)
100 base, ext = base.rsplit('.', 1)
102 for i in itertools.count(1):
101 for i in itertools.count(1):
103 yield '%s-%d.%s' % (base, i, ext)
102 yield '%s-%d.%s' % (base, i, ext)
104
103
105 name = self.backupvfs.join(self.fname)
104 name = self.backupvfs.join(self.fname)
106 for n in gennames(name):
105 for n in gennames(name):
107 if not self.backupvfs.exists(n):
106 if not self.backupvfs.exists(n):
108 return n
107 return n
109
108
110 def movetobackup(self):
109 def movetobackup(self):
111 if not self.backupvfs.isdir():
110 if not self.backupvfs.isdir():
112 self.backupvfs.makedir()
111 self.backupvfs.makedir()
113 util.rename(self.filename(), self.backupfilename())
112 util.rename(self.filename(), self.backupfilename())
114
113
115 def stat(self):
114 def stat(self):
116 return self.vfs.stat(self.fname)
115 return self.vfs.stat(self.fname)
117
116
118 def opener(self, mode='rb'):
117 def opener(self, mode='rb'):
119 try:
118 try:
120 return self.vfs(self.fname, mode)
119 return self.vfs(self.fname, mode)
121 except IOError as err:
120 except IOError as err:
122 if err.errno != errno.ENOENT:
121 if err.errno != errno.ENOENT:
123 raise
122 raise
124 raise error.Abort(_("shelved change '%s' not found") % self.name)
123 raise error.Abort(_("shelved change '%s' not found") % self.name)
125
124
126 def applybundle(self):
125 def applybundle(self):
127 fp = self.opener()
126 fp = self.opener()
128 try:
127 try:
129 gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
128 gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
130 if not isinstance(gen, bundle2.unbundle20):
129 if not isinstance(gen, bundle2.unbundle20):
131 gen.apply(self.repo, 'unshelve',
130 gen.apply(self.repo, 'unshelve',
132 'bundle:' + self.vfs.join(self.fname),
131 'bundle:' + self.vfs.join(self.fname),
133 targetphase=phases.secret)
132 targetphase=phases.secret)
134 if isinstance(gen, bundle2.unbundle20):
133 if isinstance(gen, bundle2.unbundle20):
135 bundle2.applybundle(self.repo, gen,
134 bundle2.applybundle(self.repo, gen,
136 self.repo.currenttransaction(),
135 self.repo.currenttransaction(),
137 source='unshelve',
136 source='unshelve',
138 url='bundle:' + self.vfs.join(self.fname))
137 url='bundle:' + self.vfs.join(self.fname))
139 finally:
138 finally:
140 fp.close()
139 fp.close()
141
140
142 def bundlerepo(self):
141 def bundlerepo(self):
143 return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
142 return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
144 self.vfs.join(self.fname))
143 self.vfs.join(self.fname))
145 def writebundle(self, bases, node):
144 def writebundle(self, bases, node):
146 cgversion = changegroup.safeversion(self.repo)
145 cgversion = changegroup.safeversion(self.repo)
147 if cgversion == '01':
146 if cgversion == '01':
148 btype = 'HG10BZ'
147 btype = 'HG10BZ'
149 compression = None
148 compression = None
150 else:
149 else:
151 btype = 'HG20'
150 btype = 'HG20'
152 compression = 'BZ'
151 compression = 'BZ'
153
152
154 cg = changegroup.changegroupsubset(self.repo, bases, [node], 'shelve',
153 cg = changegroup.changegroupsubset(self.repo, bases, [node], 'shelve',
155 version=cgversion)
154 version=cgversion)
156 bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
155 bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
157 compression=compression)
156 compression=compression)
158
157
159 def writeobsshelveinfo(self, info):
158 def writeobsshelveinfo(self, info):
160 scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
159 scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
161
160
162 def readobsshelveinfo(self):
161 def readobsshelveinfo(self):
163 return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
162 return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
164
163
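The collision-avoidance scheme in backupfilename() above is easiest to see on plain strings: try the name as-is, then insert -1, -2, ... before the extension. A standalone sketch (illustration only; no vfs involved):

    import itertools

    def gennames(base):
        yield base
        stem, ext = base.rsplit('.', 1)
        for i in itertools.count(1):
            yield '%s-%d.%s' % (stem, i, ext)

    names = gennames('.hg/shelve-backup/default.patch')
    print(next(names))  # .hg/shelve-backup/default.patch
    print(next(names))  # .hg/shelve-backup/default-1.patch
    print(next(names))  # .hg/shelve-backup/default-2.patch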
165 class shelvedstate(object):
164 class shelvedstate(object):
166 """Handle persistence during unshelving operations.
165 """Handle persistence during unshelving operations.
167
166
168 Handles saving and restoring a shelved state. Supports multiple on-disk
167 Handles saving and restoring a shelved state. Supports multiple on-disk
169 versions of the state file and handles each of them appropriately.
168 versions of the state file and handles each of them appropriately.
170 """
169 """
171 _version = 2
170 _version = 2
172 _filename = 'shelvedstate'
171 _filename = 'shelvedstate'
173 _keep = 'keep'
172 _keep = 'keep'
174 _nokeep = 'nokeep'
173 _nokeep = 'nokeep'
175 # colon is essential to differentiate from a real bookmark name
174 # colon is essential to differentiate from a real bookmark name
176 _noactivebook = ':no-active-bookmark'
175 _noactivebook = ':no-active-bookmark'
177
176
178 @classmethod
177 @classmethod
179 def _verifyandtransform(cls, d):
178 def _verifyandtransform(cls, d):
180 """Some basic shelvestate syntactic verification and transformation"""
179 """Some basic shelvestate syntactic verification and transformation"""
181 try:
180 try:
182 d['originalwctx'] = nodemod.bin(d['originalwctx'])
181 d['originalwctx'] = nodemod.bin(d['originalwctx'])
183 d['pendingctx'] = nodemod.bin(d['pendingctx'])
182 d['pendingctx'] = nodemod.bin(d['pendingctx'])
184 d['parents'] = [nodemod.bin(h)
183 d['parents'] = [nodemod.bin(h)
185 for h in d['parents'].split(' ')]
184 for h in d['parents'].split(' ')]
186 d['nodestoremove'] = [nodemod.bin(h)
185 d['nodestoremove'] = [nodemod.bin(h)
187 for h in d['nodestoremove'].split(' ')]
186 for h in d['nodestoremove'].split(' ')]
188 except (ValueError, TypeError, KeyError) as err:
187 except (ValueError, TypeError, KeyError) as err:
189 raise error.CorruptedState(str(err))
188 raise error.CorruptedState(str(err))
190
189
191 @classmethod
190 @classmethod
192 def _getversion(cls, repo):
191 def _getversion(cls, repo):
193 """Read version information from shelvestate file"""
192 """Read version information from shelvestate file"""
194 fp = repo.vfs(cls._filename)
193 fp = repo.vfs(cls._filename)
195 try:
194 try:
196 version = int(fp.readline().strip())
195 version = int(fp.readline().strip())
197 except ValueError as err:
196 except ValueError as err:
198 raise error.CorruptedState(str(err))
197 raise error.CorruptedState(str(err))
199 finally:
198 finally:
200 fp.close()
199 fp.close()
201 return version
200 return version
202
201
203 @classmethod
202 @classmethod
204 def _readold(cls, repo):
203 def _readold(cls, repo):
205 """Read the old position-based version of a shelvestate file"""
204 """Read the old position-based version of a shelvestate file"""
206 # Order is important, because the old shelvestate file uses it
205 # Order is important, because the old shelvestate file uses it
207 # to determine values of fields (e.g. name is on the second line,
206 # to determine values of fields (e.g. name is on the second line,
208 # originalwctx is on the third and so forth). Please do not change.
207 # originalwctx is on the third and so forth). Please do not change.
209 keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
208 keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
210 'nodestoremove', 'branchtorestore', 'keep', 'activebook']
209 'nodestoremove', 'branchtorestore', 'keep', 'activebook']
211 # this is executed only rarely, so it is not a big deal
210 # this is executed only rarely, so it is not a big deal
212 # that we open this file twice
211 # that we open this file twice
213 fp = repo.vfs(cls._filename)
212 fp = repo.vfs(cls._filename)
214 d = {}
213 d = {}
215 try:
214 try:
216 for key in keys:
215 for key in keys:
217 d[key] = fp.readline().strip()
216 d[key] = fp.readline().strip()
218 finally:
217 finally:
219 fp.close()
218 fp.close()
220 return d
219 return d
221
220
222 @classmethod
221 @classmethod
223 def load(cls, repo):
222 def load(cls, repo):
224 version = cls._getversion(repo)
223 version = cls._getversion(repo)
225 if version < cls._version:
224 if version < cls._version:
226 d = cls._readold(repo)
225 d = cls._readold(repo)
227 elif version == cls._version:
226 elif version == cls._version:
228 d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
227 d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
229 .read(firstlinenonkeyval=True)
228 .read(firstlinenonkeyval=True)
230 else:
229 else:
231 raise error.Abort(_('this version of shelve is incompatible '
230 raise error.Abort(_('this version of shelve is incompatible '
232 'with the version used in this repo'))
231 'with the version used in this repo'))
233
232
234 cls._verifyandtransform(d)
233 cls._verifyandtransform(d)
235 try:
234 try:
236 obj = cls()
235 obj = cls()
237 obj.name = d['name']
236 obj.name = d['name']
238 obj.wctx = repo[d['originalwctx']]
237 obj.wctx = repo[d['originalwctx']]
239 obj.pendingctx = repo[d['pendingctx']]
238 obj.pendingctx = repo[d['pendingctx']]
240 obj.parents = d['parents']
239 obj.parents = d['parents']
241 obj.nodestoremove = d['nodestoremove']
240 obj.nodestoremove = d['nodestoremove']
242 obj.branchtorestore = d.get('branchtorestore', '')
241 obj.branchtorestore = d.get('branchtorestore', '')
243 obj.keep = d.get('keep') == cls._keep
242 obj.keep = d.get('keep') == cls._keep
244 obj.activebookmark = ''
243 obj.activebookmark = ''
245 if d.get('activebook', '') != cls._noactivebook:
244 if d.get('activebook', '') != cls._noactivebook:
246 obj.activebookmark = d.get('activebook', '')
245 obj.activebookmark = d.get('activebook', '')
247 except (error.RepoLookupError, KeyError) as err:
246 except (error.RepoLookupError, KeyError) as err:
248 raise error.CorruptedState(str(err))
247 raise error.CorruptedState(str(err))
249
248
250 return obj
249 return obj
251
250
252 @classmethod
251 @classmethod
253 def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
252 def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
254 branchtorestore, keep=False, activebook=''):
253 branchtorestore, keep=False, activebook=''):
255 info = {
254 info = {
256 "name": name,
255 "name": name,
257 "originalwctx": nodemod.hex(originalwctx.node()),
256 "originalwctx": nodemod.hex(originalwctx.node()),
258 "pendingctx": nodemod.hex(pendingctx.node()),
257 "pendingctx": nodemod.hex(pendingctx.node()),
259 "parents": ' '.join([nodemod.hex(p)
258 "parents": ' '.join([nodemod.hex(p)
260 for p in repo.dirstate.parents()]),
259 for p in repo.dirstate.parents()]),
261 "nodestoremove": ' '.join([nodemod.hex(n)
260 "nodestoremove": ' '.join([nodemod.hex(n)
262 for n in nodestoremove]),
261 for n in nodestoremove]),
263 "branchtorestore": branchtorestore,
262 "branchtorestore": branchtorestore,
264 "keep": cls._keep if keep else cls._nokeep,
263 "keep": cls._keep if keep else cls._nokeep,
265 "activebook": activebook or cls._noactivebook
264 "activebook": activebook or cls._noactivebook
266 }
265 }
267 scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
266 scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
268 .write(info, firstline=str(cls._version))
267 .write(info, firstline=str(cls._version))
269
268
270 @classmethod
269 @classmethod
271 def clear(cls, repo):
270 def clear(cls, repo):
272 repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
271 repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
273
272
274 def cleanupoldbackups(repo):
273 def cleanupoldbackups(repo):
275 vfs = vfsmod.vfs(repo.vfs.join(backupdir))
274 vfs = vfsmod.vfs(repo.vfs.join(backupdir))
276 maxbackups = repo.ui.configint('shelve', 'maxbackups', 10)
275 maxbackups = repo.ui.configint('shelve', 'maxbackups', 10)
277 hgfiles = [f for f in vfs.listdir()
276 hgfiles = [f for f in vfs.listdir()
278 if f.endswith('.' + patchextension)]
277 if f.endswith('.' + patchextension)]
279 hgfiles = sorted([(vfs.stat(f).st_mtime, f) for f in hgfiles])
278 hgfiles = sorted([(vfs.stat(f).st_mtime, f) for f in hgfiles])
280 if 0 < maxbackups and maxbackups < len(hgfiles):
279 if 0 < maxbackups and maxbackups < len(hgfiles):
281 bordermtime = hgfiles[-maxbackups][0]
280 bordermtime = hgfiles[-maxbackups][0]
282 else:
281 else:
283 bordermtime = None
282 bordermtime = None
284 for mtime, f in hgfiles[:len(hgfiles) - maxbackups]:
283 for mtime, f in hgfiles[:len(hgfiles) - maxbackups]:
285 if mtime == bordermtime:
284 if mtime == bordermtime:
286 # keep it, because timestamps can't determine the exact order of backups
285 # keep it, because timestamps can't determine the exact order of backups
287 continue
286 continue
288 base = f[:-(1 + len(patchextension))]
287 base = f[:-(1 + len(patchextension))]
289 for ext in shelvefileextensions:
288 for ext in shelvefileextensions:
290 vfs.tryunlink(base + '.' + ext)
289 vfs.tryunlink(base + '.' + ext)
291
290
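cleanupoldbackups() keeps the newest maxbackups backups by mtime but never drops a file sharing the border timestamp, since equal timestamps cannot be ordered reliably. The same retention rule on plain (mtime, name) tuples, as a standalone sketch (illustration only):

    def backups_to_drop(mtime_name_pairs, maxbackups):
        files = sorted(mtime_name_pairs)                # oldest first
        if 0 < maxbackups < len(files):
            bordermtime = files[-maxbackups][0]
        else:
            bordermtime = None
        return [name for mtime, name in files[:len(files) - maxbackups]
                if mtime != bordermtime]

    print(backups_to_drop([(10, 'a'), (30, 'b'), (30, 'c'), (30, 'd')], 2))
    # -> ['a']  ('b' survives: it shares the border timestamp with 'c')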
292 def _backupactivebookmark(repo):
291 def _backupactivebookmark(repo):
293 activebookmark = repo._activebookmark
292 activebookmark = repo._activebookmark
294 if activebookmark:
293 if activebookmark:
295 bookmarks.deactivate(repo)
294 bookmarks.deactivate(repo)
296 return activebookmark
295 return activebookmark
297
296
298 def _restoreactivebookmark(repo, mark):
297 def _restoreactivebookmark(repo, mark):
299 if mark:
298 if mark:
300 bookmarks.activate(repo, mark)
299 bookmarks.activate(repo, mark)
301
300
302 def _aborttransaction(repo):
301 def _aborttransaction(repo):
303 '''Abort current transaction for shelve/unshelve, but keep dirstate
302 '''Abort current transaction for shelve/unshelve, but keep dirstate
304 '''
303 '''
305 tr = repo.currenttransaction()
304 tr = repo.currenttransaction()
306 repo.dirstate.savebackup(tr, suffix='.shelve')
305 repo.dirstate.savebackup(tr, suffix='.shelve')
307 tr.abort()
306 tr.abort()
308 repo.dirstate.restorebackup(None, suffix='.shelve')
307 repo.dirstate.restorebackup(None, suffix='.shelve')
309
308
310 def createcmd(ui, repo, pats, opts):
309 def createcmd(ui, repo, pats, opts):
311 """subcommand that creates a new shelve"""
310 """subcommand that creates a new shelve"""
312 with repo.wlock():
311 with repo.wlock():
313 cmdutil.checkunfinished(repo)
312 cmdutil.checkunfinished(repo)
314 return _docreatecmd(ui, repo, pats, opts)
313 return _docreatecmd(ui, repo, pats, opts)
315
314
316 def getshelvename(repo, parent, opts):
315 def getshelvename(repo, parent, opts):
317 """Decide on the name this shelve is going to have"""
316 """Decide on the name this shelve is going to have"""
318 def gennames():
317 def gennames():
319 yield label
318 yield label
320 for i in xrange(1, 100):
319 for i in xrange(1, 100):
321 yield '%s-%02d' % (label, i)
320 yield '%s-%02d' % (label, i)
322 name = opts.get('name')
321 name = opts.get('name')
323 label = repo._activebookmark or parent.branch() or 'default'
322 label = repo._activebookmark or parent.branch() or 'default'
324 # slashes aren't allowed in filenames, therefore we replace them
323 # slashes aren't allowed in filenames, therefore we replace them
325 label = label.replace('/', '_')
324 label = label.replace('/', '_')
326 label = label.replace('\\', '_')
325 label = label.replace('\\', '_')
327 # filenames must not start with '.', as such files would be hidden
326 # filenames must not start with '.', as such files would be hidden
328 if label.startswith('.'):
327 if label.startswith('.'):
329 label = label.replace('.', '_', 1)
328 label = label.replace('.', '_', 1)
330
329
331 if name:
330 if name:
332 if shelvedfile(repo, name, patchextension).exists():
331 if shelvedfile(repo, name, patchextension).exists():
333 e = _("a shelved change named '%s' already exists") % name
332 e = _("a shelved change named '%s' already exists") % name
334 raise error.Abort(e)
333 raise error.Abort(e)
335
334
336 # ensure we are not creating a subdirectory or a hidden file
335 # ensure we are not creating a subdirectory or a hidden file
337 if '/' in name or '\\' in name:
336 if '/' in name or '\\' in name:
338 raise error.Abort(_('shelved change names can not contain slashes'))
337 raise error.Abort(_('shelved change names can not contain slashes'))
339 if name.startswith('.'):
338 if name.startswith('.'):
340 raise error.Abort(_("shelved change names can not start with '.'"))
339 raise error.Abort(_("shelved change names can not start with '.'"))
341
340
342 else:
341 else:
343 for n in gennames():
342 for n in gennames():
344 if not shelvedfile(repo, n, patchextension).exists():
343 if not shelvedfile(repo, n, patchextension).exists():
345 name = n
344 name = n
346 break
345 break
347 else:
346 else:
348 raise error.Abort(_("too many shelved changes named '%s'") % label)
347 raise error.Abort(_("too many shelved changes named '%s'") % label)
349
348
350 return name
349 return name
351
350
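When no --name is passed, getshelvename() derives a label from the active bookmark or branch, sanitizes it, and probes candidates in a fixed order until a free one is found. A standalone sketch of that candidate sequence (illustration only; the existence check against .patch files is left out):

    def candidate_names(label):
        label = label.replace('/', '_').replace('\\', '_')
        if label.startswith('.'):
            label = label.replace('.', '_', 1)
        yield label                      # bare label first
        for i in range(1, 100):          # then label-01 .. label-99
            yield '%s-%02d' % (label, i)

    gen = candidate_names('feature/login')
    print(next(gen))  # feature_login
    print(next(gen))  # feature_login-01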
352 def mutableancestors(ctx):
351 def mutableancestors(ctx):
353 """return all mutable ancestors for ctx (including ctx itself)
352 """return all mutable ancestors for ctx (including ctx itself)
354
353
355 Much faster than the revset ancestors(ctx) & draft()"""
354 Much faster than the revset ancestors(ctx) & draft()"""
356 seen = {nodemod.nullrev}
355 seen = {nodemod.nullrev}
357 visit = collections.deque()
356 visit = collections.deque()
358 visit.append(ctx)
357 visit.append(ctx)
359 while visit:
358 while visit:
360 ctx = visit.popleft()
359 ctx = visit.popleft()
361 yield ctx.node()
360 yield ctx.node()
362 for parent in ctx.parents():
361 for parent in ctx.parents():
363 rev = parent.rev()
362 rev = parent.rev()
364 if rev not in seen:
363 if rev not in seen:
365 seen.add(rev)
364 seen.add(rev)
366 if parent.mutable():
365 if parent.mutable():
367 visit.append(parent)
366 visit.append(parent)
368
367
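mutableancestors() is a breadth-first walk over parents that yields the starting context and every draft/secret ancestor, stopping as soon as it reaches an immutable (public) commit. The same traversal over a plain parent map, as a standalone sketch (illustration only):

    import collections

    def mutable_ancestors(start, parents, mutable):
        # parents: rev -> list of parent revs; mutable: set of draft/secret revs
        seen = set()
        visit = collections.deque([start])
        while visit:
            rev = visit.popleft()
            yield rev
            for p in parents.get(rev, []):
                if p not in seen:
                    seen.add(p)
                    if p in mutable:
                        visit.append(p)

    # 0 (public) <- 1 (draft) <- 2 (draft), 1 <- 3 (draft)
    print(list(mutable_ancestors(3, {3: [1], 2: [1], 1: [0]}, {1, 2, 3})))
    # -> [3, 1]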
369 def getcommitfunc(extra, interactive, editor=False):
368 def getcommitfunc(extra, interactive, editor=False):
370 def commitfunc(ui, repo, message, match, opts):
369 def commitfunc(ui, repo, message, match, opts):
371 hasmq = util.safehasattr(repo, 'mq')
370 hasmq = util.safehasattr(repo, 'mq')
372 if hasmq:
371 if hasmq:
373 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
372 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
374 overrides = {('phases', 'new-commit'): phases.secret}
373 overrides = {('phases', 'new-commit'): phases.secret}
375 try:
374 try:
376 editor_ = False
375 editor_ = False
377 if editor:
376 if editor:
378 editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
377 editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
379 **opts)
378 **opts)
380 with repo.ui.configoverride(overrides):
379 with repo.ui.configoverride(overrides):
381 return repo.commit(message, shelveuser, opts.get('date'),
380 return repo.commit(message, shelveuser, opts.get('date'),
382 match, editor=editor_, extra=extra)
381 match, editor=editor_, extra=extra)
383 finally:
382 finally:
384 if hasmq:
383 if hasmq:
385 repo.mq.checkapplied = saved
384 repo.mq.checkapplied = saved
386
385
387 def interactivecommitfunc(ui, repo, *pats, **opts):
386 def interactivecommitfunc(ui, repo, *pats, **opts):
388 match = scmutil.match(repo['.'], pats, {})
387 match = scmutil.match(repo['.'], pats, {})
389 message = opts['message']
388 message = opts['message']
390 return commitfunc(ui, repo, message, match, opts)
389 return commitfunc(ui, repo, message, match, opts)
391
390
392 return interactivecommitfunc if interactive else commitfunc
391 return interactivecommitfunc if interactive else commitfunc
393
392
394 def _nothingtoshelvemessaging(ui, repo, pats, opts):
393 def _nothingtoshelvemessaging(ui, repo, pats, opts):
395 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
394 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
396 if stat.deleted:
395 if stat.deleted:
397 ui.status(_("nothing changed (%d missing files, see "
396 ui.status(_("nothing changed (%d missing files, see "
398 "'hg status')\n") % len(stat.deleted))
397 "'hg status')\n") % len(stat.deleted))
399 else:
398 else:
400 ui.status(_("nothing changed\n"))
399 ui.status(_("nothing changed\n"))
401
400
402 def _shelvecreatedcommit(repo, node, name):
401 def _shelvecreatedcommit(repo, node, name):
403 bases = list(mutableancestors(repo[node]))
402 bases = list(mutableancestors(repo[node]))
404 shelvedfile(repo, name, 'hg').writebundle(bases, node)
403 shelvedfile(repo, name, 'hg').writebundle(bases, node)
405 cmdutil.export(repo, [node],
404 cmdutil.export(repo, [node],
406 fp=shelvedfile(repo, name, patchextension).opener('wb'),
405 fp=shelvedfile(repo, name, patchextension).opener('wb'),
407 opts=mdiff.diffopts(git=True))
406 opts=mdiff.diffopts(git=True))
408
407
409 def _includeunknownfiles(repo, pats, opts, extra):
408 def _includeunknownfiles(repo, pats, opts, extra):
410 s = repo.status(match=scmutil.match(repo[None], pats, opts),
409 s = repo.status(match=scmutil.match(repo[None], pats, opts),
411 unknown=True)
410 unknown=True)
412 if s.unknown:
411 if s.unknown:
413 extra['shelve_unknown'] = '\0'.join(s.unknown)
412 extra['shelve_unknown'] = '\0'.join(s.unknown)
414 repo[None].add(s.unknown)
413 repo[None].add(s.unknown)
415
414
416 def _finishshelve(repo):
415 def _finishshelve(repo):
417 _aborttransaction(repo)
416 _aborttransaction(repo)
418
417
419 def _docreatecmd(ui, repo, pats, opts):
418 def _docreatecmd(ui, repo, pats, opts):
420 wctx = repo[None]
419 wctx = repo[None]
421 parents = wctx.parents()
420 parents = wctx.parents()
422 if len(parents) > 1:
421 if len(parents) > 1:
423 raise error.Abort(_('cannot shelve while merging'))
422 raise error.Abort(_('cannot shelve while merging'))
424 parent = parents[0]
423 parent = parents[0]
425 origbranch = wctx.branch()
424 origbranch = wctx.branch()
426
425
427 if parent.node() != nodemod.nullid:
426 if parent.node() != nodemod.nullid:
428 desc = "changes to: %s" % parent.description().split('\n', 1)[0]
427 desc = "changes to: %s" % parent.description().split('\n', 1)[0]
429 else:
428 else:
430 desc = '(changes in empty repository)'
429 desc = '(changes in empty repository)'
431
430
432 if not opts.get('message'):
431 if not opts.get('message'):
433 opts['message'] = desc
432 opts['message'] = desc
434
433
435 lock = tr = activebookmark = None
434 lock = tr = activebookmark = None
436 try:
435 try:
437 lock = repo.lock()
436 lock = repo.lock()
438
437
439 # use an uncommitted transaction to generate the bundle to avoid
438 # use an uncommitted transaction to generate the bundle to avoid
440 # pull races. ensure we don't print the abort message to stderr.
439 # pull races. ensure we don't print the abort message to stderr.
441 tr = repo.transaction('commit', report=lambda x: None)
440 tr = repo.transaction('commit', report=lambda x: None)
442
441
443 interactive = opts.get('interactive', False)
442 interactive = opts.get('interactive', False)
444 includeunknown = (opts.get('unknown', False) and
443 includeunknown = (opts.get('unknown', False) and
445 not opts.get('addremove', False))
444 not opts.get('addremove', False))
446
445
447 name = getshelvename(repo, parent, opts)
446 name = getshelvename(repo, parent, opts)
448 activebookmark = _backupactivebookmark(repo)
447 activebookmark = _backupactivebookmark(repo)
449 extra = {}
448 extra = {}
450 if includeunknown:
449 if includeunknown:
451 _includeunknownfiles(repo, pats, opts, extra)
450 _includeunknownfiles(repo, pats, opts, extra)
452
451
453 if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
452 if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
453 # In a non-bare shelve we don't store the newly created branch
452 # In a non-bare shelve we don't store the newly created branch
454 # in the bundled commit
453 # in the bundled commit
456 repo.dirstate.setbranch(repo['.'].branch())
455 repo.dirstate.setbranch(repo['.'].branch())
457
456
458 commitfunc = getcommitfunc(extra, interactive, editor=True)
457 commitfunc = getcommitfunc(extra, interactive, editor=True)
459 if not interactive:
458 if not interactive:
460 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
459 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
461 else:
460 else:
462 node = cmdutil.dorecord(ui, repo, commitfunc, None,
461 node = cmdutil.dorecord(ui, repo, commitfunc, None,
463 False, cmdutil.recordfilter, *pats,
462 False, cmdutil.recordfilter, *pats,
464 **opts)
463 **opts)
465 if not node:
464 if not node:
466 _nothingtoshelvemessaging(ui, repo, pats, opts)
465 _nothingtoshelvemessaging(ui, repo, pats, opts)
467 return 1
466 return 1
468
467
469 _shelvecreatedcommit(repo, node, name)
468 _shelvecreatedcommit(repo, node, name)
470
469
471 if ui.formatted():
470 if ui.formatted():
472 desc = util.ellipsis(desc, ui.termwidth())
471 desc = util.ellipsis(desc, ui.termwidth())
473 ui.status(_('shelved as %s\n') % name)
472 ui.status(_('shelved as %s\n') % name)
474 hg.update(repo, parent.node())
473 hg.update(repo, parent.node())
475 if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
474 if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
476 repo.dirstate.setbranch(origbranch)
475 repo.dirstate.setbranch(origbranch)
477
476
478 _finishshelve(repo)
477 _finishshelve(repo)
479 finally:
478 finally:
480 _restoreactivebookmark(repo, activebookmark)
479 _restoreactivebookmark(repo, activebookmark)
481 lockmod.release(tr, lock)
480 lockmod.release(tr, lock)
482
481
483 def _isbareshelve(pats, opts):
482 def _isbareshelve(pats, opts):
484 return (not pats
483 return (not pats
485 and not opts.get('interactive', False)
484 and not opts.get('interactive', False)
486 and not opts.get('include', False)
485 and not opts.get('include', False)
487 and not opts.get('exclude', False))
486 and not opts.get('exclude', False))
488
487
489 def _iswctxonnewbranch(repo):
488 def _iswctxonnewbranch(repo):
490 return repo[None].branch() != repo['.'].branch()
489 return repo[None].branch() != repo['.'].branch()
491
490
492 def cleanupcmd(ui, repo):
491 def cleanupcmd(ui, repo):
493 """subcommand that deletes all shelves"""
492 """subcommand that deletes all shelves"""
494
493
495 with repo.wlock():
494 with repo.wlock():
496 for (name, _type) in repo.vfs.readdir(shelvedir):
495 for (name, _type) in repo.vfs.readdir(shelvedir):
497 suffix = name.rsplit('.', 1)[-1]
496 suffix = name.rsplit('.', 1)[-1]
498 if suffix in shelvefileextensions:
497 if suffix in shelvefileextensions:
499 shelvedfile(repo, name).movetobackup()
498 shelvedfile(repo, name).movetobackup()
500 cleanupoldbackups(repo)
499 cleanupoldbackups(repo)
501
500
502 def deletecmd(ui, repo, pats):
501 def deletecmd(ui, repo, pats):
503 """subcommand that deletes a specific shelve"""
502 """subcommand that deletes a specific shelve"""
504 if not pats:
503 if not pats:
505 raise error.Abort(_('no shelved changes specified!'))
504 raise error.Abort(_('no shelved changes specified!'))
506 with repo.wlock():
505 with repo.wlock():
507 try:
506 try:
508 for name in pats:
507 for name in pats:
509 for suffix in shelvefileextensions:
508 for suffix in shelvefileextensions:
510 shfile = shelvedfile(repo, name, suffix)
509 shfile = shelvedfile(repo, name, suffix)
511 # patch file is necessary, as it should
510 # patch file is necessary, as it should
512 # be present for any kind of shelve,
511 # be present for any kind of shelve,
513 # but the .hg file is optional as in the future we
512 # but the .hg file is optional as in the future we
514 # will add an obsolete shelve which does not create a
513 # will add an obsolete shelve which does not create a
515 # bundle
514 # bundle
516 if shfile.exists() or suffix == patchextension:
515 if shfile.exists() or suffix == patchextension:
517 shfile.movetobackup()
516 shfile.movetobackup()
518 cleanupoldbackups(repo)
517 cleanupoldbackups(repo)
519 except OSError as err:
518 except OSError as err:
520 if err.errno != errno.ENOENT:
519 if err.errno != errno.ENOENT:
521 raise
520 raise
522 raise error.Abort(_("shelved change '%s' not found") % name)
521 raise error.Abort(_("shelved change '%s' not found") % name)
523
522
524 def listshelves(repo):
523 def listshelves(repo):
525 """return all shelves in repo as list of (time, filename)"""
524 """return all shelves in repo as list of (time, filename)"""
526 try:
525 try:
527 names = repo.vfs.readdir(shelvedir)
526 names = repo.vfs.readdir(shelvedir)
528 except OSError as err:
527 except OSError as err:
529 if err.errno != errno.ENOENT:
528 if err.errno != errno.ENOENT:
530 raise
529 raise
531 return []
530 return []
532 info = []
531 info = []
533 for (name, _type) in names:
532 for (name, _type) in names:
534 pfx, sfx = name.rsplit('.', 1)
533 pfx, sfx = name.rsplit('.', 1)
535 if not pfx or sfx != patchextension:
534 if not pfx or sfx != patchextension:
536 continue
535 continue
537 st = shelvedfile(repo, name).stat()
536 st = shelvedfile(repo, name).stat()
538 info.append((st.st_mtime, shelvedfile(repo, pfx).filename()))
537 info.append((st.st_mtime, shelvedfile(repo, pfx).filename()))
539 return sorted(info, reverse=True)
538 return sorted(info, reverse=True)
540
539
541 def listcmd(ui, repo, pats, opts):
540 def listcmd(ui, repo, pats, opts):
542 """subcommand that displays the list of shelves"""
541 """subcommand that displays the list of shelves"""
543 pats = set(pats)
542 pats = set(pats)
544 width = 80
543 width = 80
545 if not ui.plain():
544 if not ui.plain():
546 width = ui.termwidth()
545 width = ui.termwidth()
547 namelabel = 'shelve.newest'
546 namelabel = 'shelve.newest'
548 ui.pager('shelve')
547 ui.pager('shelve')
549 for mtime, name in listshelves(repo):
548 for mtime, name in listshelves(repo):
550 sname = util.split(name)[1]
549 sname = util.split(name)[1]
551 if pats and sname not in pats:
550 if pats and sname not in pats:
552 continue
551 continue
553 ui.write(sname, label=namelabel)
552 ui.write(sname, label=namelabel)
554 namelabel = 'shelve.name'
553 namelabel = 'shelve.name'
555 if ui.quiet:
554 if ui.quiet:
556 ui.write('\n')
555 ui.write('\n')
557 continue
556 continue
558 ui.write(' ' * (16 - len(sname)))
557 ui.write(' ' * (16 - len(sname)))
559 used = 16
558 used = 16
560 age = '(%s)' % templatefilters.age(util.makedate(mtime), abbrev=True)
559 age = '(%s)' % templatefilters.age(util.makedate(mtime), abbrev=True)
561 ui.write(age, label='shelve.age')
560 ui.write(age, label='shelve.age')
562 ui.write(' ' * (12 - len(age)))
561 ui.write(' ' * (12 - len(age)))
563 used += 12
562 used += 12
564 with open(name + '.' + patchextension, 'rb') as fp:
563 with open(name + '.' + patchextension, 'rb') as fp:
565 while True:
564 while True:
566 line = fp.readline()
565 line = fp.readline()
567 if not line:
566 if not line:
568 break
567 break
569 if not line.startswith('#'):
568 if not line.startswith('#'):
570 desc = line.rstrip()
569 desc = line.rstrip()
571 if ui.formatted():
570 if ui.formatted():
572 desc = util.ellipsis(desc, width - used)
571 desc = util.ellipsis(desc, width - used)
573 ui.write(desc)
572 ui.write(desc)
574 break
573 break
575 ui.write('\n')
574 ui.write('\n')
576 if not (opts['patch'] or opts['stat']):
575 if not (opts['patch'] or opts['stat']):
577 continue
576 continue
578 difflines = fp.readlines()
577 difflines = fp.readlines()
579 if opts['patch']:
578 if opts['patch']:
580 for chunk, label in patch.difflabel(iter, difflines):
579 for chunk, label in patch.difflabel(iter, difflines):
581 ui.write(chunk, label=label)
580 ui.write(chunk, label=label)
582 if opts['stat']:
581 if opts['stat']:
583 for chunk, label in patch.diffstatui(difflines, width=width):
582 for chunk, label in patch.diffstatui(difflines, width=width):
584 ui.write(chunk, label=label)
583 ui.write(chunk, label=label)
585
584
586 def patchcmds(ui, repo, pats, opts, subcommand):
585 def patchcmds(ui, repo, pats, opts, subcommand):
587 """subcommand that displays shelves"""
586 """subcommand that displays shelves"""
588 if len(pats) == 0:
587 if len(pats) == 0:
589 raise error.Abort(_("--%s expects at least one shelf") % subcommand)
588 raise error.Abort(_("--%s expects at least one shelf") % subcommand)
590
589
591 for shelfname in pats:
590 for shelfname in pats:
592 if not shelvedfile(repo, shelfname, patchextension).exists():
591 if not shelvedfile(repo, shelfname, patchextension).exists():
593 raise error.Abort(_("cannot find shelf %s") % shelfname)
592 raise error.Abort(_("cannot find shelf %s") % shelfname)
594
593
595 listcmd(ui, repo, pats, opts)
594 listcmd(ui, repo, pats, opts)
596
595
597 def checkparents(repo, state):
596 def checkparents(repo, state):
598 """check parent while resuming an unshelve"""
597 """check parent while resuming an unshelve"""
599 if state.parents != repo.dirstate.parents():
598 if state.parents != repo.dirstate.parents():
600 raise error.Abort(_('working directory parents do not match unshelve '
599 raise error.Abort(_('working directory parents do not match unshelve '
601 'state'))
600 'state'))
602
601
603 def pathtofiles(repo, files):
602 def pathtofiles(repo, files):
604 cwd = repo.getcwd()
603 cwd = repo.getcwd()
605 return [repo.pathto(f, cwd) for f in files]
604 return [repo.pathto(f, cwd) for f in files]
606
605
607 def unshelveabort(ui, repo, state, opts):
606 def unshelveabort(ui, repo, state, opts):
608 """subcommand that aborts an in-progress unshelve"""
607 """subcommand that aborts an in-progress unshelve"""
609 with repo.lock():
608 with repo.lock():
610 try:
609 try:
611 checkparents(repo, state)
610 checkparents(repo, state)
612
611
613 repo.vfs.rename('unshelverebasestate', 'rebasestate')
612 repo.vfs.rename('unshelverebasestate', 'rebasestate')
614 try:
613 try:
615 rebase.rebase(ui, repo, **{
614 rebase.rebase(ui, repo, **{
616 'abort' : True
615 'abort' : True
617 })
616 })
618 except Exception:
617 except Exception:
619 repo.vfs.rename('rebasestate', 'unshelverebasestate')
618 repo.vfs.rename('rebasestate', 'unshelverebasestate')
620 raise
619 raise
621
620
622 mergefiles(ui, repo, state.wctx, state.pendingctx)
621 mergefiles(ui, repo, state.wctx, state.pendingctx)
623 repair.strip(ui, repo, state.nodestoremove, backup=False,
622 repair.strip(ui, repo, state.nodestoremove, backup=False,
624 topic='shelve')
623 topic='shelve')
625 finally:
624 finally:
626 shelvedstate.clear(repo)
625 shelvedstate.clear(repo)
627 ui.warn(_("unshelve of '%s' aborted\n") % state.name)
626 ui.warn(_("unshelve of '%s' aborted\n") % state.name)
628
627
629 def mergefiles(ui, repo, wctx, shelvectx):
628 def mergefiles(ui, repo, wctx, shelvectx):
630 """updates to wctx and merges the changes from shelvectx into the
629 """updates to wctx and merges the changes from shelvectx into the
631 dirstate."""
630 dirstate."""
632 with ui.configoverride({('ui', 'quiet'): True}):
631 with ui.configoverride({('ui', 'quiet'): True}):
633 hg.update(repo, wctx.node())
632 hg.update(repo, wctx.node())
634 files = []
633 files = []
635 files.extend(shelvectx.files())
634 files.extend(shelvectx.files())
636 files.extend(shelvectx.parents()[0].files())
635 files.extend(shelvectx.parents()[0].files())
637
636
638 # revert will overwrite unknown files, so move them out of the way
637 # revert will overwrite unknown files, so move them out of the way
639 for file in repo.status(unknown=True).unknown:
638 for file in repo.status(unknown=True).unknown:
640 if file in files:
639 if file in files:
641 util.rename(file, scmutil.origpath(ui, repo, file))
640 util.rename(file, scmutil.origpath(ui, repo, file))
642 ui.pushbuffer(True)
641 ui.pushbuffer(True)
643 cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
642 cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
644 *pathtofiles(repo, files),
643 *pathtofiles(repo, files),
645 **{'no_backup': True})
644 **{'no_backup': True})
646 ui.popbuffer()
645 ui.popbuffer()
647
646
648 def restorebranch(ui, repo, branchtorestore):
647 def restorebranch(ui, repo, branchtorestore):
649 if branchtorestore and branchtorestore != repo.dirstate.branch():
648 if branchtorestore and branchtorestore != repo.dirstate.branch():
650 repo.dirstate.setbranch(branchtorestore)
649 repo.dirstate.setbranch(branchtorestore)
651 ui.status(_('marked working directory as branch %s\n')
650 ui.status(_('marked working directory as branch %s\n')
652 % branchtorestore)
651 % branchtorestore)
653
652
654 def unshelvecleanup(ui, repo, name, opts):
653 def unshelvecleanup(ui, repo, name, opts):
655 """remove related files after an unshelve"""
654 """remove related files after an unshelve"""
656 if not opts.get('keep'):
655 if not opts.get('keep'):
657 for filetype in shelvefileextensions:
656 for filetype in shelvefileextensions:
658 shfile = shelvedfile(repo, name, filetype)
657 shfile = shelvedfile(repo, name, filetype)
659 if shfile.exists():
658 if shfile.exists():
660 shfile.movetobackup()
659 shfile.movetobackup()
661 cleanupoldbackups(repo)
660 cleanupoldbackups(repo)
662
661
663 def unshelvecontinue(ui, repo, state, opts):
662 def unshelvecontinue(ui, repo, state, opts):
664 """subcommand to continue an in-progress unshelve"""
663 """subcommand to continue an in-progress unshelve"""
665 # We're finishing off a merge. First parent is our original
664 # We're finishing off a merge. First parent is our original
666 # parent, second is the temporary "fake" commit we're unshelving.
665 # parent, second is the temporary "fake" commit we're unshelving.
667 with repo.lock():
666 with repo.lock():
668 checkparents(repo, state)
667 checkparents(repo, state)
669 ms = merge.mergestate.read(repo)
668 ms = merge.mergestate.read(repo)
670 if [f for f in ms if ms[f] == 'u']:
669 if [f for f in ms if ms[f] == 'u']:
671 raise error.Abort(
670 raise error.Abort(
672 _("unresolved conflicts, can't continue"),
671 _("unresolved conflicts, can't continue"),
673 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
672 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
674
673
675 repo.vfs.rename('unshelverebasestate', 'rebasestate')
674 repo.vfs.rename('unshelverebasestate', 'rebasestate')
676 try:
675 try:
677 rebase.rebase(ui, repo, **{
676 rebase.rebase(ui, repo, **{
678 'continue' : True
677 'continue' : True
679 })
678 })
680 except Exception:
679 except Exception:
681 repo.vfs.rename('rebasestate', 'unshelverebasestate')
680 repo.vfs.rename('rebasestate', 'unshelverebasestate')
682 raise
681 raise
683
682
684 shelvectx = repo['tip']
683 shelvectx = repo['tip']
685 if not shelvectx in state.pendingctx.children():
684 if not shelvectx in state.pendingctx.children():
686 # rebase was a no-op, so it produced no child commit
685 # rebase was a no-op, so it produced no child commit
687 shelvectx = state.pendingctx
686 shelvectx = state.pendingctx
688 else:
687 else:
689 # only strip the shelvectx if the rebase produced it
688 # only strip the shelvectx if the rebase produced it
690 state.nodestoremove.append(shelvectx.node())
689 state.nodestoremove.append(shelvectx.node())
691
690
692 mergefiles(ui, repo, state.wctx, shelvectx)
691 mergefiles(ui, repo, state.wctx, shelvectx)
693 restorebranch(ui, repo, state.branchtorestore)
692 restorebranch(ui, repo, state.branchtorestore)
694
693
695 repair.strip(ui, repo, state.nodestoremove, backup=False,
694 repair.strip(ui, repo, state.nodestoremove, backup=False,
696 topic='shelve')
695 topic='shelve')
697 _restoreactivebookmark(repo, state.activebookmark)
696 _restoreactivebookmark(repo, state.activebookmark)
698 shelvedstate.clear(repo)
697 shelvedstate.clear(repo)
699 unshelvecleanup(ui, repo, state.name, opts)
698 unshelvecleanup(ui, repo, state.name, opts)
700 ui.status(_("unshelve of '%s' complete\n") % state.name)
699 ui.status(_("unshelve of '%s' complete\n") % state.name)
701
700
702 def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
701 def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
703 """Temporarily commit working copy changes before moving unshelve commit"""
702 """Temporarily commit working copy changes before moving unshelve commit"""
704 # Store pending changes in a commit and remember added in case a shelve
703 # Store pending changes in a commit and remember added in case a shelve
705 # contains unknown files that are part of the pending change
704 # contains unknown files that are part of the pending change
706 s = repo.status()
705 s = repo.status()
707 addedbefore = frozenset(s.added)
706 addedbefore = frozenset(s.added)
708 if not (s.modified or s.added or s.removed):
707 if not (s.modified or s.added or s.removed):
709 return tmpwctx, addedbefore
708 return tmpwctx, addedbefore
710 ui.status(_("temporarily committing pending changes "
709 ui.status(_("temporarily committing pending changes "
711 "(restore with 'hg unshelve --abort')\n"))
710 "(restore with 'hg unshelve --abort')\n"))
712 commitfunc = getcommitfunc(extra=None, interactive=False,
711 commitfunc = getcommitfunc(extra=None, interactive=False,
713 editor=False)
712 editor=False)
714 tempopts = {}
713 tempopts = {}
715 tempopts['message'] = "pending changes temporary commit"
714 tempopts['message'] = "pending changes temporary commit"
716 tempopts['date'] = opts.get('date')
715 tempopts['date'] = opts.get('date')
717 with ui.configoverride({('ui', 'quiet'): True}):
716 with ui.configoverride({('ui', 'quiet'): True}):
718 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
717 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
719 tmpwctx = repo[node]
718 tmpwctx = repo[node]
720 return tmpwctx, addedbefore
719 return tmpwctx, addedbefore
721
720
722 def _unshelverestorecommit(ui, repo, basename):
721 def _unshelverestorecommit(ui, repo, basename):
723 """Recreate commit in the repository during the unshelve"""
722 """Recreate commit in the repository during the unshelve"""
724 with ui.configoverride({('ui', 'quiet'): True}):
723 with ui.configoverride({('ui', 'quiet'): True}):
725 shelvedfile(repo, basename, 'hg').applybundle()
724 shelvedfile(repo, basename, 'hg').applybundle()
726 shelvectx = repo['tip']
725 shelvectx = repo['tip']
727 return repo, shelvectx
726 return repo, shelvectx
728
727
729 def _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev, basename, pctx,
728 def _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev, basename, pctx,
730 tmpwctx, shelvectx, branchtorestore,
729 tmpwctx, shelvectx, branchtorestore,
731 activebookmark):
730 activebookmark):
732 """Rebase restored commit from its original location to a destination"""
731 """Rebase restored commit from its original location to a destination"""
733 # If the shelve is not immediately on top of the commit
732 # If the shelve is not immediately on top of the commit
734 # we'll be merging with, rebase it to be on top.
733 # we'll be merging with, rebase it to be on top.
735 if tmpwctx.node() == shelvectx.parents()[0].node():
734 if tmpwctx.node() == shelvectx.parents()[0].node():
736 return shelvectx
735 return shelvectx
737
736
738 ui.status(_('rebasing shelved changes\n'))
737 ui.status(_('rebasing shelved changes\n'))
739 try:
738 try:
740 rebase.rebase(ui, repo, **{
739 rebase.rebase(ui, repo, **{
741 'rev': [shelvectx.rev()],
740 'rev': [shelvectx.rev()],
742 'dest': str(tmpwctx.rev()),
741 'dest': str(tmpwctx.rev()),
743 'keep': True,
742 'keep': True,
744 'tool': opts.get('tool', ''),
743 'tool': opts.get('tool', ''),
745 })
744 })
746 except error.InterventionRequired:
745 except error.InterventionRequired:
747 tr.close()
746 tr.close()
748
747
749 nodestoremove = [repo.changelog.node(rev)
748 nodestoremove = [repo.changelog.node(rev)
750 for rev in xrange(oldtiprev, len(repo))]
749 for rev in xrange(oldtiprev, len(repo))]
751 shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
750 shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
752 branchtorestore, opts.get('keep'), activebookmark)
751 branchtorestore, opts.get('keep'), activebookmark)
753
752
754 repo.vfs.rename('rebasestate', 'unshelverebasestate')
753 repo.vfs.rename('rebasestate', 'unshelverebasestate')
755 raise error.InterventionRequired(
754 raise error.InterventionRequired(
756 _("unresolved conflicts (see 'hg resolve', then "
755 _("unresolved conflicts (see 'hg resolve', then "
757 "'hg unshelve --continue')"))
756 "'hg unshelve --continue')"))
758
757
759 # refresh ctx after rebase completes
758 # refresh ctx after rebase completes
760 shelvectx = repo['tip']
759 shelvectx = repo['tip']
761
760
762 if not shelvectx in tmpwctx.children():
761 if not shelvectx in tmpwctx.children():
763 # rebase was a no-op, so it produced no child commit
762 # rebase was a no-op, so it produced no child commit
764 shelvectx = tmpwctx
763 shelvectx = tmpwctx
765 return shelvectx
764 return shelvectx
766
765
767 def _forgetunknownfiles(repo, shelvectx, addedbefore):
766 def _forgetunknownfiles(repo, shelvectx, addedbefore):
768 # Forget any files that were unknown before the shelve, unknown before
767 # Forget any files that were unknown before the shelve, unknown before
769 # unshelve started, but are now added.
768 # unshelve started, but are now added.
770 shelveunknown = shelvectx.extra().get('shelve_unknown')
769 shelveunknown = shelvectx.extra().get('shelve_unknown')
771 if not shelveunknown:
770 if not shelveunknown:
772 return
771 return
773 shelveunknown = frozenset(shelveunknown.split('\0'))
772 shelveunknown = frozenset(shelveunknown.split('\0'))
774 addedafter = frozenset(repo.status().added)
773 addedafter = frozenset(repo.status().added)
775 toforget = (addedafter & shelveunknown) - addedbefore
774 toforget = (addedafter & shelveunknown) - addedbefore
776 repo[None].forget(toforget)
775 repo[None].forget(toforget)
777
776
778 def _finishunshelve(repo, oldtiprev, tr, activebookmark):
777 def _finishunshelve(repo, oldtiprev, tr, activebookmark):
779 _restoreactivebookmark(repo, activebookmark)
778 _restoreactivebookmark(repo, activebookmark)
780 # The transaction aborting will strip all the commits for us,
779 # The transaction aborting will strip all the commits for us,
781 # but it doesn't update the inmemory structures, so addchangegroup
780 # but it doesn't update the inmemory structures, so addchangegroup
782 # hooks still fire and try to operate on the missing commits.
781 # hooks still fire and try to operate on the missing commits.
783 # Clean up manually to prevent this.
782 # Clean up manually to prevent this.
784 repo.unfiltered().changelog.strip(oldtiprev, tr)
783 repo.unfiltered().changelog.strip(oldtiprev, tr)
785 _aborttransaction(repo)
784 _aborttransaction(repo)
786
785
787 def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
786 def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
788 """Check potential problems which may result from working
787 """Check potential problems which may result from working
789 copy having untracked changes."""
788 copy having untracked changes."""
790 wcdeleted = set(repo.status().deleted)
789 wcdeleted = set(repo.status().deleted)
791 shelvetouched = set(shelvectx.files())
790 shelvetouched = set(shelvectx.files())
792 intersection = wcdeleted.intersection(shelvetouched)
791 intersection = wcdeleted.intersection(shelvetouched)
793 if intersection:
792 if intersection:
794 m = _("shelved change touches missing files")
793 m = _("shelved change touches missing files")
795 hint = _("run hg status to see which files are missing")
794 hint = _("run hg status to see which files are missing")
796 raise error.Abort(m, hint=hint)
795 raise error.Abort(m, hint=hint)
797
796
798 @command('unshelve',
797 @command('unshelve',
799 [('a', 'abort', None,
798 [('a', 'abort', None,
800 _('abort an incomplete unshelve operation')),
799 _('abort an incomplete unshelve operation')),
801 ('c', 'continue', None,
800 ('c', 'continue', None,
802 _('continue an incomplete unshelve operation')),
801 _('continue an incomplete unshelve operation')),
803 ('k', 'keep', None,
802 ('k', 'keep', None,
804 _('keep shelve after unshelving')),
803 _('keep shelve after unshelving')),
805 ('n', 'name', '',
804 ('n', 'name', '',
806 _('restore shelved change with given name'), _('NAME')),
805 _('restore shelved change with given name'), _('NAME')),
807 ('t', 'tool', '', _('specify merge tool')),
806 ('t', 'tool', '', _('specify merge tool')),
808 ('', 'date', '',
807 ('', 'date', '',
809 _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
808 _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
810 _('hg unshelve [[-n] SHELVED]'))
809 _('hg unshelve [[-n] SHELVED]'))
811 def unshelve(ui, repo, *shelved, **opts):
810 def unshelve(ui, repo, *shelved, **opts):
812 """restore a shelved change to the working directory
811 """restore a shelved change to the working directory
813
812
814 This command accepts an optional name of a shelved change to
813 This command accepts an optional name of a shelved change to
815 restore. If none is given, the most recent shelved change is used.
814 restore. If none is given, the most recent shelved change is used.
816
815
817 If a shelved change is applied successfully, the bundle that
816 If a shelved change is applied successfully, the bundle that
818 contains the shelved changes is moved to a backup location
817 contains the shelved changes is moved to a backup location
819 (.hg/shelve-backup).
818 (.hg/shelve-backup).
820
819
821 Since you can restore a shelved change on top of an arbitrary
820 Since you can restore a shelved change on top of an arbitrary
822 commit, it is possible that unshelving will result in a conflict
821 commit, it is possible that unshelving will result in a conflict
823 between your changes and the commits you are unshelving onto. If
822 between your changes and the commits you are unshelving onto. If
824 this occurs, you must resolve the conflict, then use
823 this occurs, you must resolve the conflict, then use
825 ``--continue`` to complete the unshelve operation. (The bundle
824 ``--continue`` to complete the unshelve operation. (The bundle
826 will not be moved until you successfully complete the unshelve.)
825 will not be moved until you successfully complete the unshelve.)
827
826
828 (Alternatively, you can use ``--abort`` to abandon an unshelve
827 (Alternatively, you can use ``--abort`` to abandon an unshelve
829 that causes a conflict. This reverts the unshelved changes, and
828 that causes a conflict. This reverts the unshelved changes, and
830 leaves the bundle in place.)
829 leaves the bundle in place.)
831
830
832 If a bare shelved change (when no files are specified, without the
831 If a bare shelved change (when no files are specified, without the
833 interactive, include and exclude options) was made on a newly created
832 interactive, include and exclude options) was made on a newly created
834 branch, unshelving restores that branch information to the working directory.
833 branch, unshelving restores that branch information to the working directory.
835
834
836 After a successful unshelve, the shelved changes are stored in a
835 After a successful unshelve, the shelved changes are stored in a
837 backup directory. Only the N most recent backups are kept. N
836 backup directory. Only the N most recent backups are kept. N
838 defaults to 10 but can be overridden using the ``shelve.maxbackups``
837 defaults to 10 but can be overridden using the ``shelve.maxbackups``
839 configuration option.
838 configuration option.
840
839
841 .. container:: verbose
840 .. container:: verbose
842
841
843 Timestamps in seconds are used to decide the order of backups. More
842 Timestamps in seconds are used to decide the order of backups. More
844 than ``maxbackups`` backups are kept when identical timestamps make
843 than ``maxbackups`` backups are kept when identical timestamps make
845 their exact order ambiguous, as a safety measure.
844 their exact order ambiguous, as a safety measure.
846 """
845 """
847 with repo.wlock():
846 with repo.wlock():
848 return _dounshelve(ui, repo, *shelved, **opts)
847 return _dounshelve(ui, repo, *shelved, **opts)
849
848
850 def _dounshelve(ui, repo, *shelved, **opts):
849 def _dounshelve(ui, repo, *shelved, **opts):
851 abortf = opts.get('abort')
850 abortf = opts.get('abort')
852 continuef = opts.get('continue')
851 continuef = opts.get('continue')
853 if not abortf and not continuef:
852 if not abortf and not continuef:
854 cmdutil.checkunfinished(repo)
853 cmdutil.checkunfinished(repo)
855 shelved = list(shelved)
854 shelved = list(shelved)
856 if opts.get("name"):
855 if opts.get("name"):
857 shelved.append(opts["name"])
856 shelved.append(opts["name"])
858
857
859 if abortf or continuef:
858 if abortf or continuef:
860 if abortf and continuef:
859 if abortf and continuef:
861 raise error.Abort(_('cannot use both abort and continue'))
860 raise error.Abort(_('cannot use both abort and continue'))
862 if shelved:
861 if shelved:
863 raise error.Abort(_('cannot combine abort/continue with '
862 raise error.Abort(_('cannot combine abort/continue with '
864 'naming a shelved change'))
863 'naming a shelved change'))
865 if abortf and opts.get('tool', False):
864 if abortf and opts.get('tool', False):
866 ui.warn(_('tool option will be ignored\n'))
865 ui.warn(_('tool option will be ignored\n'))
867
866
868 try:
867 try:
869 state = shelvedstate.load(repo)
868 state = shelvedstate.load(repo)
870 if opts.get('keep') is None:
869 if opts.get('keep') is None:
871 opts['keep'] = state.keep
870 opts['keep'] = state.keep
872 except IOError as err:
871 except IOError as err:
873 if err.errno != errno.ENOENT:
872 if err.errno != errno.ENOENT:
874 raise
873 raise
875 cmdutil.wrongtooltocontinue(repo, _('unshelve'))
874 cmdutil.wrongtooltocontinue(repo, _('unshelve'))
876 except error.CorruptedState as err:
875 except error.CorruptedState as err:
877 ui.debug(str(err) + '\n')
876 ui.debug(str(err) + '\n')
878 if continuef:
877 if continuef:
879 msg = _('corrupted shelved state file')
878 msg = _('corrupted shelved state file')
880 hint = _('please run hg unshelve --abort to abort unshelve '
879 hint = _('please run hg unshelve --abort to abort unshelve '
881 'operation')
880 'operation')
882 raise error.Abort(msg, hint=hint)
881 raise error.Abort(msg, hint=hint)
883 elif abortf:
882 elif abortf:
884 msg = _('could not read shelved state file, your working copy '
883 msg = _('could not read shelved state file, your working copy '
885 'may be in an unexpected state\nplease update to some '
884 'may be in an unexpected state\nplease update to some '
886 'commit\n')
885 'commit\n')
887 ui.warn(msg)
886 ui.warn(msg)
888 shelvedstate.clear(repo)
887 shelvedstate.clear(repo)
889 return
888 return
890
889
891 if abortf:
890 if abortf:
892 return unshelveabort(ui, repo, state, opts)
891 return unshelveabort(ui, repo, state, opts)
893 elif continuef:
892 elif continuef:
894 return unshelvecontinue(ui, repo, state, opts)
893 return unshelvecontinue(ui, repo, state, opts)
895 elif len(shelved) > 1:
894 elif len(shelved) > 1:
896 raise error.Abort(_('can only unshelve one change at a time'))
895 raise error.Abort(_('can only unshelve one change at a time'))
897 elif not shelved:
896 elif not shelved:
898 shelved = listshelves(repo)
897 shelved = listshelves(repo)
899 if not shelved:
898 if not shelved:
900 raise error.Abort(_('no shelved changes to apply!'))
899 raise error.Abort(_('no shelved changes to apply!'))
901 basename = util.split(shelved[0][1])[1]
900 basename = util.split(shelved[0][1])[1]
902 ui.status(_("unshelving change '%s'\n") % basename)
901 ui.status(_("unshelving change '%s'\n") % basename)
903 else:
902 else:
904 basename = shelved[0]
903 basename = shelved[0]
905
904
906 if not shelvedfile(repo, basename, patchextension).exists():
905 if not shelvedfile(repo, basename, patchextension).exists():
907 raise error.Abort(_("shelved change '%s' not found") % basename)
906 raise error.Abort(_("shelved change '%s' not found") % basename)
908
907
909 lock = tr = None
908 lock = tr = None
910 try:
909 try:
911 lock = repo.lock()
910 lock = repo.lock()
912 tr = repo.transaction('unshelve', report=lambda x: None)
911 tr = repo.transaction('unshelve', report=lambda x: None)
913 oldtiprev = len(repo)
912 oldtiprev = len(repo)
914
913
915 pctx = repo['.']
914 pctx = repo['.']
916 tmpwctx = pctx
915 tmpwctx = pctx
917 # The goal is to have a commit structure like so:
916 # The goal is to have a commit structure like so:
918 # ...-> pctx -> tmpwctx -> shelvectx
917 # ...-> pctx -> tmpwctx -> shelvectx
919 # where tmpwctx is an optional commit with the user's pending changes
918 # where tmpwctx is an optional commit with the user's pending changes
920 # and shelvectx is the unshelved changes. Then we merge it all down
919 # and shelvectx is the unshelved changes. Then we merge it all down
921 # to the original pctx.
920 # to the original pctx.
922
921
923 activebookmark = _backupactivebookmark(repo)
922 activebookmark = _backupactivebookmark(repo)
924 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
923 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
925 with ui.configoverride(overrides, 'unshelve'):
924 with ui.configoverride(overrides, 'unshelve'):
926 tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
925 tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
927 tmpwctx)
926 tmpwctx)
928 repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
927 repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
929 _checkunshelveuntrackedproblems(ui, repo, shelvectx)
928 _checkunshelveuntrackedproblems(ui, repo, shelvectx)
930 branchtorestore = ''
929 branchtorestore = ''
931 if shelvectx.branch() != shelvectx.p1().branch():
930 if shelvectx.branch() != shelvectx.p1().branch():
932 branchtorestore = shelvectx.branch()
931 branchtorestore = shelvectx.branch()
933
932
934 shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
933 shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
935 basename, pctx, tmpwctx,
934 basename, pctx, tmpwctx,
936 shelvectx, branchtorestore,
935 shelvectx, branchtorestore,
937 activebookmark)
936 activebookmark)
938 mergefiles(ui, repo, pctx, shelvectx)
937 mergefiles(ui, repo, pctx, shelvectx)
939 restorebranch(ui, repo, branchtorestore)
938 restorebranch(ui, repo, branchtorestore)
940 _forgetunknownfiles(repo, shelvectx, addedbefore)
939 _forgetunknownfiles(repo, shelvectx, addedbefore)
941
940
942 shelvedstate.clear(repo)
941 shelvedstate.clear(repo)
943 _finishunshelve(repo, oldtiprev, tr, activebookmark)
942 _finishunshelve(repo, oldtiprev, tr, activebookmark)
944 unshelvecleanup(ui, repo, basename, opts)
943 unshelvecleanup(ui, repo, basename, opts)
945 finally:
944 finally:
946 if tr:
945 if tr:
947 tr.release()
946 tr.release()
948 lockmod.release(lock)
947 lockmod.release(lock)
949
948
950 @command('shelve',
949 @command('shelve',
951 [('A', 'addremove', None,
950 [('A', 'addremove', None,
952 _('mark new/missing files as added/removed before shelving')),
951 _('mark new/missing files as added/removed before shelving')),
953 ('u', 'unknown', None,
952 ('u', 'unknown', None,
954 _('store unknown files in the shelve')),
953 _('store unknown files in the shelve')),
955 ('', 'cleanup', None,
954 ('', 'cleanup', None,
956 _('delete all shelved changes')),
955 _('delete all shelved changes')),
957 ('', 'date', '',
956 ('', 'date', '',
958 _('shelve with the specified commit date'), _('DATE')),
957 _('shelve with the specified commit date'), _('DATE')),
959 ('d', 'delete', None,
958 ('d', 'delete', None,
960 _('delete the named shelved change(s)')),
959 _('delete the named shelved change(s)')),
961 ('e', 'edit', False,
960 ('e', 'edit', False,
962 _('invoke editor on commit messages')),
961 _('invoke editor on commit messages')),
963 ('l', 'list', None,
962 ('l', 'list', None,
964 _('list current shelves')),
963 _('list current shelves')),
965 ('m', 'message', '',
964 ('m', 'message', '',
966 _('use text as shelve message'), _('TEXT')),
965 _('use text as shelve message'), _('TEXT')),
967 ('n', 'name', '',
966 ('n', 'name', '',
968 _('use the given name for the shelved commit'), _('NAME')),
967 _('use the given name for the shelved commit'), _('NAME')),
969 ('p', 'patch', None,
968 ('p', 'patch', None,
970 _('show patch')),
969 _('show patch')),
971 ('i', 'interactive', None,
970 ('i', 'interactive', None,
972 _('interactive mode, only works while creating a shelve')),
971 _('interactive mode, only works while creating a shelve')),
973 ('', 'stat', None,
972 ('', 'stat', None,
974 _('output diffstat-style summary of changes'))] + commands.walkopts,
973 _('output diffstat-style summary of changes'))] + cmdutil.walkopts,
975 _('hg shelve [OPTION]... [FILE]...'))
974 _('hg shelve [OPTION]... [FILE]...'))
976 def shelvecmd(ui, repo, *pats, **opts):
975 def shelvecmd(ui, repo, *pats, **opts):
977 '''save and set aside changes from the working directory
976 '''save and set aside changes from the working directory
978
977
979 Shelving takes files that "hg status" reports as not clean, saves
978 Shelving takes files that "hg status" reports as not clean, saves
980 the modifications to a bundle (a shelved change), and reverts the
979 the modifications to a bundle (a shelved change), and reverts the
981 files so that their state in the working directory becomes clean.
980 files so that their state in the working directory becomes clean.
982
981
983 To restore these changes to the working directory, use "hg
982 To restore these changes to the working directory, use "hg
984 unshelve"; this will work even if you switch to a different
983 unshelve"; this will work even if you switch to a different
985 commit.
984 commit.
986
985
987 When no files are specified, "hg shelve" saves all not-clean
986 When no files are specified, "hg shelve" saves all not-clean
988 files. If specific files or directories are named, only changes to
987 files. If specific files or directories are named, only changes to
989 those files are shelved.
988 those files are shelved.
990
989
991 In a bare shelve (when no files are specified, without the interactive,
990 In a bare shelve (when no files are specified, without the interactive,
992 include and exclude options), shelving remembers whether the working
991 include and exclude options), shelving remembers whether the working
993 directory was on a newly created branch, in other words on a different
992 directory was on a newly created branch, in other words on a different
994 branch than its first parent. In that situation, unshelving restores the
993 branch than its first parent. In that situation, unshelving restores the
995 branch information to the working directory.
994 branch information to the working directory.
996
995
997 Each shelved change has a name that makes it easier to find later.
996 Each shelved change has a name that makes it easier to find later.
998 The name of a shelved change defaults to being based on the active
997 The name of a shelved change defaults to being based on the active
999 bookmark, or if there is no active bookmark, the current named
998 bookmark, or if there is no active bookmark, the current named
1000 branch. To specify a different name, use ``--name``.
999 branch. To specify a different name, use ``--name``.
1001
1000
1002 To see a list of existing shelved changes, use the ``--list``
1001 To see a list of existing shelved changes, use the ``--list``
1003 option. For each shelved change, this will print its name, age,
1002 option. For each shelved change, this will print its name, age,
1004 and description; use ``--patch`` or ``--stat`` for more details.
1003 and description; use ``--patch`` or ``--stat`` for more details.
1005
1004
1006 To delete specific shelved changes, use ``--delete``. To delete
1005 To delete specific shelved changes, use ``--delete``. To delete
1007 all shelved changes, use ``--cleanup``.
1006 all shelved changes, use ``--cleanup``.
1008 '''
1007 '''
1009 allowables = [
1008 allowables = [
1010 ('addremove', {'create'}), # 'create' is pseudo action
1009 ('addremove', {'create'}), # 'create' is pseudo action
1011 ('unknown', {'create'}),
1010 ('unknown', {'create'}),
1012 ('cleanup', {'cleanup'}),
1011 ('cleanup', {'cleanup'}),
1013 # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
1012 # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
1014 ('delete', {'delete'}),
1013 ('delete', {'delete'}),
1015 ('edit', {'create'}),
1014 ('edit', {'create'}),
1016 ('list', {'list'}),
1015 ('list', {'list'}),
1017 ('message', {'create'}),
1016 ('message', {'create'}),
1018 ('name', {'create'}),
1017 ('name', {'create'}),
1019 ('patch', {'patch', 'list'}),
1018 ('patch', {'patch', 'list'}),
1020 ('stat', {'stat', 'list'}),
1019 ('stat', {'stat', 'list'}),
1021 ]
1020 ]
1022 def checkopt(opt):
1021 def checkopt(opt):
1023 if opts.get(opt):
1022 if opts.get(opt):
1024 for i, allowable in allowables:
1023 for i, allowable in allowables:
1025 if opts[i] and opt not in allowable:
1024 if opts[i] and opt not in allowable:
1026 raise error.Abort(_("options '--%s' and '--%s' may not be "
1025 raise error.Abort(_("options '--%s' and '--%s' may not be "
1027 "used together") % (opt, i))
1026 "used together") % (opt, i))
1028 return True
1027 return True
1029 if checkopt('cleanup'):
1028 if checkopt('cleanup'):
1030 if pats:
1029 if pats:
1031 raise error.Abort(_("cannot specify names when using '--cleanup'"))
1030 raise error.Abort(_("cannot specify names when using '--cleanup'"))
1032 return cleanupcmd(ui, repo)
1031 return cleanupcmd(ui, repo)
1033 elif checkopt('delete'):
1032 elif checkopt('delete'):
1034 return deletecmd(ui, repo, pats)
1033 return deletecmd(ui, repo, pats)
1035 elif checkopt('list'):
1034 elif checkopt('list'):
1036 return listcmd(ui, repo, pats, opts)
1035 return listcmd(ui, repo, pats, opts)
1037 elif checkopt('patch'):
1036 elif checkopt('patch'):
1038 return patchcmds(ui, repo, pats, opts, subcommand='patch')
1037 return patchcmds(ui, repo, pats, opts, subcommand='patch')
1039 elif checkopt('stat'):
1038 elif checkopt('stat'):
1040 return patchcmds(ui, repo, pats, opts, subcommand='stat')
1039 return patchcmds(ui, repo, pats, opts, subcommand='stat')
1041 else:
1040 else:
1042 return createcmd(ui, repo, pats, opts)
1041 return createcmd(ui, repo, pats, opts)
1043
1042
1044 def extsetup(ui):
1043 def extsetup(ui):
1045 cmdutil.unfinishedstates.append(
1044 cmdutil.unfinishedstates.append(
1046 [shelvedstate._filename, False, False,
1045 [shelvedstate._filename, False, False,
1047 _('unshelve already in progress'),
1046 _('unshelve already in progress'),
1048 _("use 'hg unshelve --continue' or 'hg unshelve --abort'")])
1047 _("use 'hg unshelve --continue' or 'hg unshelve --abort'")])
1049 cmdutil.afterresolvedstates.append(
1048 cmdutil.afterresolvedstates.append(
1050 [shelvedstate._filename, _('hg unshelve --continue')])
1049 [shelvedstate._filename, _('hg unshelve --continue')])
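The extsetup hook above is the generic way an extension plugs into cmdutil's unfinished-operation machinery. As a minimal sketch, with all names hypothetical and assuming the entry layout used by shelve above (state file name, two booleans for clearable/allow-commit, error message, hint), another extension could register its own state the same way:

from mercurial import cmdutil
from mercurial.i18n import _

def extsetup(ui):
    # 'mystate' and 'hg myext' are placeholder names for illustration only
    cmdutil.unfinishedstates.append(
        ['mystate', False, False,
         _('my operation already in progress'),
         _("use 'hg myext --continue' or 'hg myext --abort'")])
    cmdutil.afterresolvedstates.append(
        ['mystate', _('hg myext --continue')])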
@@ -1,222 +1,222 b''
1 # show.py - Extension implementing `hg show`
1 # show.py - Extension implementing `hg show`
2 #
2 #
3 # Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """unified command to show various repository information (EXPERIMENTAL)
8 """unified command to show various repository information (EXPERIMENTAL)
9
9
10 This extension provides the :hg:`show` command, which provides a central
10 This extension provides the :hg:`show` command, which provides a central
11 command for displaying commonly-accessed repository data and views of that
11 command for displaying commonly-accessed repository data and views of that
12 data.
12 data.
13 """
13 """
14
14
15 from __future__ import absolute_import
15 from __future__ import absolute_import
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import nullrev
18 from mercurial.node import nullrev
19 from mercurial import (
19 from mercurial import (
20 cmdutil,
20 cmdutil,
21 error,
21 error,
22 formatter,
22 formatter,
23 graphmod,
23 graphmod,
24 pycompat,
24 pycompat,
25 registrar,
25 registrar,
26 revset,
26 revset,
27 revsetlang,
27 revsetlang,
28 )
28 )
29
29
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
32 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
33 # leave the attribute unspecified.
34 testedwith = 'ships-with-hg-core'
34 testedwith = 'ships-with-hg-core'
35
35
36 cmdtable = {}
36 cmdtable = {}
37 command = registrar.command(cmdtable)
37 command = registrar.command(cmdtable)
38 revsetpredicate = registrar.revsetpredicate()
38 revsetpredicate = registrar.revsetpredicate()
39
39
40 class showcmdfunc(registrar._funcregistrarbase):
40 class showcmdfunc(registrar._funcregistrarbase):
41 """Register a function to be invoked for an `hg show <thing>`."""
41 """Register a function to be invoked for an `hg show <thing>`."""
42
42
43 # Used by _formatdoc().
43 # Used by _formatdoc().
44 _docformat = '%s -- %s'
44 _docformat = '%s -- %s'
45
45
46 def _extrasetup(self, name, func, fmtopic=None):
46 def _extrasetup(self, name, func, fmtopic=None):
47 """Called with decorator arguments to register a show view.
47 """Called with decorator arguments to register a show view.
48
48
49 ``name`` is the sub-command name.
49 ``name`` is the sub-command name.
50
50
51 ``func`` is the function being decorated.
51 ``func`` is the function being decorated.
52
52
53 ``fmtopic`` is the topic in the style that will be rendered for
53 ``fmtopic`` is the topic in the style that will be rendered for
54 this view.
54 this view.
55 """
55 """
56 func._fmtopic = fmtopic
56 func._fmtopic = fmtopic
57
57
58 showview = showcmdfunc()
58 showview = showcmdfunc()
59
59
60 @command('show', [
60 @command('show', [
61 # TODO: Switch this template flag to use commands.formatteropts if
61 # TODO: Switch this template flag to use cmdutil.formatteropts if
62 # 'hg show' becomes stable before --template/-T is stable. For now,
62 # 'hg show' becomes stable before --template/-T is stable. For now,
63 # we are putting it here without the '(EXPERIMENTAL)' flag because it
63 # we are putting it here without the '(EXPERIMENTAL)' flag because it
64 # is an important part of the 'hg show' user experience and the entire
64 # is an important part of the 'hg show' user experience and the entire
65 # 'hg show' experience is experimental.
65 # 'hg show' experience is experimental.
66 ('T', 'template', '', ('display with template'), _('TEMPLATE')),
66 ('T', 'template', '', ('display with template'), _('TEMPLATE')),
67 ], _('VIEW'))
67 ], _('VIEW'))
68 def show(ui, repo, view=None, template=None):
68 def show(ui, repo, view=None, template=None):
69 """show various repository information
69 """show various repository information
70
70
71 A requested view of repository data is displayed.
71 A requested view of repository data is displayed.
72
72
73 If no view is requested, the list of available views is shown and the
73 If no view is requested, the list of available views is shown and the
74 command aborts.
74 command aborts.
75
75
76 .. note::
76 .. note::
77
77
78 There are no backwards compatibility guarantees for the output of this
78 There are no backwards compatibility guarantees for the output of this
79 command. Output may change in any future Mercurial release.
79 command. Output may change in any future Mercurial release.
80
80
81 Consumers wanting stable command output should specify a template via
81 Consumers wanting stable command output should specify a template via
82 ``-T/--template``.
82 ``-T/--template``.
83
83
84 List of available views:
84 List of available views:
85 """
85 """
86 if ui.plain() and not template:
86 if ui.plain() and not template:
87 hint = _('invoke with -T/--template to control output format')
87 hint = _('invoke with -T/--template to control output format')
88 raise error.Abort(_('must specify a template in plain mode'), hint=hint)
88 raise error.Abort(_('must specify a template in plain mode'), hint=hint)
89
89
90 views = showview._table
90 views = showview._table
91
91
92 if not view:
92 if not view:
93 ui.pager('show')
93 ui.pager('show')
94 # TODO consider using formatter here so available views can be
94 # TODO consider using formatter here so available views can be
95 # rendered to custom format.
95 # rendered to custom format.
96 ui.write(_('available views:\n'))
96 ui.write(_('available views:\n'))
97 ui.write('\n')
97 ui.write('\n')
98
98
99 for name, func in sorted(views.items()):
99 for name, func in sorted(views.items()):
100 ui.write(('%s\n') % func.__doc__)
100 ui.write(('%s\n') % func.__doc__)
101
101
102 ui.write('\n')
102 ui.write('\n')
103 raise error.Abort(_('no view requested'),
103 raise error.Abort(_('no view requested'),
104 hint=_('use "hg show VIEW" to choose a view'))
104 hint=_('use "hg show VIEW" to choose a view'))
105
105
106 # TODO use same logic as dispatch to perform prefix matching.
106 # TODO use same logic as dispatch to perform prefix matching.
107 if view not in views:
107 if view not in views:
108 raise error.Abort(_('unknown view: %s') % view,
108 raise error.Abort(_('unknown view: %s') % view,
109 hint=_('run "hg show" to see available views'))
109 hint=_('run "hg show" to see available views'))
110
110
111 template = template or 'show'
111 template = template or 'show'
112 fmtopic = 'show%s' % views[view]._fmtopic
112 fmtopic = 'show%s' % views[view]._fmtopic
113
113
114 ui.pager('show')
114 ui.pager('show')
115 with ui.formatter(fmtopic, {'template': template}) as fm:
115 with ui.formatter(fmtopic, {'template': template}) as fm:
116 return views[view](ui, repo, fm)
116 return views[view](ui, repo, fm)
117
117
118 @showview('bookmarks', fmtopic='bookmarks')
118 @showview('bookmarks', fmtopic='bookmarks')
119 def showbookmarks(ui, repo, fm):
119 def showbookmarks(ui, repo, fm):
120 """bookmarks and their associated changeset"""
120 """bookmarks and their associated changeset"""
121 marks = repo._bookmarks
121 marks = repo._bookmarks
122 if not len(marks):
122 if not len(marks):
123 # This is a bit hacky. Ideally, templates would have a way to
123 # This is a bit hacky. Ideally, templates would have a way to
124 # specify an empty output, but we shouldn't corrupt JSON while
124 # specify an empty output, but we shouldn't corrupt JSON while
125 # waiting for this functionality.
125 # waiting for this functionality.
126 if not isinstance(fm, formatter.jsonformatter):
126 if not isinstance(fm, formatter.jsonformatter):
127 ui.write(_('(no bookmarks set)\n'))
127 ui.write(_('(no bookmarks set)\n'))
128 return
128 return
129
129
130 active = repo._activebookmark
130 active = repo._activebookmark
131 longestname = max(len(b) for b in marks)
131 longestname = max(len(b) for b in marks)
132 # TODO consider exposing longest shortest(node).
132 # TODO consider exposing longest shortest(node).
133
133
134 for bm, node in sorted(marks.items()):
134 for bm, node in sorted(marks.items()):
135 fm.startitem()
135 fm.startitem()
136 fm.context(ctx=repo[node])
136 fm.context(ctx=repo[node])
137 fm.write('bookmark', '%s', bm)
137 fm.write('bookmark', '%s', bm)
138 fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
138 fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
139 fm.data(active=bm == active,
139 fm.data(active=bm == active,
140 longestbookmarklen=longestname)
140 longestbookmarklen=longestname)
141
141
142 @revsetpredicate('_underway([commitage[, headage]])')
142 @revsetpredicate('_underway([commitage[, headage]])')
143 def underwayrevset(repo, subset, x):
143 def underwayrevset(repo, subset, x):
144 args = revset.getargsdict(x, 'underway', 'commitage headage')
144 args = revset.getargsdict(x, 'underway', 'commitage headage')
145 if 'commitage' not in args:
145 if 'commitage' not in args:
146 args['commitage'] = None
146 args['commitage'] = None
147 if 'headage' not in args:
147 if 'headage' not in args:
148 args['headage'] = None
148 args['headage'] = None
149
149
150 # We assume callers of this revset add a topological sort on the
150 # We assume callers of this revset add a topological sort on the
151 # result. This means there is no benefit to making the revset lazy
151 # result. This means there is no benefit to making the revset lazy
152 # since the topological sort needs to consume all revs.
152 # since the topological sort needs to consume all revs.
153 #
153 #
154 # With this in mind, we build up the set manually instead of constructing
154 # With this in mind, we build up the set manually instead of constructing
155 # a complex revset. This enables faster execution.
155 # a complex revset. This enables faster execution.
156
156
157 # Mutable changesets (non-public) are the most important changesets
157 # Mutable changesets (non-public) are the most important changesets
158 # to return. ``not public()`` will also pull in obsolete changesets if
158 # to return. ``not public()`` will also pull in obsolete changesets if
159 # there is a non-obsolete changeset with obsolete ancestors. This is
159 # there is a non-obsolete changeset with obsolete ancestors. This is
160 # why we exclude obsolete changesets from this query.
160 # why we exclude obsolete changesets from this query.
161 rs = 'not public() and not obsolete()'
161 rs = 'not public() and not obsolete()'
162 rsargs = []
162 rsargs = []
163 if args['commitage']:
163 if args['commitage']:
164 rs += ' and date(%s)'
164 rs += ' and date(%s)'
165 rsargs.append(revsetlang.getstring(args['commitage'],
165 rsargs.append(revsetlang.getstring(args['commitage'],
166 _('commitage requires a string')))
166 _('commitage requires a string')))
167
167
168 mutable = repo.revs(rs, *rsargs)
168 mutable = repo.revs(rs, *rsargs)
169 relevant = revset.baseset(mutable)
169 relevant = revset.baseset(mutable)
170
170
171 # Add parents of mutable changesets to provide context.
171 # Add parents of mutable changesets to provide context.
172 relevant += repo.revs('parents(%ld)', mutable)
172 relevant += repo.revs('parents(%ld)', mutable)
173
173
174 # We also pull in (public) heads if they a) aren't closing a branch
174 # We also pull in (public) heads if they a) aren't closing a branch
175 # b) are recent.
175 # b) are recent.
176 rs = 'head() and not closed()'
176 rs = 'head() and not closed()'
177 rsargs = []
177 rsargs = []
178 if args['headage']:
178 if args['headage']:
179 rs += ' and date(%s)'
179 rs += ' and date(%s)'
180 rsargs.append(revsetlang.getstring(args['headage'],
180 rsargs.append(revsetlang.getstring(args['headage'],
181 _('headage requires a string')))
181 _('headage requires a string')))
182
182
183 relevant += repo.revs(rs, *rsargs)
183 relevant += repo.revs(rs, *rsargs)
184
184
185 # Add working directory parent.
185 # Add working directory parent.
186 wdirrev = repo['.'].rev()
186 wdirrev = repo['.'].rev()
187 if wdirrev != nullrev:
187 if wdirrev != nullrev:
188 relevant += revset.baseset({wdirrev})
188 relevant += revset.baseset({wdirrev})
189
189
190 return subset & relevant
190 return subset & relevant
191
191
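For illustration only, and ignoring the optional date() filters: the set assembled manually above corresponds roughly to the single revset below. The function avoids issuing one large expression because, as its comments note, building the set by hand executes faster.

# rough, illustrative equivalent of the manual construction above;
# assumes a non-empty repository so '.' resolves to a real revision
relevant = repo.revs(
    '(not public() and not obsolete()) or '
    'parents(not public() and not obsolete()) or '
    '(head() and not closed()) or .')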
192 @showview('work', fmtopic='work')
192 @showview('work', fmtopic='work')
193 def showwork(ui, repo, fm):
193 def showwork(ui, repo, fm):
194 """changesets that aren't finished"""
194 """changesets that aren't finished"""
195 # TODO support date-based limiting when calling revset.
195 # TODO support date-based limiting when calling revset.
196 revs = repo.revs('sort(_underway(), topo)')
196 revs = repo.revs('sort(_underway(), topo)')
197
197
198 revdag = graphmod.dagwalker(repo, revs)
198 revdag = graphmod.dagwalker(repo, revs)
199 displayer = cmdutil.changeset_templater(ui, repo, None, None,
199 displayer = cmdutil.changeset_templater(ui, repo, None, None,
200 tmpl=fm._t.load(fm._topic),
200 tmpl=fm._t.load(fm._topic),
201 mapfile=None, buffered=True)
201 mapfile=None, buffered=True)
202
202
203 ui.setconfig('experimental', 'graphshorten', True)
203 ui.setconfig('experimental', 'graphshorten', True)
204 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
204 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
205
205
206 # Adjust the docstring of the show command so it shows all registered views.
206 # Adjust the docstring of the show command so it shows all registered views.
207 # This is a bit hacky because it runs at the end of module load. When moved
207 # This is a bit hacky because it runs at the end of module load. When moved
208 # into core or when another extension wants to provide a view, we'll need
208 # into core or when another extension wants to provide a view, we'll need
209 # to do this more robustly.
209 # to do this more robustly.
210 # TODO make this more robust.
210 # TODO make this more robust.
211 def _updatedocstring():
211 def _updatedocstring():
212 longest = max(map(len, showview._table.keys()))
212 longest = max(map(len, showview._table.keys()))
213 entries = []
213 entries = []
214 for key in sorted(showview._table.keys()):
214 for key in sorted(showview._table.keys()):
215 entries.append(pycompat.sysstr(' %s %s' % (
215 entries.append(pycompat.sysstr(' %s %s' % (
216 key.ljust(longest), showview._table[key]._origdoc)))
216 key.ljust(longest), showview._table[key]._origdoc)))
217
217
218 cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n ') % (
218 cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n ') % (
219 cmdtable['show'][0].__doc__.rstrip(),
219 cmdtable['show'][0].__doc__.rstrip(),
220 pycompat.sysstr('\n\n').join(entries))
220 pycompat.sysstr('\n\n').join(entries))
221
221
222 _updatedocstring()
222 _updatedocstring()
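As the docstring rewriting above implies, additional views hook into `hg show` through the @showview decorator, following the pattern of showbookmarks and showwork. Below is a minimal sketch of a hypothetical extra view; the 'tags' name is illustrative, and it assumes a matching 'showtags' template topic exists in the show style file.

@showview('tags', fmtopic='tags')
def showtags(ui, repo, fm):
    """tags and the changesets they point to"""
    for name, node in sorted(repo.tags().items()):
        fm.startitem()
        fm.context(ctx=repo[node])
        fm.write('tag', '%s', name)
        fm.write('node', ' %s\n', fm.hexfunc(node))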
@@ -1,3449 +1,3556 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 bin,
18 bin,
19 hex,
19 hex,
20 nullid,
20 nullid,
21 nullrev,
21 nullrev,
22 short,
22 short,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 bookmarks,
26 bookmarks,
27 changelog,
27 changelog,
28 copies,
28 copies,
29 crecord as crecordmod,
29 crecord as crecordmod,
30 encoding,
30 encoding,
31 error,
31 error,
32 formatter,
32 formatter,
33 graphmod,
33 graphmod,
34 lock as lockmod,
34 lock as lockmod,
35 match as matchmod,
35 match as matchmod,
36 obsolete,
36 obsolete,
37 patch,
37 patch,
38 pathutil,
38 pathutil,
39 phases,
39 phases,
40 pycompat,
40 pycompat,
41 registrar,
41 registrar,
42 repair,
42 repair,
43 revlog,
43 revlog,
44 revset,
44 revset,
45 scmutil,
45 scmutil,
46 smartset,
46 smartset,
47 templatekw,
47 templatekw,
48 templater,
48 templater,
49 util,
49 util,
50 vfs as vfsmod,
50 vfs as vfsmod,
51 )
51 )
52 stringio = util.stringio
52 stringio = util.stringio
53
53
54 # templates of common command options
55
56 dryrunopts = [
57 ('n', 'dry-run', None,
58 _('do not perform actions, just print output')),
59 ]
60
61 remoteopts = [
62 ('e', 'ssh', '',
63 _('specify ssh command to use'), _('CMD')),
64 ('', 'remotecmd', '',
65 _('specify hg command to run on the remote side'), _('CMD')),
66 ('', 'insecure', None,
67 _('do not verify server certificate (ignoring web.cacerts config)')),
68 ]
69
70 walkopts = [
71 ('I', 'include', [],
72 _('include names matching the given patterns'), _('PATTERN')),
73 ('X', 'exclude', [],
74 _('exclude names matching the given patterns'), _('PATTERN')),
75 ]
76
77 commitopts = [
78 ('m', 'message', '',
79 _('use text as commit message'), _('TEXT')),
80 ('l', 'logfile', '',
81 _('read commit message from file'), _('FILE')),
82 ]
83
84 commitopts2 = [
85 ('d', 'date', '',
86 _('record the specified date as commit date'), _('DATE')),
87 ('u', 'user', '',
88 _('record the specified user as committer'), _('USER')),
89 ]
90
91 # hidden for now
92 formatteropts = [
93 ('T', 'template', '',
94 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
95 ]
96
97 templateopts = [
98 ('', 'style', '',
99 _('display using template map file (DEPRECATED)'), _('STYLE')),
100 ('T', 'template', '',
101 _('display with template'), _('TEMPLATE')),
102 ]
103
104 logopts = [
105 ('p', 'patch', None, _('show patch')),
106 ('g', 'git', None, _('use git extended diff format')),
107 ('l', 'limit', '',
108 _('limit number of changes displayed'), _('NUM')),
109 ('M', 'no-merges', None, _('do not show merges')),
110 ('', 'stat', None, _('output diffstat-style summary of changes')),
111 ('G', 'graph', None, _("show the revision DAG")),
112 ] + templateopts
113
114 diffopts = [
115 ('a', 'text', None, _('treat all files as text')),
116 ('g', 'git', None, _('use git extended diff format')),
117 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
118 ('', 'nodates', None, _('omit dates from diff headers'))
119 ]
120
121 diffwsopts = [
122 ('w', 'ignore-all-space', None,
123 _('ignore white space when comparing lines')),
124 ('b', 'ignore-space-change', None,
125 _('ignore changes in the amount of white space')),
126 ('B', 'ignore-blank-lines', None,
127 _('ignore changes whose lines are all blank')),
128 ]
129
130 diffopts2 = [
131 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
132 ('p', 'show-function', None, _('show which function each change is in')),
133 ('', 'reverse', None, _('produce a diff that undoes the changes')),
134 ] + diffwsopts + [
135 ('U', 'unified', '',
136 _('number of lines of context to show'), _('NUM')),
137 ('', 'stat', None, _('output diffstat-style summary of changes')),
138 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
139 ]
140
141 mergetoolopts = [
142 ('t', 'tool', '', _('specify merge tool')),
143 ]
144
145 similarityopts = [
146 ('s', 'similarity', '',
147 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
148 ]
149
150 subrepoopts = [
151 ('S', 'subrepos', None,
152 _('recurse into subrepositories'))
153 ]
154
155 debugrevlogopts = [
156 ('c', 'changelog', False, _('open changelog')),
157 ('m', 'manifest', False, _('open manifest')),
158 ('', 'dir', '', _('open directory manifest')),
159 ]
160
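These shared option lists are meant to be concatenated into a command's own flag table rather than re-declared per command, which is exactly what the shelve change above does with cmdutil.walkopts. A minimal, hypothetical extension command reusing them could look like the sketch below ('mycmd' and its flag are placeholders):

from mercurial import cmdutil, registrar
from mercurial.i18n import _

cmdtable = {}
command = registrar.command(cmdtable)

@command('mycmd',
    [('', 'some-flag', None, _('a command-specific option'))]
    + cmdutil.walkopts + cmdutil.dryrunopts,
    _('hg mycmd [OPTION]... [FILE]...'))
def mycmd(ui, repo, *pats, **opts):
    """example command built on the shared option templates"""
    ui.status(_('%d pattern argument(s) given\n') % len(pats))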
54 # special string such that everything below this line will be ignored in the
161 # special string such that everything below this line will be ignored in the
55 # editor text
162 # editor text
56 _linebelow = "^HG: ------------------------ >8 ------------------------$"
163 _linebelow = "^HG: ------------------------ >8 ------------------------$"
57
164
58 def ishunk(x):
165 def ishunk(x):
59 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
166 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
60 return isinstance(x, hunkclasses)
167 return isinstance(x, hunkclasses)
61
168
62 def newandmodified(chunks, originalchunks):
169 def newandmodified(chunks, originalchunks):
63 newlyaddedandmodifiedfiles = set()
170 newlyaddedandmodifiedfiles = set()
64 for chunk in chunks:
171 for chunk in chunks:
65 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
172 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
66 originalchunks:
173 originalchunks:
67 newlyaddedandmodifiedfiles.add(chunk.header.filename())
174 newlyaddedandmodifiedfiles.add(chunk.header.filename())
68 return newlyaddedandmodifiedfiles
175 return newlyaddedandmodifiedfiles
69
176
70 def parsealiases(cmd):
177 def parsealiases(cmd):
71 return cmd.lstrip("^").split("|")
178 return cmd.lstrip("^").split("|")
72
179
73 def setupwrapcolorwrite(ui):
180 def setupwrapcolorwrite(ui):
74 # wrap ui.write so diff output can be labeled/colorized
181 # wrap ui.write so diff output can be labeled/colorized
75 def wrapwrite(orig, *args, **kw):
182 def wrapwrite(orig, *args, **kw):
76 label = kw.pop('label', '')
183 label = kw.pop('label', '')
77 for chunk, l in patch.difflabel(lambda: args):
184 for chunk, l in patch.difflabel(lambda: args):
78 orig(chunk, label=label + l)
185 orig(chunk, label=label + l)
79
186
80 oldwrite = ui.write
187 oldwrite = ui.write
81 def wrap(*args, **kwargs):
188 def wrap(*args, **kwargs):
82 return wrapwrite(oldwrite, *args, **kwargs)
189 return wrapwrite(oldwrite, *args, **kwargs)
83 setattr(ui, 'write', wrap)
190 setattr(ui, 'write', wrap)
84 return oldwrite
191 return oldwrite
85
192
86 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
193 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
87 if usecurses:
194 if usecurses:
88 if testfile:
195 if testfile:
89 recordfn = crecordmod.testdecorator(testfile,
196 recordfn = crecordmod.testdecorator(testfile,
90 crecordmod.testchunkselector)
197 crecordmod.testchunkselector)
91 else:
198 else:
92 recordfn = crecordmod.chunkselector
199 recordfn = crecordmod.chunkselector
93
200
94 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
201 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
95
202
96 else:
203 else:
97 return patch.filterpatch(ui, originalhunks, operation)
204 return patch.filterpatch(ui, originalhunks, operation)
98
205
99 def recordfilter(ui, originalhunks, operation=None):
206 def recordfilter(ui, originalhunks, operation=None):
100 """ Prompts the user to filter the originalhunks and return a list of
207 """ Prompts the user to filter the originalhunks and return a list of
101 selected hunks.
208 selected hunks.
102 *operation* is used to build ui messages indicating to the user what
209 *operation* is used to build ui messages indicating to the user what
103 kind of filtering they are doing: reverting, committing, shelving, etc.
210 kind of filtering they are doing: reverting, committing, shelving, etc.
104 (see patch.filterpatch).
211 (see patch.filterpatch).
105 """
212 """
106 usecurses = crecordmod.checkcurses(ui)
213 usecurses = crecordmod.checkcurses(ui)
107 testfile = ui.config('experimental', 'crecordtest', None)
214 testfile = ui.config('experimental', 'crecordtest', None)
108 oldwrite = setupwrapcolorwrite(ui)
215 oldwrite = setupwrapcolorwrite(ui)
109 try:
216 try:
110 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
217 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
111 testfile, operation)
218 testfile, operation)
112 finally:
219 finally:
113 ui.write = oldwrite
220 ui.write = oldwrite
114 return newchunks, newopts
221 return newchunks, newopts
115
222
116 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
223 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
117 filterfn, *pats, **opts):
224 filterfn, *pats, **opts):
118 from . import merge as mergemod
225 from . import merge as mergemod
119 opts = pycompat.byteskwargs(opts)
226 opts = pycompat.byteskwargs(opts)
120 if not ui.interactive():
227 if not ui.interactive():
121 if cmdsuggest:
228 if cmdsuggest:
122 msg = _('running non-interactively, use %s instead') % cmdsuggest
229 msg = _('running non-interactively, use %s instead') % cmdsuggest
123 else:
230 else:
124 msg = _('running non-interactively')
231 msg = _('running non-interactively')
125 raise error.Abort(msg)
232 raise error.Abort(msg)
126
233
127 # make sure username is set before going interactive
234 # make sure username is set before going interactive
128 if not opts.get('user'):
235 if not opts.get('user'):
129 ui.username() # raise exception, username not provided
236 ui.username() # raise exception, username not provided
130
237
131 def recordfunc(ui, repo, message, match, opts):
238 def recordfunc(ui, repo, message, match, opts):
132 """This is generic record driver.
239 """This is generic record driver.
133
240
134 Its job is to interactively filter local changes, and
241 Its job is to interactively filter local changes, and
135 accordingly prepare working directory into a state in which the
242 accordingly prepare working directory into a state in which the
136 job can be delegated to a non-interactive commit command such as
243 job can be delegated to a non-interactive commit command such as
137 'commit' or 'qrefresh'.
244 'commit' or 'qrefresh'.
138
245
139 After the actual job is done by non-interactive command, the
246 After the actual job is done by non-interactive command, the
140 working directory is restored to its original state.
247 working directory is restored to its original state.
141
248
142 In the end we'll record interesting changes, and everything else
249 In the end we'll record interesting changes, and everything else
143 will be left in place, so the user can continue working.
250 will be left in place, so the user can continue working.
144 """
251 """
145
252
146 checkunfinished(repo, commit=True)
253 checkunfinished(repo, commit=True)
147 wctx = repo[None]
254 wctx = repo[None]
148 merge = len(wctx.parents()) > 1
255 merge = len(wctx.parents()) > 1
149 if merge:
256 if merge:
150 raise error.Abort(_('cannot partially commit a merge '
257 raise error.Abort(_('cannot partially commit a merge '
151 '(use "hg commit" instead)'))
258 '(use "hg commit" instead)'))
152
259
153 def fail(f, msg):
260 def fail(f, msg):
154 raise error.Abort('%s: %s' % (f, msg))
261 raise error.Abort('%s: %s' % (f, msg))
155
262
156 force = opts.get('force')
263 force = opts.get('force')
157 if not force:
264 if not force:
158 vdirs = []
265 vdirs = []
159 match.explicitdir = vdirs.append
266 match.explicitdir = vdirs.append
160 match.bad = fail
267 match.bad = fail
161
268
162 status = repo.status(match=match)
269 status = repo.status(match=match)
163 if not force:
270 if not force:
164 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
271 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
165 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
272 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
166 diffopts.nodates = True
273 diffopts.nodates = True
167 diffopts.git = True
274 diffopts.git = True
168 diffopts.showfunc = True
275 diffopts.showfunc = True
169 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
276 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
170 originalchunks = patch.parsepatch(originaldiff)
277 originalchunks = patch.parsepatch(originaldiff)
171
278
172 # 1. filter patch, since we are intending to apply subset of it
279 # 1. filter patch, since we are intending to apply subset of it
173 try:
280 try:
174 chunks, newopts = filterfn(ui, originalchunks)
281 chunks, newopts = filterfn(ui, originalchunks)
175 except patch.PatchError as err:
282 except patch.PatchError as err:
176 raise error.Abort(_('error parsing patch: %s') % err)
283 raise error.Abort(_('error parsing patch: %s') % err)
177 opts.update(newopts)
284 opts.update(newopts)
178
285
179 # We need to keep a backup of files that have been newly added and
286 # We need to keep a backup of files that have been newly added and
180 # modified during the recording process because there is a previous
287 # modified during the recording process because there is a previous
181 # version without the edit in the workdir
288 # version without the edit in the workdir
182 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
289 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
183 contenders = set()
290 contenders = set()
184 for h in chunks:
291 for h in chunks:
185 try:
292 try:
186 contenders.update(set(h.files()))
293 contenders.update(set(h.files()))
187 except AttributeError:
294 except AttributeError:
188 pass
295 pass
189
296
190 changed = status.modified + status.added + status.removed
297 changed = status.modified + status.added + status.removed
191 newfiles = [f for f in changed if f in contenders]
298 newfiles = [f for f in changed if f in contenders]
192 if not newfiles:
299 if not newfiles:
193 ui.status(_('no changes to record\n'))
300 ui.status(_('no changes to record\n'))
194 return 0
301 return 0
195
302
196 modified = set(status.modified)
303 modified = set(status.modified)
197
304
198 # 2. backup changed files, so we can restore them in the end
305 # 2. backup changed files, so we can restore them in the end
199
306
200 if backupall:
307 if backupall:
201 tobackup = changed
308 tobackup = changed
202 else:
309 else:
203 tobackup = [f for f in newfiles if f in modified or f in \
310 tobackup = [f for f in newfiles if f in modified or f in \
204 newlyaddedandmodifiedfiles]
311 newlyaddedandmodifiedfiles]
205 backups = {}
312 backups = {}
206 if tobackup:
313 if tobackup:
207 backupdir = repo.vfs.join('record-backups')
314 backupdir = repo.vfs.join('record-backups')
208 try:
315 try:
209 os.mkdir(backupdir)
316 os.mkdir(backupdir)
210 except OSError as err:
317 except OSError as err:
211 if err.errno != errno.EEXIST:
318 if err.errno != errno.EEXIST:
212 raise
319 raise
213 try:
320 try:
214 # backup continues
321 # backup continues
215 for f in tobackup:
322 for f in tobackup:
216 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
323 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
217 dir=backupdir)
324 dir=backupdir)
218 os.close(fd)
325 os.close(fd)
219 ui.debug('backup %r as %r\n' % (f, tmpname))
326 ui.debug('backup %r as %r\n' % (f, tmpname))
220 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
327 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
221 backups[f] = tmpname
328 backups[f] = tmpname
222
329
223 fp = stringio()
330 fp = stringio()
224 for c in chunks:
331 for c in chunks:
225 fname = c.filename()
332 fname = c.filename()
226 if fname in backups:
333 if fname in backups:
227 c.write(fp)
334 c.write(fp)
228 dopatch = fp.tell()
335 dopatch = fp.tell()
229 fp.seek(0)
336 fp.seek(0)
230
337
231 # 2.5 optionally review / modify patch in text editor
338 # 2.5 optionally review / modify patch in text editor
232 if opts.get('review', False):
339 if opts.get('review', False):
233 patchtext = (crecordmod.diffhelptext
340 patchtext = (crecordmod.diffhelptext
234 + crecordmod.patchhelptext
341 + crecordmod.patchhelptext
235 + fp.read())
342 + fp.read())
236 reviewedpatch = ui.edit(patchtext, "",
343 reviewedpatch = ui.edit(patchtext, "",
237 extra={"suffix": ".diff"},
344 extra={"suffix": ".diff"},
238 repopath=repo.path)
345 repopath=repo.path)
239 fp.truncate(0)
346 fp.truncate(0)
240 fp.write(reviewedpatch)
347 fp.write(reviewedpatch)
241 fp.seek(0)
348 fp.seek(0)
242
349
243 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
350 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
244 # 3a. apply filtered patch to clean repo (clean)
351 # 3a. apply filtered patch to clean repo (clean)
245 if backups:
352 if backups:
246 # Equivalent to hg.revert
353 # Equivalent to hg.revert
247 m = scmutil.matchfiles(repo, backups.keys())
354 m = scmutil.matchfiles(repo, backups.keys())
248 mergemod.update(repo, repo.dirstate.p1(),
355 mergemod.update(repo, repo.dirstate.p1(),
249 False, True, matcher=m)
356 False, True, matcher=m)
250
357
251 # 3b. (apply)
358 # 3b. (apply)
252 if dopatch:
359 if dopatch:
253 try:
360 try:
254 ui.debug('applying patch\n')
361 ui.debug('applying patch\n')
255 ui.debug(fp.getvalue())
362 ui.debug(fp.getvalue())
256 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
363 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
257 except patch.PatchError as err:
364 except patch.PatchError as err:
258 raise error.Abort(str(err))
365 raise error.Abort(str(err))
259 del fp
366 del fp
260
367
261 # 4. We prepared working directory according to filtered
368 # 4. We prepared working directory according to filtered
262 # patch. Now is the time to delegate the job to
369 # patch. Now is the time to delegate the job to
263 # commit/qrefresh or the like!
370 # commit/qrefresh or the like!
264
371
265 # Make all of the pathnames absolute.
372 # Make all of the pathnames absolute.
266 newfiles = [repo.wjoin(nf) for nf in newfiles]
373 newfiles = [repo.wjoin(nf) for nf in newfiles]
267 return commitfunc(ui, repo, *newfiles, **opts)
374 return commitfunc(ui, repo, *newfiles, **opts)
268 finally:
375 finally:
269 # 5. finally restore backed-up files
376 # 5. finally restore backed-up files
270 try:
377 try:
271 dirstate = repo.dirstate
378 dirstate = repo.dirstate
272 for realname, tmpname in backups.iteritems():
379 for realname, tmpname in backups.iteritems():
273 ui.debug('restoring %r to %r\n' % (tmpname, realname))
380 ui.debug('restoring %r to %r\n' % (tmpname, realname))
274
381
275 if dirstate[realname] == 'n':
382 if dirstate[realname] == 'n':
276 # without normallookup, restoring timestamp
383 # without normallookup, restoring timestamp
277 # may cause partially committed files
384 # may cause partially committed files
278 # to be treated as unmodified
385 # to be treated as unmodified
279 dirstate.normallookup(realname)
386 dirstate.normallookup(realname)
280
387
281 # copystat=True here and above are a hack to trick any
388 # copystat=True here and above are a hack to trick any
282 # editors that have f open into thinking we haven't modified it.
389 # editors that have f open into thinking we haven't modified it.
283 #
390 #
284 # Also note that this is racy, as an editor could notice the
391 # Also note that this is racy, as an editor could notice the
285 # file's mtime before we've finished writing it.
392 # file's mtime before we've finished writing it.
286 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
393 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
287 os.unlink(tmpname)
394 os.unlink(tmpname)
288 if tobackup:
395 if tobackup:
289 os.rmdir(backupdir)
396 os.rmdir(backupdir)
290 except OSError:
397 except OSError:
291 pass
398 pass
292
399
293 def recordinwlock(ui, repo, message, match, opts):
400 def recordinwlock(ui, repo, message, match, opts):
294 with repo.wlock():
401 with repo.wlock():
295 return recordfunc(ui, repo, message, match, opts)
402 return recordfunc(ui, repo, message, match, opts)
296
403
297 return commit(ui, repo, recordinwlock, pats, opts)
404 return commit(ui, repo, recordinwlock, pats, opts)
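# Illustrative sketch (not part of this module): the record driver above follows a
# five-step shape -- filter the changes, back up the touched files, put only the
# kept changes in place, delegate to a non-interactive commit, then restore the
# working copy. filterfn/commitfn below are made-up placeholders, not Mercurial APIs.
import os, shutil, tempfile

def record_sketch(workfile, filterfn, commitfn):
    backupdir = tempfile.mkdtemp(prefix='record-backups-')
    backup = os.path.join(backupdir, 'backup')
    original = open(workfile).read()       # full working-copy content
    kept = filterfn(original)              # 1. interactively filter the changes
    shutil.copy2(workfile, backup)         # 2. back up the modified file
    try:
        with open(workfile, 'w') as f:     # 3. leave only the kept changes in place
            f.write(kept)
        commitfn(workfile)                 # 4. delegate to a non-interactive commit
    finally:
        shutil.copy2(backup, workfile)     # 5. restore the original working copy
        shutil.rmtree(backupdir)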
298
405
299 def findpossible(cmd, table, strict=False):
406 def findpossible(cmd, table, strict=False):
300 """
407 """
301 Return cmd -> (aliases, command table entry)
408 Return cmd -> (aliases, command table entry)
302 for each matching command.
409 for each matching command.
303 Return debug commands (or their aliases) only if no normal command matches.
410 Return debug commands (or their aliases) only if no normal command matches.
304 """
411 """
305 choice = {}
412 choice = {}
306 debugchoice = {}
413 debugchoice = {}
307
414
308 if cmd in table:
415 if cmd in table:
309 # short-circuit exact matches, "log" alias beats "^log|history"
416 # short-circuit exact matches, "log" alias beats "^log|history"
310 keys = [cmd]
417 keys = [cmd]
311 else:
418 else:
312 keys = table.keys()
419 keys = table.keys()
313
420
314 allcmds = []
421 allcmds = []
315 for e in keys:
422 for e in keys:
316 aliases = parsealiases(e)
423 aliases = parsealiases(e)
317 allcmds.extend(aliases)
424 allcmds.extend(aliases)
318 found = None
425 found = None
319 if cmd in aliases:
426 if cmd in aliases:
320 found = cmd
427 found = cmd
321 elif not strict:
428 elif not strict:
322 for a in aliases:
429 for a in aliases:
323 if a.startswith(cmd):
430 if a.startswith(cmd):
324 found = a
431 found = a
325 break
432 break
326 if found is not None:
433 if found is not None:
327 if aliases[0].startswith("debug") or found.startswith("debug"):
434 if aliases[0].startswith("debug") or found.startswith("debug"):
328 debugchoice[found] = (aliases, table[e])
435 debugchoice[found] = (aliases, table[e])
329 else:
436 else:
330 choice[found] = (aliases, table[e])
437 choice[found] = (aliases, table[e])
331
438
332 if not choice and debugchoice:
439 if not choice and debugchoice:
333 choice = debugchoice
440 choice = debugchoice
334
441
335 return choice, allcmds
442 return choice, allcmds
336
443
337 def findcmd(cmd, table, strict=True):
444 def findcmd(cmd, table, strict=True):
338 """Return (aliases, command table entry) for command string."""
445 """Return (aliases, command table entry) for command string."""
339 choice, allcmds = findpossible(cmd, table, strict)
446 choice, allcmds = findpossible(cmd, table, strict)
340
447
341 if cmd in choice:
448 if cmd in choice:
342 return choice[cmd]
449 return choice[cmd]
343
450
344 if len(choice) > 1:
451 if len(choice) > 1:
345 clist = choice.keys()
452 clist = choice.keys()
346 clist.sort()
453 clist.sort()
347 raise error.AmbiguousCommand(cmd, clist)
454 raise error.AmbiguousCommand(cmd, clist)
348
455
349 if choice:
456 if choice:
350 return choice.values()[0]
457 return choice.values()[0]
351
458
352 raise error.UnknownCommand(cmd, allcmds)
459 raise error.UnknownCommand(cmd, allcmds)
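# Toy sketch of the prefix matching that findpossible/findcmd perform above. The
# alias syntax ('^log|history') and the table values here are simplified stand-ins
# for the real command table, which this sketch does not depend on.
def toyfindpossible(cmd, table):
    choice = {}
    for entry, value in table.items():
        aliases = entry.lstrip('^').split('|')
        if cmd in aliases:
            return {cmd: (aliases, value)}   # an exact match short-circuits
        for a in aliases:
            if a.startswith(cmd):            # otherwise collect prefix matches
                choice[a] = (aliases, value)
                break
    return choice

toytable = {'^log|history': 'logcmd', 'locate': 'locatecmd'}
print(sorted(toyfindpossible('lo', toytable)))   # ['locate', 'log'] -> ambiguous
print(sorted(toyfindpossible('log', toytable)))  # ['log'] -> exact match wins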
353
460
354 def findrepo(p):
461 def findrepo(p):
355 while not os.path.isdir(os.path.join(p, ".hg")):
462 while not os.path.isdir(os.path.join(p, ".hg")):
356 oldp, p = p, os.path.dirname(p)
463 oldp, p = p, os.path.dirname(p)
357 if p == oldp:
464 if p == oldp:
358 return None
465 return None
359
466
360 return p
467 return p
361
468
362 def bailifchanged(repo, merge=True, hint=None):
469 def bailifchanged(repo, merge=True, hint=None):
363 """ enforce the precondition that working directory must be clean.
470 """ enforce the precondition that working directory must be clean.
364
471
365 'merge' can be set to false if a pending uncommitted merge should be
472 'merge' can be set to false if a pending uncommitted merge should be
366 ignored (such as when 'update --check' runs).
473 ignored (such as when 'update --check' runs).
367
474
368 'hint' is the usual hint given to Abort exception.
475 'hint' is the usual hint given to Abort exception.
369 """
476 """
370
477
371 if merge and repo.dirstate.p2() != nullid:
478 if merge and repo.dirstate.p2() != nullid:
372 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
479 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
373 modified, added, removed, deleted = repo.status()[:4]
480 modified, added, removed, deleted = repo.status()[:4]
374 if modified or added or removed or deleted:
481 if modified or added or removed or deleted:
375 raise error.Abort(_('uncommitted changes'), hint=hint)
482 raise error.Abort(_('uncommitted changes'), hint=hint)
376 ctx = repo[None]
483 ctx = repo[None]
377 for s in sorted(ctx.substate):
484 for s in sorted(ctx.substate):
378 ctx.sub(s).bailifchanged(hint=hint)
485 ctx.sub(s).bailifchanged(hint=hint)
379
486
380 def logmessage(ui, opts):
487 def logmessage(ui, opts):
381 """ get the log message according to -m and -l option """
488 """ get the log message according to -m and -l option """
382 message = opts.get('message')
489 message = opts.get('message')
383 logfile = opts.get('logfile')
490 logfile = opts.get('logfile')
384
491
385 if message and logfile:
492 if message and logfile:
386 raise error.Abort(_('options --message and --logfile are mutually '
493 raise error.Abort(_('options --message and --logfile are mutually '
387 'exclusive'))
494 'exclusive'))
388 if not message and logfile:
495 if not message and logfile:
389 try:
496 try:
390 if logfile == '-':
497 if logfile == '-':
391 message = ui.fin.read()
498 message = ui.fin.read()
392 else:
499 else:
393 message = '\n'.join(util.readfile(logfile).splitlines())
500 message = '\n'.join(util.readfile(logfile).splitlines())
394 except IOError as inst:
501 except IOError as inst:
395 raise error.Abort(_("can't read commit message '%s': %s") %
502 raise error.Abort(_("can't read commit message '%s': %s") %
396 (logfile, inst.strerror))
503 (logfile, inst.strerror))
397 return message
504 return message
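# Minimal sketch of the -m / -l precedence implemented above: both options
# together abort, --logfile alone is read from disk (or stdin for '-'), and with
# neither set the caller gets None and decides whether to launch an editor.
# readfile is a stand-in for the real file/stdin reading.
def toy_logmessage(opts, readfile):
    message, logfile = opts.get('message'), opts.get('logfile')
    if message and logfile:
        raise ValueError('--message and --logfile are mutually exclusive')
    if not message and logfile:
        message = readfile(logfile)
    return message

print(toy_logmessage({'message': 'fix bug'}, readfile=None))                   # 'fix bug'
print(toy_logmessage({'logfile': 'msg.txt'}, readfile=lambda p: 'from file'))  # 'from file'
print(toy_logmessage({}, readfile=None))                                       # None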
398
505
399 def mergeeditform(ctxorbool, baseformname):
506 def mergeeditform(ctxorbool, baseformname):
400 """return appropriate editform name (referencing a committemplate)
507 """return appropriate editform name (referencing a committemplate)
401
508
402 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
509 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
403 a merge is being committed.
510 a merge is being committed.
404
511
405 This returns baseformname with '.merge' appended if it is a merge,
512 This returns baseformname with '.merge' appended if it is a merge,
406 otherwise '.normal' is appended.
513 otherwise '.normal' is appended.
407 """
514 """
408 if isinstance(ctxorbool, bool):
515 if isinstance(ctxorbool, bool):
409 if ctxorbool:
516 if ctxorbool:
410 return baseformname + ".merge"
517 return baseformname + ".merge"
411 elif 1 < len(ctxorbool.parents()):
518 elif 1 < len(ctxorbool.parents()):
412 return baseformname + ".merge"
519 return baseformname + ".merge"
413
520
414 return baseformname + ".normal"
521 return baseformname + ".normal"
415
522
416 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
523 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
417 editform='', **opts):
524 editform='', **opts):
418 """get appropriate commit message editor according to '--edit' option
525 """get appropriate commit message editor according to '--edit' option
419
526
420 'finishdesc' is a function to be called with the edited commit message
527 'finishdesc' is a function to be called with the edited commit message
421 (= 'description' of the new changeset) just after editing, but
528 (= 'description' of the new changeset) just after editing, but
422 before checking emptiness. It should return the actual text to be
529 before checking emptiness. It should return the actual text to be
423 stored in history. This allows the description to be changed before
530 stored in history. This allows the description to be changed before
424 it is stored.
531 it is stored.
425
532
426 'extramsg' is an extra message to be shown in the editor instead of
533 'extramsg' is an extra message to be shown in the editor instead of
427 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
534 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
428 EOL are automatically added.
535 EOL are automatically added.
429
536
430 'editform' is a dot-separated list of names, to distinguish
537 'editform' is a dot-separated list of names, to distinguish
431 the purpose of commit text editing.
538 the purpose of commit text editing.
432
539
433 'getcommiteditor' returns 'commitforceeditor' regardless of
540 'getcommiteditor' returns 'commitforceeditor' regardless of
434 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
541 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
435 they are specific to MQ usage.
542 they are specific to MQ usage.
436 """
543 """
437 if edit or finishdesc or extramsg:
544 if edit or finishdesc or extramsg:
438 return lambda r, c, s: commitforceeditor(r, c, s,
545 return lambda r, c, s: commitforceeditor(r, c, s,
439 finishdesc=finishdesc,
546 finishdesc=finishdesc,
440 extramsg=extramsg,
547 extramsg=extramsg,
441 editform=editform)
548 editform=editform)
442 elif editform:
549 elif editform:
443 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
550 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
444 else:
551 else:
445 return commiteditor
552 return commiteditor
446
553
447 def loglimit(opts):
554 def loglimit(opts):
448 """get the log limit according to option -l/--limit"""
555 """get the log limit according to option -l/--limit"""
449 limit = opts.get('limit')
556 limit = opts.get('limit')
450 if limit:
557 if limit:
451 try:
558 try:
452 limit = int(limit)
559 limit = int(limit)
453 except ValueError:
560 except ValueError:
454 raise error.Abort(_('limit must be a positive integer'))
561 raise error.Abort(_('limit must be a positive integer'))
455 if limit <= 0:
562 if limit <= 0:
456 raise error.Abort(_('limit must be positive'))
563 raise error.Abort(_('limit must be positive'))
457 else:
564 else:
458 limit = None
565 limit = None
459 return limit
566 return limit
460
567
461 def makefilename(repo, pat, node, desc=None,
568 def makefilename(repo, pat, node, desc=None,
462 total=None, seqno=None, revwidth=None, pathname=None):
569 total=None, seqno=None, revwidth=None, pathname=None):
463 node_expander = {
570 node_expander = {
464 'H': lambda: hex(node),
571 'H': lambda: hex(node),
465 'R': lambda: str(repo.changelog.rev(node)),
572 'R': lambda: str(repo.changelog.rev(node)),
466 'h': lambda: short(node),
573 'h': lambda: short(node),
467 'm': lambda: re.sub('[^\w]', '_', str(desc))
574 'm': lambda: re.sub('[^\w]', '_', str(desc))
468 }
575 }
469 expander = {
576 expander = {
470 '%': lambda: '%',
577 '%': lambda: '%',
471 'b': lambda: os.path.basename(repo.root),
578 'b': lambda: os.path.basename(repo.root),
472 }
579 }
473
580
474 try:
581 try:
475 if node:
582 if node:
476 expander.update(node_expander)
583 expander.update(node_expander)
477 if node:
584 if node:
478 expander['r'] = (lambda:
585 expander['r'] = (lambda:
479 str(repo.changelog.rev(node)).zfill(revwidth or 0))
586 str(repo.changelog.rev(node)).zfill(revwidth or 0))
480 if total is not None:
587 if total is not None:
481 expander['N'] = lambda: str(total)
588 expander['N'] = lambda: str(total)
482 if seqno is not None:
589 if seqno is not None:
483 expander['n'] = lambda: str(seqno)
590 expander['n'] = lambda: str(seqno)
484 if total is not None and seqno is not None:
591 if total is not None and seqno is not None:
485 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
592 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
486 if pathname is not None:
593 if pathname is not None:
487 expander['s'] = lambda: os.path.basename(pathname)
594 expander['s'] = lambda: os.path.basename(pathname)
488 expander['d'] = lambda: os.path.dirname(pathname) or '.'
595 expander['d'] = lambda: os.path.dirname(pathname) or '.'
489 expander['p'] = lambda: pathname
596 expander['p'] = lambda: pathname
490
597
491 newname = []
598 newname = []
492 patlen = len(pat)
599 patlen = len(pat)
493 i = 0
600 i = 0
494 while i < patlen:
601 while i < patlen:
495 c = pat[i:i + 1]
602 c = pat[i:i + 1]
496 if c == '%':
603 if c == '%':
497 i += 1
604 i += 1
498 c = pat[i:i + 1]
605 c = pat[i:i + 1]
499 c = expander[c]()
606 c = expander[c]()
500 newname.append(c)
607 newname.append(c)
501 i += 1
608 i += 1
502 return ''.join(newname)
609 return ''.join(newname)
503 except KeyError as inst:
610 except KeyError as inst:
504 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
611 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
505 inst.args[0])
612 inst.args[0])
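# Standalone sketch of the '%'-expansion loop in makefilename above, driven by a
# hand-rolled expander table instead of real repo/node data (the hash and repo
# name below are invented).
def toyexpand(pat, expander):
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = expander[pat[i]]()   # unknown specs raise KeyError, aborted above
        out.append(c)
        i += 1
    return ''.join(out)

toyexpander = {'%': lambda: '%',
               'h': lambda: 'a1b2c3d4e5f6',      # short node hash
               'b': lambda: 'myrepo'}            # basename of the repo root
print(toyexpand('hg-%h.patch', toyexpander))     # hg-a1b2c3d4e5f6.patch
print(toyexpand('%b-%%.txt', toyexpander))       # myrepo-%.txt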
506
613
507 class _unclosablefile(object):
614 class _unclosablefile(object):
508 def __init__(self, fp):
615 def __init__(self, fp):
509 self._fp = fp
616 self._fp = fp
510
617
511 def close(self):
618 def close(self):
512 pass
619 pass
513
620
514 def __iter__(self):
621 def __iter__(self):
515 return iter(self._fp)
622 return iter(self._fp)
516
623
517 def __getattr__(self, attr):
624 def __getattr__(self, attr):
518 return getattr(self._fp, attr)
625 return getattr(self._fp, attr)
519
626
520 def __enter__(self):
627 def __enter__(self):
521 return self
628 return self
522
629
523 def __exit__(self, exc_type, exc_value, exc_tb):
630 def __exit__(self, exc_type, exc_value, exc_tb):
524 pass
631 pass
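# Quick demonstration of why _unclosablefile exists: when the output pattern is
# '-' the ui's stdout stream is handed back, and callers that close() their file
# or use it in a 'with' block must not close that shared stream. io.StringIO
# stands in for ui.fout here.
import io

buf = io.StringIO()
with _unclosablefile(buf) as fp:
    fp.write(u'written through the wrapper\n')
fp.close()                        # both the 'with' exit and close() are no-ops
print(buf.getvalue())             # the underlying stream is still usable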
525
632
526 def makefileobj(repo, pat, node=None, desc=None, total=None,
633 def makefileobj(repo, pat, node=None, desc=None, total=None,
527 seqno=None, revwidth=None, mode='wb', modemap=None,
634 seqno=None, revwidth=None, mode='wb', modemap=None,
528 pathname=None):
635 pathname=None):
529
636
530 writable = mode not in ('r', 'rb')
637 writable = mode not in ('r', 'rb')
531
638
532 if not pat or pat == '-':
639 if not pat or pat == '-':
533 if writable:
640 if writable:
534 fp = repo.ui.fout
641 fp = repo.ui.fout
535 else:
642 else:
536 fp = repo.ui.fin
643 fp = repo.ui.fin
537 return _unclosablefile(fp)
644 return _unclosablefile(fp)
538 if util.safehasattr(pat, 'write') and writable:
645 if util.safehasattr(pat, 'write') and writable:
539 return pat
646 return pat
540 if util.safehasattr(pat, 'read') and 'r' in mode:
647 if util.safehasattr(pat, 'read') and 'r' in mode:
541 return pat
648 return pat
542 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
649 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
543 if modemap is not None:
650 if modemap is not None:
544 mode = modemap.get(fn, mode)
651 mode = modemap.get(fn, mode)
545 if mode == 'wb':
652 if mode == 'wb':
546 modemap[fn] = 'ab'
653 modemap[fn] = 'ab'
547 return open(fn, mode)
654 return open(fn, mode)
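# Sketch of the modemap convention used above: the first open of a generated
# filename truncates it ('wb') and flips the recorded mode to 'ab', so later
# calls for the same name (e.g. several revisions exported to one file) append.
import os, tempfile

toymodemap = {}
def openonce(fn, mode='wb'):
    mode = toymodemap.get(fn, mode)
    if mode == 'wb':
        toymodemap[fn] = 'ab'
    return open(fn, mode)

fn = os.path.join(tempfile.mkdtemp(), 'hg-export.patch')
for chunk in (b'rev one\n', b'rev two\n'):
    with openonce(fn) as fp:
        fp.write(chunk)
print(open(fn, 'rb').read())      # both chunks end up in the single file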
548
655
549 def openrevlog(repo, cmd, file_, opts):
656 def openrevlog(repo, cmd, file_, opts):
550 """opens the changelog, manifest, a filelog or a given revlog"""
657 """opens the changelog, manifest, a filelog or a given revlog"""
551 cl = opts['changelog']
658 cl = opts['changelog']
552 mf = opts['manifest']
659 mf = opts['manifest']
553 dir = opts['dir']
660 dir = opts['dir']
554 msg = None
661 msg = None
555 if cl and mf:
662 if cl and mf:
556 msg = _('cannot specify --changelog and --manifest at the same time')
663 msg = _('cannot specify --changelog and --manifest at the same time')
557 elif cl and dir:
664 elif cl and dir:
558 msg = _('cannot specify --changelog and --dir at the same time')
665 msg = _('cannot specify --changelog and --dir at the same time')
559 elif cl or mf or dir:
666 elif cl or mf or dir:
560 if file_:
667 if file_:
561 msg = _('cannot specify filename with --changelog or --manifest')
668 msg = _('cannot specify filename with --changelog or --manifest')
562 elif not repo:
669 elif not repo:
563 msg = _('cannot specify --changelog or --manifest or --dir '
670 msg = _('cannot specify --changelog or --manifest or --dir '
564 'without a repository')
671 'without a repository')
565 if msg:
672 if msg:
566 raise error.Abort(msg)
673 raise error.Abort(msg)
567
674
568 r = None
675 r = None
569 if repo:
676 if repo:
570 if cl:
677 if cl:
571 r = repo.unfiltered().changelog
678 r = repo.unfiltered().changelog
572 elif dir:
679 elif dir:
573 if 'treemanifest' not in repo.requirements:
680 if 'treemanifest' not in repo.requirements:
574 raise error.Abort(_("--dir can only be used on repos with "
681 raise error.Abort(_("--dir can only be used on repos with "
575 "treemanifest enabled"))
682 "treemanifest enabled"))
576 dirlog = repo.manifestlog._revlog.dirlog(dir)
683 dirlog = repo.manifestlog._revlog.dirlog(dir)
577 if len(dirlog):
684 if len(dirlog):
578 r = dirlog
685 r = dirlog
579 elif mf:
686 elif mf:
580 r = repo.manifestlog._revlog
687 r = repo.manifestlog._revlog
581 elif file_:
688 elif file_:
582 filelog = repo.file(file_)
689 filelog = repo.file(file_)
583 if len(filelog):
690 if len(filelog):
584 r = filelog
691 r = filelog
585 if not r:
692 if not r:
586 if not file_:
693 if not file_:
587 raise error.CommandError(cmd, _('invalid arguments'))
694 raise error.CommandError(cmd, _('invalid arguments'))
588 if not os.path.isfile(file_):
695 if not os.path.isfile(file_):
589 raise error.Abort(_("revlog '%s' not found") % file_)
696 raise error.Abort(_("revlog '%s' not found") % file_)
590 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
697 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
591 file_[:-2] + ".i")
698 file_[:-2] + ".i")
592 return r
699 return r
593
700
594 def copy(ui, repo, pats, opts, rename=False):
701 def copy(ui, repo, pats, opts, rename=False):
595 # called with the repo lock held
702 # called with the repo lock held
596 #
703 #
597 # hgsep => pathname that uses "/" to separate directories
704 # hgsep => pathname that uses "/" to separate directories
598 # ossep => pathname that uses os.sep to separate directories
705 # ossep => pathname that uses os.sep to separate directories
599 cwd = repo.getcwd()
706 cwd = repo.getcwd()
600 targets = {}
707 targets = {}
601 after = opts.get("after")
708 after = opts.get("after")
602 dryrun = opts.get("dry_run")
709 dryrun = opts.get("dry_run")
603 wctx = repo[None]
710 wctx = repo[None]
604
711
605 def walkpat(pat):
712 def walkpat(pat):
606 srcs = []
713 srcs = []
607 if after:
714 if after:
608 badstates = '?'
715 badstates = '?'
609 else:
716 else:
610 badstates = '?r'
717 badstates = '?r'
611 m = scmutil.match(repo[None], [pat], opts, globbed=True)
718 m = scmutil.match(repo[None], [pat], opts, globbed=True)
612 for abs in repo[None].walk(m):
719 for abs in repo[None].walk(m):
613 state = repo.dirstate[abs]
720 state = repo.dirstate[abs]
614 rel = m.rel(abs)
721 rel = m.rel(abs)
615 exact = m.exact(abs)
722 exact = m.exact(abs)
616 if state in badstates:
723 if state in badstates:
617 if exact and state == '?':
724 if exact and state == '?':
618 ui.warn(_('%s: not copying - file is not managed\n') % rel)
725 ui.warn(_('%s: not copying - file is not managed\n') % rel)
619 if exact and state == 'r':
726 if exact and state == 'r':
620 ui.warn(_('%s: not copying - file has been marked for'
727 ui.warn(_('%s: not copying - file has been marked for'
621 ' remove\n') % rel)
728 ' remove\n') % rel)
622 continue
729 continue
623 # abs: hgsep
730 # abs: hgsep
624 # rel: ossep
731 # rel: ossep
625 srcs.append((abs, rel, exact))
732 srcs.append((abs, rel, exact))
626 return srcs
733 return srcs
627
734
628 # abssrc: hgsep
735 # abssrc: hgsep
629 # relsrc: ossep
736 # relsrc: ossep
630 # otarget: ossep
737 # otarget: ossep
631 def copyfile(abssrc, relsrc, otarget, exact):
738 def copyfile(abssrc, relsrc, otarget, exact):
632 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
739 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
633 if '/' in abstarget:
740 if '/' in abstarget:
634 # We cannot normalize abstarget itself, as this would prevent
741 # We cannot normalize abstarget itself, as this would prevent
635 # case-only renames, like a => A.
742 # case-only renames, like a => A.
636 abspath, absname = abstarget.rsplit('/', 1)
743 abspath, absname = abstarget.rsplit('/', 1)
637 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
744 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
638 reltarget = repo.pathto(abstarget, cwd)
745 reltarget = repo.pathto(abstarget, cwd)
639 target = repo.wjoin(abstarget)
746 target = repo.wjoin(abstarget)
640 src = repo.wjoin(abssrc)
747 src = repo.wjoin(abssrc)
641 state = repo.dirstate[abstarget]
748 state = repo.dirstate[abstarget]
642
749
643 scmutil.checkportable(ui, abstarget)
750 scmutil.checkportable(ui, abstarget)
644
751
645 # check for collisions
752 # check for collisions
646 prevsrc = targets.get(abstarget)
753 prevsrc = targets.get(abstarget)
647 if prevsrc is not None:
754 if prevsrc is not None:
648 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
755 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
649 (reltarget, repo.pathto(abssrc, cwd),
756 (reltarget, repo.pathto(abssrc, cwd),
650 repo.pathto(prevsrc, cwd)))
757 repo.pathto(prevsrc, cwd)))
651 return
758 return
652
759
653 # check for overwrites
760 # check for overwrites
654 exists = os.path.lexists(target)
761 exists = os.path.lexists(target)
655 samefile = False
762 samefile = False
656 if exists and abssrc != abstarget:
763 if exists and abssrc != abstarget:
657 if (repo.dirstate.normalize(abssrc) ==
764 if (repo.dirstate.normalize(abssrc) ==
658 repo.dirstate.normalize(abstarget)):
765 repo.dirstate.normalize(abstarget)):
659 if not rename:
766 if not rename:
660 ui.warn(_("%s: can't copy - same file\n") % reltarget)
767 ui.warn(_("%s: can't copy - same file\n") % reltarget)
661 return
768 return
662 exists = False
769 exists = False
663 samefile = True
770 samefile = True
664
771
665 if not after and exists or after and state in 'mn':
772 if not after and exists or after and state in 'mn':
666 if not opts['force']:
773 if not opts['force']:
667 if state in 'mn':
774 if state in 'mn':
668 msg = _('%s: not overwriting - file already committed\n')
775 msg = _('%s: not overwriting - file already committed\n')
669 if after:
776 if after:
670 flags = '--after --force'
777 flags = '--after --force'
671 else:
778 else:
672 flags = '--force'
779 flags = '--force'
673 if rename:
780 if rename:
674 hint = _('(hg rename %s to replace the file by '
781 hint = _('(hg rename %s to replace the file by '
675 'recording a rename)\n') % flags
782 'recording a rename)\n') % flags
676 else:
783 else:
677 hint = _('(hg copy %s to replace the file by '
784 hint = _('(hg copy %s to replace the file by '
678 'recording a copy)\n') % flags
785 'recording a copy)\n') % flags
679 else:
786 else:
680 msg = _('%s: not overwriting - file exists\n')
787 msg = _('%s: not overwriting - file exists\n')
681 if rename:
788 if rename:
682 hint = _('(hg rename --after to record the rename)\n')
789 hint = _('(hg rename --after to record the rename)\n')
683 else:
790 else:
684 hint = _('(hg copy --after to record the copy)\n')
791 hint = _('(hg copy --after to record the copy)\n')
685 ui.warn(msg % reltarget)
792 ui.warn(msg % reltarget)
686 ui.warn(hint)
793 ui.warn(hint)
687 return
794 return
688
795
689 if after:
796 if after:
690 if not exists:
797 if not exists:
691 if rename:
798 if rename:
692 ui.warn(_('%s: not recording move - %s does not exist\n') %
799 ui.warn(_('%s: not recording move - %s does not exist\n') %
693 (relsrc, reltarget))
800 (relsrc, reltarget))
694 else:
801 else:
695 ui.warn(_('%s: not recording copy - %s does not exist\n') %
802 ui.warn(_('%s: not recording copy - %s does not exist\n') %
696 (relsrc, reltarget))
803 (relsrc, reltarget))
697 return
804 return
698 elif not dryrun:
805 elif not dryrun:
699 try:
806 try:
700 if exists:
807 if exists:
701 os.unlink(target)
808 os.unlink(target)
702 targetdir = os.path.dirname(target) or '.'
809 targetdir = os.path.dirname(target) or '.'
703 if not os.path.isdir(targetdir):
810 if not os.path.isdir(targetdir):
704 os.makedirs(targetdir)
811 os.makedirs(targetdir)
705 if samefile:
812 if samefile:
706 tmp = target + "~hgrename"
813 tmp = target + "~hgrename"
707 os.rename(src, tmp)
814 os.rename(src, tmp)
708 os.rename(tmp, target)
815 os.rename(tmp, target)
709 else:
816 else:
710 util.copyfile(src, target)
817 util.copyfile(src, target)
711 srcexists = True
818 srcexists = True
712 except IOError as inst:
819 except IOError as inst:
713 if inst.errno == errno.ENOENT:
820 if inst.errno == errno.ENOENT:
714 ui.warn(_('%s: deleted in working directory\n') % relsrc)
821 ui.warn(_('%s: deleted in working directory\n') % relsrc)
715 srcexists = False
822 srcexists = False
716 else:
823 else:
717 ui.warn(_('%s: cannot copy - %s\n') %
824 ui.warn(_('%s: cannot copy - %s\n') %
718 (relsrc, inst.strerror))
825 (relsrc, inst.strerror))
719 return True # report a failure
826 return True # report a failure
720
827
721 if ui.verbose or not exact:
828 if ui.verbose or not exact:
722 if rename:
829 if rename:
723 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
830 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
724 else:
831 else:
725 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
832 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
726
833
727 targets[abstarget] = abssrc
834 targets[abstarget] = abssrc
728
835
729 # fix up dirstate
836 # fix up dirstate
730 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
837 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
731 dryrun=dryrun, cwd=cwd)
838 dryrun=dryrun, cwd=cwd)
732 if rename and not dryrun:
839 if rename and not dryrun:
733 if not after and srcexists and not samefile:
840 if not after and srcexists and not samefile:
734 repo.wvfs.unlinkpath(abssrc)
841 repo.wvfs.unlinkpath(abssrc)
735 wctx.forget([abssrc])
842 wctx.forget([abssrc])
736
843
737 # pat: ossep
844 # pat: ossep
738 # dest: ossep
845 # dest: ossep
739 # srcs: list of (hgsep, hgsep, ossep, bool)
846 # srcs: list of (hgsep, hgsep, ossep, bool)
740 # return: function that takes hgsep and returns ossep
847 # return: function that takes hgsep and returns ossep
741 def targetpathfn(pat, dest, srcs):
848 def targetpathfn(pat, dest, srcs):
742 if os.path.isdir(pat):
849 if os.path.isdir(pat):
743 abspfx = pathutil.canonpath(repo.root, cwd, pat)
850 abspfx = pathutil.canonpath(repo.root, cwd, pat)
744 abspfx = util.localpath(abspfx)
851 abspfx = util.localpath(abspfx)
745 if destdirexists:
852 if destdirexists:
746 striplen = len(os.path.split(abspfx)[0])
853 striplen = len(os.path.split(abspfx)[0])
747 else:
854 else:
748 striplen = len(abspfx)
855 striplen = len(abspfx)
749 if striplen:
856 if striplen:
750 striplen += len(pycompat.ossep)
857 striplen += len(pycompat.ossep)
751 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
858 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
752 elif destdirexists:
859 elif destdirexists:
753 res = lambda p: os.path.join(dest,
860 res = lambda p: os.path.join(dest,
754 os.path.basename(util.localpath(p)))
861 os.path.basename(util.localpath(p)))
755 else:
862 else:
756 res = lambda p: dest
863 res = lambda p: dest
757 return res
864 return res
758
865
759 # pat: ossep
866 # pat: ossep
760 # dest: ossep
867 # dest: ossep
761 # srcs: list of (hgsep, hgsep, ossep, bool)
868 # srcs: list of (hgsep, hgsep, ossep, bool)
762 # return: function that takes hgsep and returns ossep
869 # return: function that takes hgsep and returns ossep
763 def targetpathafterfn(pat, dest, srcs):
870 def targetpathafterfn(pat, dest, srcs):
764 if matchmod.patkind(pat):
871 if matchmod.patkind(pat):
765 # a mercurial pattern
872 # a mercurial pattern
766 res = lambda p: os.path.join(dest,
873 res = lambda p: os.path.join(dest,
767 os.path.basename(util.localpath(p)))
874 os.path.basename(util.localpath(p)))
768 else:
875 else:
769 abspfx = pathutil.canonpath(repo.root, cwd, pat)
876 abspfx = pathutil.canonpath(repo.root, cwd, pat)
770 if len(abspfx) < len(srcs[0][0]):
877 if len(abspfx) < len(srcs[0][0]):
771 # A directory. Either the target path contains the last
878 # A directory. Either the target path contains the last
772 # component of the source path or it does not.
879 # component of the source path or it does not.
773 def evalpath(striplen):
880 def evalpath(striplen):
774 score = 0
881 score = 0
775 for s in srcs:
882 for s in srcs:
776 t = os.path.join(dest, util.localpath(s[0])[striplen:])
883 t = os.path.join(dest, util.localpath(s[0])[striplen:])
777 if os.path.lexists(t):
884 if os.path.lexists(t):
778 score += 1
885 score += 1
779 return score
886 return score
780
887
781 abspfx = util.localpath(abspfx)
888 abspfx = util.localpath(abspfx)
782 striplen = len(abspfx)
889 striplen = len(abspfx)
783 if striplen:
890 if striplen:
784 striplen += len(pycompat.ossep)
891 striplen += len(pycompat.ossep)
785 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
892 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
786 score = evalpath(striplen)
893 score = evalpath(striplen)
787 striplen1 = len(os.path.split(abspfx)[0])
894 striplen1 = len(os.path.split(abspfx)[0])
788 if striplen1:
895 if striplen1:
789 striplen1 += len(pycompat.ossep)
896 striplen1 += len(pycompat.ossep)
790 if evalpath(striplen1) > score:
897 if evalpath(striplen1) > score:
791 striplen = striplen1
898 striplen = striplen1
792 res = lambda p: os.path.join(dest,
899 res = lambda p: os.path.join(dest,
793 util.localpath(p)[striplen:])
900 util.localpath(p)[striplen:])
794 else:
901 else:
795 # a file
902 # a file
796 if destdirexists:
903 if destdirexists:
797 res = lambda p: os.path.join(dest,
904 res = lambda p: os.path.join(dest,
798 os.path.basename(util.localpath(p)))
905 os.path.basename(util.localpath(p)))
799 else:
906 else:
800 res = lambda p: dest
907 res = lambda p: dest
801 return res
908 return res
802
909
803 pats = scmutil.expandpats(pats)
910 pats = scmutil.expandpats(pats)
804 if not pats:
911 if not pats:
805 raise error.Abort(_('no source or destination specified'))
912 raise error.Abort(_('no source or destination specified'))
806 if len(pats) == 1:
913 if len(pats) == 1:
807 raise error.Abort(_('no destination specified'))
914 raise error.Abort(_('no destination specified'))
808 dest = pats.pop()
915 dest = pats.pop()
809 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
916 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
810 if not destdirexists:
917 if not destdirexists:
811 if len(pats) > 1 or matchmod.patkind(pats[0]):
918 if len(pats) > 1 or matchmod.patkind(pats[0]):
812 raise error.Abort(_('with multiple sources, destination must be an '
919 raise error.Abort(_('with multiple sources, destination must be an '
813 'existing directory'))
920 'existing directory'))
814 if util.endswithsep(dest):
921 if util.endswithsep(dest):
815 raise error.Abort(_('destination %s is not a directory') % dest)
922 raise error.Abort(_('destination %s is not a directory') % dest)
816
923
817 tfn = targetpathfn
924 tfn = targetpathfn
818 if after:
925 if after:
819 tfn = targetpathafterfn
926 tfn = targetpathafterfn
820 copylist = []
927 copylist = []
821 for pat in pats:
928 for pat in pats:
822 srcs = walkpat(pat)
929 srcs = walkpat(pat)
823 if not srcs:
930 if not srcs:
824 continue
931 continue
825 copylist.append((tfn(pat, dest, srcs), srcs))
932 copylist.append((tfn(pat, dest, srcs), srcs))
826 if not copylist:
933 if not copylist:
827 raise error.Abort(_('no files to copy'))
934 raise error.Abort(_('no files to copy'))
828
935
829 errors = 0
936 errors = 0
830 for targetpath, srcs in copylist:
937 for targetpath, srcs in copylist:
831 for abssrc, relsrc, exact in srcs:
938 for abssrc, relsrc, exact in srcs:
832 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
939 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
833 errors += 1
940 errors += 1
834
941
835 if errors:
942 if errors:
836 ui.warn(_('(consider using --after)\n'))
943 ui.warn(_('(consider using --after)\n'))
837
944
838 return errors != 0
945 return errors != 0
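# Toy illustration of the hgsep/ossep naming convention used in the comments in
# copy() above: repository-internal paths always use '/', while paths handed to
# the OS or shown relative to the cwd use os.sep. util.localpath performs the
# real hgsep -> ossep conversion in this module; these helpers are simplified
# stand-ins.
import os

def toy_localpath(hgpath):           # hgsep -> ossep
    return hgpath.replace('/', os.sep)

def toy_hgpath(ospath):              # ossep -> hgsep
    return ospath.replace(os.sep, '/')

print(toy_localpath('dir/sub/file.txt'))
print(toy_hgpath(os.path.join('dir', 'sub', 'file.txt')))   # dir/sub/file.txt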
839
946
840 ## facility to let extensions process additional data into an import patch
947 ## facility to let extensions process additional data into an import patch
841 # list of identifiers to be executed in order
948 # list of identifiers to be executed in order
842 extrapreimport = [] # run before commit
949 extrapreimport = [] # run before commit
843 extrapostimport = [] # run after commit
950 extrapostimport = [] # run after commit
844 # mapping from identifier to actual import function
951 # mapping from identifier to actual import function
845 #
952 #
846 # 'preimport' functions are run before the commit is made and are provided the following
953 # 'preimport' functions are run before the commit is made and are provided the following
847 # arguments:
954 # arguments:
848 # - repo: the localrepository instance,
955 # - repo: the localrepository instance,
849 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
956 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
850 # - extra: the future extra dictionary of the changeset, please mutate it,
957 # - extra: the future extra dictionary of the changeset, please mutate it,
851 # - opts: the import options.
958 # - opts: the import options.
852 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
959 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
853 # mutation of the in-memory commit and more. Feel free to rework the code to get
960 # mutation of the in-memory commit and more. Feel free to rework the code to get
854 # there.
961 # there.
855 extrapreimportmap = {}
962 extrapreimportmap = {}
856 # 'postimport' functions are run after the commit is made and are provided the following
963 # 'postimport' functions are run after the commit is made and are provided the following
857 # argument:
964 # argument:
858 # - ctx: the changectx created by import.
965 # - ctx: the changectx created by import.
859 extrapostimportmap = {}
966 extrapostimportmap = {}
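# Hedged sketch of how an extension might plug into this facility; the 'myext'
# identifier, the addsource() hook, and the 'source-node' extra key are invented
# for illustration only.
def addsource(repo, patchdata, extra, opts):
    # stash a value from the parsed patch header into the changeset's extra dict
    if patchdata.get('nodeid'):
        extra['source-node'] = patchdata['nodeid']

extrapreimport.append('myext')
extrapreimportmap['myext'] = addsource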
860
967
861 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
968 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
862 """Utility function used by commands.import to import a single patch
969 """Utility function used by commands.import to import a single patch
863
970
864 This function is explicitly defined here to help the evolve extension to
971 This function is explicitly defined here to help the evolve extension to
865 wrap this part of the import logic.
972 wrap this part of the import logic.
866
973
867 The API is currently a bit ugly because it is a simple code translation from
974 The API is currently a bit ugly because it is a simple code translation from
868 the import command. Feel free to make it better.
975 the import command. Feel free to make it better.
869
976
870 :hunk: a patch (as a binary string)
977 :hunk: a patch (as a binary string)
871 :parents: nodes that will be parents of the created commit
978 :parents: nodes that will be parents of the created commit
872 :opts: the full dict of options passed to the import command
979 :opts: the full dict of options passed to the import command
873 :msgs: list to save commit message to.
980 :msgs: list to save commit message to.
874 (used in case we need to save it when failing)
981 (used in case we need to save it when failing)
875 :updatefunc: a function that updates a repo to a given node
982 :updatefunc: a function that updates a repo to a given node
876 updatefunc(<repo>, <node>)
983 updatefunc(<repo>, <node>)
877 """
984 """
878 # avoid cycle context -> subrepo -> cmdutil
985 # avoid cycle context -> subrepo -> cmdutil
879 from . import context
986 from . import context
880 extractdata = patch.extract(ui, hunk)
987 extractdata = patch.extract(ui, hunk)
881 tmpname = extractdata.get('filename')
988 tmpname = extractdata.get('filename')
882 message = extractdata.get('message')
989 message = extractdata.get('message')
883 user = opts.get('user') or extractdata.get('user')
990 user = opts.get('user') or extractdata.get('user')
884 date = opts.get('date') or extractdata.get('date')
991 date = opts.get('date') or extractdata.get('date')
885 branch = extractdata.get('branch')
992 branch = extractdata.get('branch')
886 nodeid = extractdata.get('nodeid')
993 nodeid = extractdata.get('nodeid')
887 p1 = extractdata.get('p1')
994 p1 = extractdata.get('p1')
888 p2 = extractdata.get('p2')
995 p2 = extractdata.get('p2')
889
996
890 nocommit = opts.get('no_commit')
997 nocommit = opts.get('no_commit')
891 importbranch = opts.get('import_branch')
998 importbranch = opts.get('import_branch')
892 update = not opts.get('bypass')
999 update = not opts.get('bypass')
893 strip = opts["strip"]
1000 strip = opts["strip"]
894 prefix = opts["prefix"]
1001 prefix = opts["prefix"]
895 sim = float(opts.get('similarity') or 0)
1002 sim = float(opts.get('similarity') or 0)
896 if not tmpname:
1003 if not tmpname:
897 return (None, None, False)
1004 return (None, None, False)
898
1005
899 rejects = False
1006 rejects = False
900
1007
901 try:
1008 try:
902 cmdline_message = logmessage(ui, opts)
1009 cmdline_message = logmessage(ui, opts)
903 if cmdline_message:
1010 if cmdline_message:
904 # pickup the cmdline msg
1011 # pickup the cmdline msg
905 message = cmdline_message
1012 message = cmdline_message
906 elif message:
1013 elif message:
907 # pickup the patch msg
1014 # pickup the patch msg
908 message = message.strip()
1015 message = message.strip()
909 else:
1016 else:
910 # launch the editor
1017 # launch the editor
911 message = None
1018 message = None
912 ui.debug('message:\n%s\n' % message)
1019 ui.debug('message:\n%s\n' % message)
913
1020
914 if len(parents) == 1:
1021 if len(parents) == 1:
915 parents.append(repo[nullid])
1022 parents.append(repo[nullid])
916 if opts.get('exact'):
1023 if opts.get('exact'):
917 if not nodeid or not p1:
1024 if not nodeid or not p1:
918 raise error.Abort(_('not a Mercurial patch'))
1025 raise error.Abort(_('not a Mercurial patch'))
919 p1 = repo[p1]
1026 p1 = repo[p1]
920 p2 = repo[p2 or nullid]
1027 p2 = repo[p2 or nullid]
921 elif p2:
1028 elif p2:
922 try:
1029 try:
923 p1 = repo[p1]
1030 p1 = repo[p1]
924 p2 = repo[p2]
1031 p2 = repo[p2]
925 # Without any options, consider p2 only if the
1032 # Without any options, consider p2 only if the
926 # patch is being applied on top of the recorded
1033 # patch is being applied on top of the recorded
927 # first parent.
1034 # first parent.
928 if p1 != parents[0]:
1035 if p1 != parents[0]:
929 p1 = parents[0]
1036 p1 = parents[0]
930 p2 = repo[nullid]
1037 p2 = repo[nullid]
931 except error.RepoError:
1038 except error.RepoError:
932 p1, p2 = parents
1039 p1, p2 = parents
933 if p2.node() == nullid:
1040 if p2.node() == nullid:
934 ui.warn(_("warning: import the patch as a normal revision\n"
1041 ui.warn(_("warning: import the patch as a normal revision\n"
935 "(use --exact to import the patch as a merge)\n"))
1042 "(use --exact to import the patch as a merge)\n"))
936 else:
1043 else:
937 p1, p2 = parents
1044 p1, p2 = parents
938
1045
939 n = None
1046 n = None
940 if update:
1047 if update:
941 if p1 != parents[0]:
1048 if p1 != parents[0]:
942 updatefunc(repo, p1.node())
1049 updatefunc(repo, p1.node())
943 if p2 != parents[1]:
1050 if p2 != parents[1]:
944 repo.setparents(p1.node(), p2.node())
1051 repo.setparents(p1.node(), p2.node())
945
1052
946 if opts.get('exact') or importbranch:
1053 if opts.get('exact') or importbranch:
947 repo.dirstate.setbranch(branch or 'default')
1054 repo.dirstate.setbranch(branch or 'default')
948
1055
949 partial = opts.get('partial', False)
1056 partial = opts.get('partial', False)
950 files = set()
1057 files = set()
951 try:
1058 try:
952 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1059 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
953 files=files, eolmode=None, similarity=sim / 100.0)
1060 files=files, eolmode=None, similarity=sim / 100.0)
954 except patch.PatchError as e:
1061 except patch.PatchError as e:
955 if not partial:
1062 if not partial:
956 raise error.Abort(str(e))
1063 raise error.Abort(str(e))
957 if partial:
1064 if partial:
958 rejects = True
1065 rejects = True
959
1066
960 files = list(files)
1067 files = list(files)
961 if nocommit:
1068 if nocommit:
962 if message:
1069 if message:
963 msgs.append(message)
1070 msgs.append(message)
964 else:
1071 else:
965 if opts.get('exact') or p2:
1072 if opts.get('exact') or p2:
966 # If you got here, you either use --force and know what
1073 # If you got here, you either use --force and know what
967 # you are doing or used --exact or a merge patch while
1074 # you are doing or used --exact or a merge patch while
968 # being updated to its first parent.
1075 # being updated to its first parent.
969 m = None
1076 m = None
970 else:
1077 else:
971 m = scmutil.matchfiles(repo, files or [])
1078 m = scmutil.matchfiles(repo, files or [])
972 editform = mergeeditform(repo[None], 'import.normal')
1079 editform = mergeeditform(repo[None], 'import.normal')
973 if opts.get('exact'):
1080 if opts.get('exact'):
974 editor = None
1081 editor = None
975 else:
1082 else:
976 editor = getcommiteditor(editform=editform, **opts)
1083 editor = getcommiteditor(editform=editform, **opts)
977 extra = {}
1084 extra = {}
978 for idfunc in extrapreimport:
1085 for idfunc in extrapreimport:
979 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1086 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
980 overrides = {}
1087 overrides = {}
981 if partial:
1088 if partial:
982 overrides[('ui', 'allowemptycommit')] = True
1089 overrides[('ui', 'allowemptycommit')] = True
983 with repo.ui.configoverride(overrides, 'import'):
1090 with repo.ui.configoverride(overrides, 'import'):
984 n = repo.commit(message, user,
1091 n = repo.commit(message, user,
985 date, match=m,
1092 date, match=m,
986 editor=editor, extra=extra)
1093 editor=editor, extra=extra)
987 for idfunc in extrapostimport:
1094 for idfunc in extrapostimport:
988 extrapostimportmap[idfunc](repo[n])
1095 extrapostimportmap[idfunc](repo[n])
989 else:
1096 else:
990 if opts.get('exact') or importbranch:
1097 if opts.get('exact') or importbranch:
991 branch = branch or 'default'
1098 branch = branch or 'default'
992 else:
1099 else:
993 branch = p1.branch()
1100 branch = p1.branch()
994 store = patch.filestore()
1101 store = patch.filestore()
995 try:
1102 try:
996 files = set()
1103 files = set()
997 try:
1104 try:
998 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1105 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
999 files, eolmode=None)
1106 files, eolmode=None)
1000 except patch.PatchError as e:
1107 except patch.PatchError as e:
1001 raise error.Abort(str(e))
1108 raise error.Abort(str(e))
1002 if opts.get('exact'):
1109 if opts.get('exact'):
1003 editor = None
1110 editor = None
1004 else:
1111 else:
1005 editor = getcommiteditor(editform='import.bypass')
1112 editor = getcommiteditor(editform='import.bypass')
1006 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1113 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1007 message,
1114 message,
1008 user,
1115 user,
1009 date,
1116 date,
1010 branch, files, store,
1117 branch, files, store,
1011 editor=editor)
1118 editor=editor)
1012 n = memctx.commit()
1119 n = memctx.commit()
1013 finally:
1120 finally:
1014 store.close()
1121 store.close()
1015 if opts.get('exact') and nocommit:
1122 if opts.get('exact') and nocommit:
1016 # --exact with --no-commit is still useful in that it does merge
1123 # --exact with --no-commit is still useful in that it does merge
1017 # and branch bits
1124 # and branch bits
1018 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1019 elif opts.get('exact') and hex(n) != nodeid:
1126 elif opts.get('exact') and hex(n) != nodeid:
1020 raise error.Abort(_('patch is damaged or loses information'))
1127 raise error.Abort(_('patch is damaged or loses information'))
1021 msg = _('applied to working directory')
1128 msg = _('applied to working directory')
1022 if n:
1129 if n:
1023 # i18n: refers to a short changeset id
1130 # i18n: refers to a short changeset id
1024 msg = _('created %s') % short(n)
1131 msg = _('created %s') % short(n)
1025 return (msg, n, rejects)
1132 return (msg, n, rejects)
1026 finally:
1133 finally:
1027 os.unlink(tmpname)
1134 os.unlink(tmpname)
1028
1135
1029 # facility to let extensions include additional data in an exported patch
1136 # facility to let extensions include additional data in an exported patch
1030 # list of identifiers to be executed in order
1137 # list of identifiers to be executed in order
1031 extraexport = []
1138 extraexport = []
1032 # mapping from identifier to actual export function
1139 # mapping from identifier to actual export function
1033 # the function has to return a string to be added to the header, or None
1140 # the function has to return a string to be added to the header, or None
1034 # it is given two arguments (sequencenumber, changectx)
1141 # it is given two arguments (sequencenumber, changectx)
1035 extraexportmap = {}
1142 extraexportmap = {}
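# Hypothetical registration of an extra export header, following the contract in
# the comments above (take (sequencenumber, changectx), return a string or None);
# the 'seqno' identifier and header text are made up.
def seqnoheader(seqno, ctx):
    return 'Series-position %d' % seqno    # rendered by export() as "# Series-position N"

extraexport.append('seqno')
extraexportmap['seqno'] = seqnoheader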
1036
1143
1037 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1144 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1038 opts=None, match=None):
1145 opts=None, match=None):
1039 '''export changesets as hg patches.'''
1146 '''export changesets as hg patches.'''
1040
1147
1041 total = len(revs)
1148 total = len(revs)
1042 revwidth = max([len(str(rev)) for rev in revs])
1149 revwidth = max([len(str(rev)) for rev in revs])
1043 filemode = {}
1150 filemode = {}
1044
1151
1045 def single(rev, seqno, fp):
1152 def single(rev, seqno, fp):
1046 ctx = repo[rev]
1153 ctx = repo[rev]
1047 node = ctx.node()
1154 node = ctx.node()
1048 parents = [p.node() for p in ctx.parents() if p]
1155 parents = [p.node() for p in ctx.parents() if p]
1049 branch = ctx.branch()
1156 branch = ctx.branch()
1050 if switch_parent:
1157 if switch_parent:
1051 parents.reverse()
1158 parents.reverse()
1052
1159
1053 if parents:
1160 if parents:
1054 prev = parents[0]
1161 prev = parents[0]
1055 else:
1162 else:
1056 prev = nullid
1163 prev = nullid
1057
1164
1058 shouldclose = False
1165 shouldclose = False
1059 if not fp and len(template) > 0:
1166 if not fp and len(template) > 0:
1060 desc_lines = ctx.description().rstrip().split('\n')
1167 desc_lines = ctx.description().rstrip().split('\n')
1061 desc = desc_lines[0] #Commit always has a first line.
1168 desc = desc_lines[0] #Commit always has a first line.
1062 fp = makefileobj(repo, template, node, desc=desc, total=total,
1169 fp = makefileobj(repo, template, node, desc=desc, total=total,
1063 seqno=seqno, revwidth=revwidth, mode='wb',
1170 seqno=seqno, revwidth=revwidth, mode='wb',
1064 modemap=filemode)
1171 modemap=filemode)
1065 shouldclose = True
1172 shouldclose = True
1066 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1173 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1067 repo.ui.note("%s\n" % fp.name)
1174 repo.ui.note("%s\n" % fp.name)
1068
1175
1069 if not fp:
1176 if not fp:
1070 write = repo.ui.write
1177 write = repo.ui.write
1071 else:
1178 else:
1072 def write(s, **kw):
1179 def write(s, **kw):
1073 fp.write(s)
1180 fp.write(s)
1074
1181
1075 write("# HG changeset patch\n")
1182 write("# HG changeset patch\n")
1076 write("# User %s\n" % ctx.user())
1183 write("# User %s\n" % ctx.user())
1077 write("# Date %d %d\n" % ctx.date())
1184 write("# Date %d %d\n" % ctx.date())
1078 write("# %s\n" % util.datestr(ctx.date()))
1185 write("# %s\n" % util.datestr(ctx.date()))
1079 if branch and branch != 'default':
1186 if branch and branch != 'default':
1080 write("# Branch %s\n" % branch)
1187 write("# Branch %s\n" % branch)
1081 write("# Node ID %s\n" % hex(node))
1188 write("# Node ID %s\n" % hex(node))
1082 write("# Parent %s\n" % hex(prev))
1189 write("# Parent %s\n" % hex(prev))
1083 if len(parents) > 1:
1190 if len(parents) > 1:
1084 write("# Parent %s\n" % hex(parents[1]))
1191 write("# Parent %s\n" % hex(parents[1]))
1085
1192
1086 for headerid in extraexport:
1193 for headerid in extraexport:
1087 header = extraexportmap[headerid](seqno, ctx)
1194 header = extraexportmap[headerid](seqno, ctx)
1088 if header is not None:
1195 if header is not None:
1089 write('# %s\n' % header)
1196 write('# %s\n' % header)
1090 write(ctx.description().rstrip())
1197 write(ctx.description().rstrip())
1091 write("\n\n")
1198 write("\n\n")
1092
1199
1093 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1200 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1094 write(chunk, label=label)
1201 write(chunk, label=label)
1095
1202
1096 if shouldclose:
1203 if shouldclose:
1097 fp.close()
1204 fp.close()
1098
1205
1099 for seqno, rev in enumerate(revs):
1206 for seqno, rev in enumerate(revs):
1100 single(rev, seqno + 1, fp)
1207 single(rev, seqno + 1, fp)
1101
1208
1102 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1209 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1103 changes=None, stat=False, fp=None, prefix='',
1210 changes=None, stat=False, fp=None, prefix='',
1104 root='', listsubrepos=False):
1211 root='', listsubrepos=False):
1105 '''show diff or diffstat.'''
1212 '''show diff or diffstat.'''
1106 if fp is None:
1213 if fp is None:
1107 write = ui.write
1214 write = ui.write
1108 else:
1215 else:
1109 def write(s, **kw):
1216 def write(s, **kw):
1110 fp.write(s)
1217 fp.write(s)
1111
1218
1112 if root:
1219 if root:
1113 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1220 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1114 else:
1221 else:
1115 relroot = ''
1222 relroot = ''
1116 if relroot != '':
1223 if relroot != '':
1117 # XXX relative roots currently don't work if the root is within a
1224 # XXX relative roots currently don't work if the root is within a
1118 # subrepo
1225 # subrepo
1119 uirelroot = match.uipath(relroot)
1226 uirelroot = match.uipath(relroot)
1120 relroot += '/'
1227 relroot += '/'
1121 for matchroot in match.files():
1228 for matchroot in match.files():
1122 if not matchroot.startswith(relroot):
1229 if not matchroot.startswith(relroot):
1123 ui.warn(_('warning: %s not inside relative root %s\n') % (
1230 ui.warn(_('warning: %s not inside relative root %s\n') % (
1124 match.uipath(matchroot), uirelroot))
1231 match.uipath(matchroot), uirelroot))
1125
1232
1126 if stat:
1233 if stat:
1127 diffopts = diffopts.copy(context=0)
1234 diffopts = diffopts.copy(context=0)
1128 width = 80
1235 width = 80
1129 if not ui.plain():
1236 if not ui.plain():
1130 width = ui.termwidth()
1237 width = ui.termwidth()
1131 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1238 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1132 prefix=prefix, relroot=relroot)
1239 prefix=prefix, relroot=relroot)
1133 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1240 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1134 width=width):
1241 width=width):
1135 write(chunk, label=label)
1242 write(chunk, label=label)
1136 else:
1243 else:
1137 for chunk, label in patch.diffui(repo, node1, node2, match,
1244 for chunk, label in patch.diffui(repo, node1, node2, match,
1138 changes, diffopts, prefix=prefix,
1245 changes, diffopts, prefix=prefix,
1139 relroot=relroot):
1246 relroot=relroot):
1140 write(chunk, label=label)
1247 write(chunk, label=label)
1141
1248
1142 if listsubrepos:
1249 if listsubrepos:
1143 ctx1 = repo[node1]
1250 ctx1 = repo[node1]
1144 ctx2 = repo[node2]
1251 ctx2 = repo[node2]
1145 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1252 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1146 tempnode2 = node2
1253 tempnode2 = node2
1147 try:
1254 try:
1148 if node2 is not None:
1255 if node2 is not None:
1149 tempnode2 = ctx2.substate[subpath][1]
1256 tempnode2 = ctx2.substate[subpath][1]
1150 except KeyError:
1257 except KeyError:
1151 # A subrepo that existed in node1 was deleted between node1 and
1258 # A subrepo that existed in node1 was deleted between node1 and
1152 # node2 (inclusive). Thus, ctx2's substate won't contain that
1259 # node2 (inclusive). Thus, ctx2's substate won't contain that
1153 # subpath. The best we can do is to ignore it.
1260 # subpath. The best we can do is to ignore it.
1154 tempnode2 = None
1261 tempnode2 = None
1155 submatch = matchmod.subdirmatcher(subpath, match)
1262 submatch = matchmod.subdirmatcher(subpath, match)
1156 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1263 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1157 stat=stat, fp=fp, prefix=prefix)
1264 stat=stat, fp=fp, prefix=prefix)
1158
1265
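# Illustrative sketch (not part of cmdutil.py): both single() and
# diffordiffstat() above pick an output sink once -- ui.write when no file
# object is given, otherwise a small local write() wrapper that forwards to
# the file and silently drops keyword arguments such as label=. A
# standalone version of that pattern (the names here are invented):
import io
import sys

def makewrite(fp=None):
    if fp is None:
        return lambda s, **kw: sys.stdout.write(s)
    def write(s, **kw):
        # labels only matter for ui output; ignore them for plain files
        fp.write(s)
    return write

buf = io.StringIO()
write = makewrite(buf)
write("diff --git a/x b/x\n", label='diff.file')  # label is ignored
assert buf.getvalue().startswith("diff --git")
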
1159 def _changesetlabels(ctx):
1266 def _changesetlabels(ctx):
1160 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1267 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1161 if ctx.obsolete():
1268 if ctx.obsolete():
1162 labels.append('changeset.obsolete')
1269 labels.append('changeset.obsolete')
1163 if ctx.troubled():
1270 if ctx.troubled():
1164 labels.append('changeset.troubled')
1271 labels.append('changeset.troubled')
1165 for trouble in ctx.troubles():
1272 for trouble in ctx.troubles():
1166 labels.append('trouble.%s' % trouble)
1273 labels.append('trouble.%s' % trouble)
1167 return ' '.join(labels)
1274 return ' '.join(labels)
1168
1275
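# Illustrative sketch (not part of cmdutil.py): _changesetlabels() above
# assembles the space-separated label string used to colorize a log entry.
# A simplified re-implementation with a minimal stand-in context object;
# the phase and trouble names are examples, not data from a real repository.
class _fakectx(object):
    def phasestr(self):
        return 'draft'
    def obsolete(self):
        return False
    def troubled(self):
        return True
    def troubles(self):
        return ['unstable']

def examplelabels(ctx):
    labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        labels.append('changeset.obsolete')
    if ctx.troubled():
        labels.append('changeset.troubled')
    for trouble in ctx.troubles():
        labels.append('trouble.%s' % trouble)
    return ' '.join(labels)

print(examplelabels(_fakectx()))
# log.changeset changeset.draft changeset.troubled trouble.unstable
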
1169 class changeset_printer(object):
1276 class changeset_printer(object):
1170 '''show changeset information when templating not requested.'''
1277 '''show changeset information when templating not requested.'''
1171
1278
1172 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1279 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1173 self.ui = ui
1280 self.ui = ui
1174 self.repo = repo
1281 self.repo = repo
1175 self.buffered = buffered
1282 self.buffered = buffered
1176 self.matchfn = matchfn
1283 self.matchfn = matchfn
1177 self.diffopts = diffopts
1284 self.diffopts = diffopts
1178 self.header = {}
1285 self.header = {}
1179 self.hunk = {}
1286 self.hunk = {}
1180 self.lastheader = None
1287 self.lastheader = None
1181 self.footer = None
1288 self.footer = None
1182
1289
1183 def flush(self, ctx):
1290 def flush(self, ctx):
1184 rev = ctx.rev()
1291 rev = ctx.rev()
1185 if rev in self.header:
1292 if rev in self.header:
1186 h = self.header[rev]
1293 h = self.header[rev]
1187 if h != self.lastheader:
1294 if h != self.lastheader:
1188 self.lastheader = h
1295 self.lastheader = h
1189 self.ui.write(h)
1296 self.ui.write(h)
1190 del self.header[rev]
1297 del self.header[rev]
1191 if rev in self.hunk:
1298 if rev in self.hunk:
1192 self.ui.write(self.hunk[rev])
1299 self.ui.write(self.hunk[rev])
1193 del self.hunk[rev]
1300 del self.hunk[rev]
1194 return 1
1301 return 1
1195 return 0
1302 return 0
1196
1303
1197 def close(self):
1304 def close(self):
1198 if self.footer:
1305 if self.footer:
1199 self.ui.write(self.footer)
1306 self.ui.write(self.footer)
1200
1307
1201 def show(self, ctx, copies=None, matchfn=None, **props):
1308 def show(self, ctx, copies=None, matchfn=None, **props):
1202 if self.buffered:
1309 if self.buffered:
1203 self.ui.pushbuffer(labeled=True)
1310 self.ui.pushbuffer(labeled=True)
1204 self._show(ctx, copies, matchfn, props)
1311 self._show(ctx, copies, matchfn, props)
1205 self.hunk[ctx.rev()] = self.ui.popbuffer()
1312 self.hunk[ctx.rev()] = self.ui.popbuffer()
1206 else:
1313 else:
1207 self._show(ctx, copies, matchfn, props)
1314 self._show(ctx, copies, matchfn, props)
1208
1315
1209 def _show(self, ctx, copies, matchfn, props):
1316 def _show(self, ctx, copies, matchfn, props):
1210 '''show a single changeset or file revision'''
1317 '''show a single changeset or file revision'''
1211 changenode = ctx.node()
1318 changenode = ctx.node()
1212 rev = ctx.rev()
1319 rev = ctx.rev()
1213 if self.ui.debugflag:
1320 if self.ui.debugflag:
1214 hexfunc = hex
1321 hexfunc = hex
1215 else:
1322 else:
1216 hexfunc = short
1323 hexfunc = short
1217 # as of now, wctx.node() and wctx.rev() return None, but we want to
1324 # as of now, wctx.node() and wctx.rev() return None, but we want to
1218 # show the same values as {node} and {rev} templatekw
1325 # show the same values as {node} and {rev} templatekw
1219 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1326 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1220
1327
1221 if self.ui.quiet:
1328 if self.ui.quiet:
1222 self.ui.write("%d:%s\n" % revnode, label='log.node')
1329 self.ui.write("%d:%s\n" % revnode, label='log.node')
1223 return
1330 return
1224
1331
1225 date = util.datestr(ctx.date())
1332 date = util.datestr(ctx.date())
1226
1333
1227 # i18n: column positioning for "hg log"
1334 # i18n: column positioning for "hg log"
1228 self.ui.write(_("changeset: %d:%s\n") % revnode,
1335 self.ui.write(_("changeset: %d:%s\n") % revnode,
1229 label=_changesetlabels(ctx))
1336 label=_changesetlabels(ctx))
1230
1337
1231 # branches are shown first, before any other names, due to backwards
1338 # branches are shown first, before any other names, due to backwards
1232 # compatibility
1339 # compatibility
1233 branch = ctx.branch()
1340 branch = ctx.branch()
1234 # don't show the default branch name
1341 # don't show the default branch name
1235 if branch != 'default':
1342 if branch != 'default':
1236 # i18n: column positioning for "hg log"
1343 # i18n: column positioning for "hg log"
1237 self.ui.write(_("branch: %s\n") % branch,
1344 self.ui.write(_("branch: %s\n") % branch,
1238 label='log.branch')
1345 label='log.branch')
1239
1346
1240 for nsname, ns in self.repo.names.iteritems():
1347 for nsname, ns in self.repo.names.iteritems():
1241 # branches has special logic already handled above, so here we just
1348 # branches has special logic already handled above, so here we just
1242 # skip it
1349 # skip it
1243 if nsname == 'branches':
1350 if nsname == 'branches':
1244 continue
1351 continue
1245 # we will use the templatename as the color name since those two
1352 # we will use the templatename as the color name since those two
1246 # should be the same
1353 # should be the same
1247 for name in ns.names(self.repo, changenode):
1354 for name in ns.names(self.repo, changenode):
1248 self.ui.write(ns.logfmt % name,
1355 self.ui.write(ns.logfmt % name,
1249 label='log.%s' % ns.colorname)
1356 label='log.%s' % ns.colorname)
1250 if self.ui.debugflag:
1357 if self.ui.debugflag:
1251 # i18n: column positioning for "hg log"
1358 # i18n: column positioning for "hg log"
1252 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1359 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1253 label='log.phase')
1360 label='log.phase')
1254 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1361 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1255 label = 'log.parent changeset.%s' % pctx.phasestr()
1362 label = 'log.parent changeset.%s' % pctx.phasestr()
1256 # i18n: column positioning for "hg log"
1363 # i18n: column positioning for "hg log"
1257 self.ui.write(_("parent: %d:%s\n")
1364 self.ui.write(_("parent: %d:%s\n")
1258 % (pctx.rev(), hexfunc(pctx.node())),
1365 % (pctx.rev(), hexfunc(pctx.node())),
1259 label=label)
1366 label=label)
1260
1367
1261 if self.ui.debugflag and rev is not None:
1368 if self.ui.debugflag and rev is not None:
1262 mnode = ctx.manifestnode()
1369 mnode = ctx.manifestnode()
1263 # i18n: column positioning for "hg log"
1370 # i18n: column positioning for "hg log"
1264 self.ui.write(_("manifest: %d:%s\n") %
1371 self.ui.write(_("manifest: %d:%s\n") %
1265 (self.repo.manifestlog._revlog.rev(mnode),
1372 (self.repo.manifestlog._revlog.rev(mnode),
1266 hex(mnode)),
1373 hex(mnode)),
1267 label='ui.debug log.manifest')
1374 label='ui.debug log.manifest')
1268 # i18n: column positioning for "hg log"
1375 # i18n: column positioning for "hg log"
1269 self.ui.write(_("user: %s\n") % ctx.user(),
1376 self.ui.write(_("user: %s\n") % ctx.user(),
1270 label='log.user')
1377 label='log.user')
1271 # i18n: column positioning for "hg log"
1378 # i18n: column positioning for "hg log"
1272 self.ui.write(_("date: %s\n") % date,
1379 self.ui.write(_("date: %s\n") % date,
1273 label='log.date')
1380 label='log.date')
1274
1381
1275 if ctx.troubled():
1382 if ctx.troubled():
1276 # i18n: column positioning for "hg log"
1383 # i18n: column positioning for "hg log"
1277 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1384 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1278 label='log.trouble')
1385 label='log.trouble')
1279
1386
1280 if self.ui.debugflag:
1387 if self.ui.debugflag:
1281 files = ctx.p1().status(ctx)[:3]
1388 files = ctx.p1().status(ctx)[:3]
1282 for key, value in zip([# i18n: column positioning for "hg log"
1389 for key, value in zip([# i18n: column positioning for "hg log"
1283 _("files:"),
1390 _("files:"),
1284 # i18n: column positioning for "hg log"
1391 # i18n: column positioning for "hg log"
1285 _("files+:"),
1392 _("files+:"),
1286 # i18n: column positioning for "hg log"
1393 # i18n: column positioning for "hg log"
1287 _("files-:")], files):
1394 _("files-:")], files):
1288 if value:
1395 if value:
1289 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1396 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1290 label='ui.debug log.files')
1397 label='ui.debug log.files')
1291 elif ctx.files() and self.ui.verbose:
1398 elif ctx.files() and self.ui.verbose:
1292 # i18n: column positioning for "hg log"
1399 # i18n: column positioning for "hg log"
1293 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1400 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1294 label='ui.note log.files')
1401 label='ui.note log.files')
1295 if copies and self.ui.verbose:
1402 if copies and self.ui.verbose:
1296 copies = ['%s (%s)' % c for c in copies]
1403 copies = ['%s (%s)' % c for c in copies]
1297 # i18n: column positioning for "hg log"
1404 # i18n: column positioning for "hg log"
1298 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1405 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1299 label='ui.note log.copies')
1406 label='ui.note log.copies')
1300
1407
1301 extra = ctx.extra()
1408 extra = ctx.extra()
1302 if extra and self.ui.debugflag:
1409 if extra and self.ui.debugflag:
1303 for key, value in sorted(extra.items()):
1410 for key, value in sorted(extra.items()):
1304 # i18n: column positioning for "hg log"
1411 # i18n: column positioning for "hg log"
1305 self.ui.write(_("extra: %s=%s\n")
1412 self.ui.write(_("extra: %s=%s\n")
1306 % (key, util.escapestr(value)),
1413 % (key, util.escapestr(value)),
1307 label='ui.debug log.extra')
1414 label='ui.debug log.extra')
1308
1415
1309 description = ctx.description().strip()
1416 description = ctx.description().strip()
1310 if description:
1417 if description:
1311 if self.ui.verbose:
1418 if self.ui.verbose:
1312 self.ui.write(_("description:\n"),
1419 self.ui.write(_("description:\n"),
1313 label='ui.note log.description')
1420 label='ui.note log.description')
1314 self.ui.write(description,
1421 self.ui.write(description,
1315 label='ui.note log.description')
1422 label='ui.note log.description')
1316 self.ui.write("\n\n")
1423 self.ui.write("\n\n")
1317 else:
1424 else:
1318 # i18n: column positioning for "hg log"
1425 # i18n: column positioning for "hg log"
1319 self.ui.write(_("summary: %s\n") %
1426 self.ui.write(_("summary: %s\n") %
1320 description.splitlines()[0],
1427 description.splitlines()[0],
1321 label='log.summary')
1428 label='log.summary')
1322 self.ui.write("\n")
1429 self.ui.write("\n")
1323
1430
1324 self.showpatch(ctx, matchfn)
1431 self.showpatch(ctx, matchfn)
1325
1432
1326 def showpatch(self, ctx, matchfn):
1433 def showpatch(self, ctx, matchfn):
1327 if not matchfn:
1434 if not matchfn:
1328 matchfn = self.matchfn
1435 matchfn = self.matchfn
1329 if matchfn:
1436 if matchfn:
1330 stat = self.diffopts.get('stat')
1437 stat = self.diffopts.get('stat')
1331 diff = self.diffopts.get('patch')
1438 diff = self.diffopts.get('patch')
1332 diffopts = patch.diffallopts(self.ui, self.diffopts)
1439 diffopts = patch.diffallopts(self.ui, self.diffopts)
1333 node = ctx.node()
1440 node = ctx.node()
1334 prev = ctx.p1().node()
1441 prev = ctx.p1().node()
1335 if stat:
1442 if stat:
1336 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1443 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1337 match=matchfn, stat=True)
1444 match=matchfn, stat=True)
1338 if diff:
1445 if diff:
1339 if stat:
1446 if stat:
1340 self.ui.write("\n")
1447 self.ui.write("\n")
1341 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1448 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1342 match=matchfn, stat=False)
1449 match=matchfn, stat=False)
1343 self.ui.write("\n")
1450 self.ui.write("\n")
1344
1451
1345 class jsonchangeset(changeset_printer):
1452 class jsonchangeset(changeset_printer):
1346 '''format changeset information.'''
1453 '''format changeset information.'''
1347
1454
1348 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1455 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1349 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1456 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1350 self.cache = {}
1457 self.cache = {}
1351 self._first = True
1458 self._first = True
1352
1459
1353 def close(self):
1460 def close(self):
1354 if not self._first:
1461 if not self._first:
1355 self.ui.write("\n]\n")
1462 self.ui.write("\n]\n")
1356 else:
1463 else:
1357 self.ui.write("[]\n")
1464 self.ui.write("[]\n")
1358
1465
1359 def _show(self, ctx, copies, matchfn, props):
1466 def _show(self, ctx, copies, matchfn, props):
1360 '''show a single changeset or file revision'''
1467 '''show a single changeset or file revision'''
1361 rev = ctx.rev()
1468 rev = ctx.rev()
1362 if rev is None:
1469 if rev is None:
1363 jrev = jnode = 'null'
1470 jrev = jnode = 'null'
1364 else:
1471 else:
1365 jrev = '%d' % rev
1472 jrev = '%d' % rev
1366 jnode = '"%s"' % hex(ctx.node())
1473 jnode = '"%s"' % hex(ctx.node())
1367 j = encoding.jsonescape
1474 j = encoding.jsonescape
1368
1475
1369 if self._first:
1476 if self._first:
1370 self.ui.write("[\n {")
1477 self.ui.write("[\n {")
1371 self._first = False
1478 self._first = False
1372 else:
1479 else:
1373 self.ui.write(",\n {")
1480 self.ui.write(",\n {")
1374
1481
1375 if self.ui.quiet:
1482 if self.ui.quiet:
1376 self.ui.write(('\n "rev": %s') % jrev)
1483 self.ui.write(('\n "rev": %s') % jrev)
1377 self.ui.write((',\n "node": %s') % jnode)
1484 self.ui.write((',\n "node": %s') % jnode)
1378 self.ui.write('\n }')
1485 self.ui.write('\n }')
1379 return
1486 return
1380
1487
1381 self.ui.write(('\n "rev": %s') % jrev)
1488 self.ui.write(('\n "rev": %s') % jrev)
1382 self.ui.write((',\n "node": %s') % jnode)
1489 self.ui.write((',\n "node": %s') % jnode)
1383 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1490 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1384 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1491 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1385 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1492 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1386 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1493 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1387 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1494 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1388
1495
1389 self.ui.write((',\n "bookmarks": [%s]') %
1496 self.ui.write((',\n "bookmarks": [%s]') %
1390 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1497 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1391 self.ui.write((',\n "tags": [%s]') %
1498 self.ui.write((',\n "tags": [%s]') %
1392 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1499 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1393 self.ui.write((',\n "parents": [%s]') %
1500 self.ui.write((',\n "parents": [%s]') %
1394 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1501 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1395
1502
1396 if self.ui.debugflag:
1503 if self.ui.debugflag:
1397 if rev is None:
1504 if rev is None:
1398 jmanifestnode = 'null'
1505 jmanifestnode = 'null'
1399 else:
1506 else:
1400 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1507 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1401 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1508 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1402
1509
1403 self.ui.write((',\n "extra": {%s}') %
1510 self.ui.write((',\n "extra": {%s}') %
1404 ", ".join('"%s": "%s"' % (j(k), j(v))
1511 ", ".join('"%s": "%s"' % (j(k), j(v))
1405 for k, v in ctx.extra().items()))
1512 for k, v in ctx.extra().items()))
1406
1513
1407 files = ctx.p1().status(ctx)
1514 files = ctx.p1().status(ctx)
1408 self.ui.write((',\n "modified": [%s]') %
1515 self.ui.write((',\n "modified": [%s]') %
1409 ", ".join('"%s"' % j(f) for f in files[0]))
1516 ", ".join('"%s"' % j(f) for f in files[0]))
1410 self.ui.write((',\n "added": [%s]') %
1517 self.ui.write((',\n "added": [%s]') %
1411 ", ".join('"%s"' % j(f) for f in files[1]))
1518 ", ".join('"%s"' % j(f) for f in files[1]))
1412 self.ui.write((',\n "removed": [%s]') %
1519 self.ui.write((',\n "removed": [%s]') %
1413 ", ".join('"%s"' % j(f) for f in files[2]))
1520 ", ".join('"%s"' % j(f) for f in files[2]))
1414
1521
1415 elif self.ui.verbose:
1522 elif self.ui.verbose:
1416 self.ui.write((',\n "files": [%s]') %
1523 self.ui.write((',\n "files": [%s]') %
1417 ", ".join('"%s"' % j(f) for f in ctx.files()))
1524 ", ".join('"%s"' % j(f) for f in ctx.files()))
1418
1525
1419 if copies:
1526 if copies:
1420 self.ui.write((',\n "copies": {%s}') %
1527 self.ui.write((',\n "copies": {%s}') %
1421 ", ".join('"%s": "%s"' % (j(k), j(v))
1528 ", ".join('"%s": "%s"' % (j(k), j(v))
1422 for k, v in copies))
1529 for k, v in copies))
1423
1530
1424 matchfn = self.matchfn
1531 matchfn = self.matchfn
1425 if matchfn:
1532 if matchfn:
1426 stat = self.diffopts.get('stat')
1533 stat = self.diffopts.get('stat')
1427 diff = self.diffopts.get('patch')
1534 diff = self.diffopts.get('patch')
1428 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1535 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1429 node, prev = ctx.node(), ctx.p1().node()
1536 node, prev = ctx.node(), ctx.p1().node()
1430 if stat:
1537 if stat:
1431 self.ui.pushbuffer()
1538 self.ui.pushbuffer()
1432 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1539 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1433 match=matchfn, stat=True)
1540 match=matchfn, stat=True)
1434 self.ui.write((',\n "diffstat": "%s"')
1541 self.ui.write((',\n "diffstat": "%s"')
1435 % j(self.ui.popbuffer()))
1542 % j(self.ui.popbuffer()))
1436 if diff:
1543 if diff:
1437 self.ui.pushbuffer()
1544 self.ui.pushbuffer()
1438 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1545 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1439 match=matchfn, stat=False)
1546 match=matchfn, stat=False)
1440 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1547 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1441
1548
1442 self.ui.write("\n }")
1549 self.ui.write("\n }")
1443
1550
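# Illustrative sketch (not part of cmdutil.py): jsonchangeset above builds a
# JSON array by hand, one object per changeset, opening it with "[\n {" and
# closing it in close(). In quiet mode only "rev" and "node" are emitted per
# record; the snippet below shows an equivalent document produced with the
# json module, using placeholder values rather than real repository data.
import json

records = [
    {"rev": 0, "node": "0" * 40},
    {"rev": 1, "node": "f" * 40},
]
print(json.dumps(records, indent=1))
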
1444 class changeset_templater(changeset_printer):
1551 class changeset_templater(changeset_printer):
1445 '''format changeset information.'''
1552 '''format changeset information.'''
1446
1553
1447 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1554 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1448 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1555 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1449 assert not (tmpl and mapfile)
1556 assert not (tmpl and mapfile)
1450 defaulttempl = templatekw.defaulttempl
1557 defaulttempl = templatekw.defaulttempl
1451 if mapfile:
1558 if mapfile:
1452 self.t = templater.templater.frommapfile(mapfile,
1559 self.t = templater.templater.frommapfile(mapfile,
1453 cache=defaulttempl)
1560 cache=defaulttempl)
1454 else:
1561 else:
1455 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1562 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1456 cache=defaulttempl)
1563 cache=defaulttempl)
1457
1564
1458 self._counter = itertools.count()
1565 self._counter = itertools.count()
1459 self.cache = {}
1566 self.cache = {}
1460
1567
1461 # find correct templates for current mode
1568 # find correct templates for current mode
1462 tmplmodes = [
1569 tmplmodes = [
1463 (True, None),
1570 (True, None),
1464 (self.ui.verbose, 'verbose'),
1571 (self.ui.verbose, 'verbose'),
1465 (self.ui.quiet, 'quiet'),
1572 (self.ui.quiet, 'quiet'),
1466 (self.ui.debugflag, 'debug'),
1573 (self.ui.debugflag, 'debug'),
1467 ]
1574 ]
1468
1575
1469 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1576 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1470 'docheader': '', 'docfooter': ''}
1577 'docheader': '', 'docfooter': ''}
1471 for mode, postfix in tmplmodes:
1578 for mode, postfix in tmplmodes:
1472 for t in self._parts:
1579 for t in self._parts:
1473 cur = t
1580 cur = t
1474 if postfix:
1581 if postfix:
1475 cur += "_" + postfix
1582 cur += "_" + postfix
1476 if mode and cur in self.t:
1583 if mode and cur in self.t:
1477 self._parts[t] = cur
1584 self._parts[t] = cur
1478
1585
1479 if self._parts['docheader']:
1586 if self._parts['docheader']:
1480 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1587 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1481
1588
1482 def close(self):
1589 def close(self):
1483 if self._parts['docfooter']:
1590 if self._parts['docfooter']:
1484 if not self.footer:
1591 if not self.footer:
1485 self.footer = ""
1592 self.footer = ""
1486 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1593 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1487 return super(changeset_templater, self).close()
1594 return super(changeset_templater, self).close()
1488
1595
1489 def _show(self, ctx, copies, matchfn, props):
1596 def _show(self, ctx, copies, matchfn, props):
1490 '''show a single changeset or file revision'''
1597 '''show a single changeset or file revision'''
1491 props = props.copy()
1598 props = props.copy()
1492 props.update(templatekw.keywords)
1599 props.update(templatekw.keywords)
1493 props['templ'] = self.t
1600 props['templ'] = self.t
1494 props['ctx'] = ctx
1601 props['ctx'] = ctx
1495 props['repo'] = self.repo
1602 props['repo'] = self.repo
1496 props['ui'] = self.repo.ui
1603 props['ui'] = self.repo.ui
1497 props['index'] = next(self._counter)
1604 props['index'] = next(self._counter)
1498 props['revcache'] = {'copies': copies}
1605 props['revcache'] = {'copies': copies}
1499 props['cache'] = self.cache
1606 props['cache'] = self.cache
1500 props = pycompat.strkwargs(props)
1607 props = pycompat.strkwargs(props)
1501
1608
1502 # write header
1609 # write header
1503 if self._parts['header']:
1610 if self._parts['header']:
1504 h = templater.stringify(self.t(self._parts['header'], **props))
1611 h = templater.stringify(self.t(self._parts['header'], **props))
1505 if self.buffered:
1612 if self.buffered:
1506 self.header[ctx.rev()] = h
1613 self.header[ctx.rev()] = h
1507 else:
1614 else:
1508 if self.lastheader != h:
1615 if self.lastheader != h:
1509 self.lastheader = h
1616 self.lastheader = h
1510 self.ui.write(h)
1617 self.ui.write(h)
1511
1618
1512 # write changeset metadata, then patch if requested
1619 # write changeset metadata, then patch if requested
1513 key = self._parts['changeset']
1620 key = self._parts['changeset']
1514 self.ui.write(templater.stringify(self.t(key, **props)))
1621 self.ui.write(templater.stringify(self.t(key, **props)))
1515 self.showpatch(ctx, matchfn)
1622 self.showpatch(ctx, matchfn)
1516
1623
1517 if self._parts['footer']:
1624 if self._parts['footer']:
1518 if not self.footer:
1625 if not self.footer:
1519 self.footer = templater.stringify(
1626 self.footer = templater.stringify(
1520 self.t(self._parts['footer'], **props))
1627 self.t(self._parts['footer'], **props))
1521
1628
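# Illustrative sketch (not part of cmdutil.py): changeset_templater.__init__
# above selects mode-specific template parts by trying a postfixed name
# ('changeset_verbose', 'changeset_quiet', 'changeset_debug', ...) whenever
# the current mode is active and that name exists in the map file. The same
# lookup over a plain set of hypothetical template names:
def resolveparts(available, verbose=False, quiet=False, debug=False):
    tmplmodes = [(True, None), (verbose, 'verbose'),
                 (quiet, 'quiet'), (debug, 'debug')]
    parts = {'header': '', 'footer': '', 'changeset': 'changeset',
             'docheader': '', 'docfooter': ''}
    for mode, postfix in tmplmodes:
        for t in parts:
            cur = t if postfix is None else t + '_' + postfix
            if mode and cur in available:
                parts[t] = cur
    return parts

available = {'changeset', 'changeset_quiet', 'header'}
print(resolveparts(available, quiet=True)['changeset'])  # changeset_quiet
print(resolveparts(available)['changeset'])              # changeset
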
1522 def gettemplate(ui, tmpl, style):
1629 def gettemplate(ui, tmpl, style):
1523 """
1630 """
1524 Find the template matching the given template spec or style.
1631 Find the template matching the given template spec or style.
1525 """
1632 """
1526
1633
1527 # ui settings
1634 # ui settings
1528 if not tmpl and not style: # templates are stronger than styles
1635 if not tmpl and not style: # templates are stronger than styles
1529 tmpl = ui.config('ui', 'logtemplate')
1636 tmpl = ui.config('ui', 'logtemplate')
1530 if tmpl:
1637 if tmpl:
1531 return templater.unquotestring(tmpl), None
1638 return templater.unquotestring(tmpl), None
1532 else:
1639 else:
1533 style = util.expandpath(ui.config('ui', 'style', ''))
1640 style = util.expandpath(ui.config('ui', 'style', ''))
1534
1641
1535 if not tmpl and style:
1642 if not tmpl and style:
1536 mapfile = style
1643 mapfile = style
1537 if not os.path.split(mapfile)[0]:
1644 if not os.path.split(mapfile)[0]:
1538 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1645 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1539 or templater.templatepath(mapfile))
1646 or templater.templatepath(mapfile))
1540 if mapname:
1647 if mapname:
1541 mapfile = mapname
1648 mapfile = mapname
1542 return None, mapfile
1649 return None, mapfile
1543
1650
1544 if not tmpl:
1651 if not tmpl:
1545 return None, None
1652 return None, None
1546
1653
1547 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1654 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1548
1655
1549 def show_changeset(ui, repo, opts, buffered=False):
1656 def show_changeset(ui, repo, opts, buffered=False):
1550 """show one changeset using template or regular display.
1657 """show one changeset using template or regular display.
1551
1658
1552 Display format will be the first non-empty hit of:
1659 Display format will be the first non-empty hit of:
1553 1. option 'template'
1660 1. option 'template'
1554 2. option 'style'
1661 2. option 'style'
1555 3. [ui] setting 'logtemplate'
1662 3. [ui] setting 'logtemplate'
1556 4. [ui] setting 'style'
1663 4. [ui] setting 'style'
1557 If all of these values are either unset or the empty string,
1664 If all of these values are either unset or the empty string,
1558 regular display via changeset_printer() is done.
1665 regular display via changeset_printer() is done.
1559 """
1666 """
1560 # options
1667 # options
1561 matchfn = None
1668 matchfn = None
1562 if opts.get('patch') or opts.get('stat'):
1669 if opts.get('patch') or opts.get('stat'):
1563 matchfn = scmutil.matchall(repo)
1670 matchfn = scmutil.matchall(repo)
1564
1671
1565 if opts.get('template') == 'json':
1672 if opts.get('template') == 'json':
1566 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1673 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1567
1674
1568 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1675 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1569
1676
1570 if not tmpl and not mapfile:
1677 if not tmpl and not mapfile:
1571 return changeset_printer(ui, repo, matchfn, opts, buffered)
1678 return changeset_printer(ui, repo, matchfn, opts, buffered)
1572
1679
1573 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1680 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1574
1681
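# Illustrative sketch (not part of cmdutil.py): show_changeset() and
# gettemplate() above resolve the display format in the documented order
# (--template, --style, ui.logtemplate, ui.style) and fall back to
# changeset_printer when none of them is set. A simplified version of that
# precedence with plain dicts standing in for the options and the [ui]
# section; all values are hypothetical.
def pickformat(opts, uiconfig):
    for value in (opts.get('template'), opts.get('style'),
                  uiconfig.get('logtemplate'), uiconfig.get('style')):
        if value:
            return value
    return None  # None -> regular display via changeset_printer()

print(pickformat({'template': '{rev}:{node|short}\n'}, {'style': 'compact'}))
print(pickformat({}, {'style': 'compact'}))
print(pickformat({}, {}))
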
1575 def showmarker(fm, marker, index=None):
1682 def showmarker(fm, marker, index=None):
1576 """utility function to display obsolescence marker in a readable way
1683 """utility function to display obsolescence marker in a readable way
1577
1684
1578 To be used by debug function."""
1685 To be used by debug function."""
1579 if index is not None:
1686 if index is not None:
1580 fm.write('index', '%i ', index)
1687 fm.write('index', '%i ', index)
1581 fm.write('precnode', '%s ', hex(marker.precnode()))
1688 fm.write('precnode', '%s ', hex(marker.precnode()))
1582 succs = marker.succnodes()
1689 succs = marker.succnodes()
1583 fm.condwrite(succs, 'succnodes', '%s ',
1690 fm.condwrite(succs, 'succnodes', '%s ',
1584 fm.formatlist(map(hex, succs), name='node'))
1691 fm.formatlist(map(hex, succs), name='node'))
1585 fm.write('flag', '%X ', marker.flags())
1692 fm.write('flag', '%X ', marker.flags())
1586 parents = marker.parentnodes()
1693 parents = marker.parentnodes()
1587 if parents is not None:
1694 if parents is not None:
1588 fm.write('parentnodes', '{%s} ',
1695 fm.write('parentnodes', '{%s} ',
1589 fm.formatlist(map(hex, parents), name='node', sep=', '))
1696 fm.formatlist(map(hex, parents), name='node', sep=', '))
1590 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1697 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1591 meta = marker.metadata().copy()
1698 meta = marker.metadata().copy()
1592 meta.pop('date', None)
1699 meta.pop('date', None)
1593 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1700 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1594 fm.plain('\n')
1701 fm.plain('\n')
1595
1702
1596 def finddate(ui, repo, date):
1703 def finddate(ui, repo, date):
1597 """Find the tipmost changeset that matches the given date spec"""
1704 """Find the tipmost changeset that matches the given date spec"""
1598
1705
1599 df = util.matchdate(date)
1706 df = util.matchdate(date)
1600 m = scmutil.matchall(repo)
1707 m = scmutil.matchall(repo)
1601 results = {}
1708 results = {}
1602
1709
1603 def prep(ctx, fns):
1710 def prep(ctx, fns):
1604 d = ctx.date()
1711 d = ctx.date()
1605 if df(d[0]):
1712 if df(d[0]):
1606 results[ctx.rev()] = d
1713 results[ctx.rev()] = d
1607
1714
1608 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1715 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1609 rev = ctx.rev()
1716 rev = ctx.rev()
1610 if rev in results:
1717 if rev in results:
1611 ui.status(_("found revision %s from %s\n") %
1718 ui.status(_("found revision %s from %s\n") %
1612 (rev, util.datestr(results[rev])))
1719 (rev, util.datestr(results[rev])))
1613 return '%d' % rev
1720 return '%d' % rev
1614
1721
1615 raise error.Abort(_("revision matching date not found"))
1722 raise error.Abort(_("revision matching date not found"))
1616
1723
1617 def increasingwindows(windowsize=8, sizelimit=512):
1724 def increasingwindows(windowsize=8, sizelimit=512):
1618 while True:
1725 while True:
1619 yield windowsize
1726 yield windowsize
1620 if windowsize < sizelimit:
1727 if windowsize < sizelimit:
1621 windowsize *= 2
1728 windowsize *= 2
1622
1729
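# Illustrative sketch (not part of cmdutil.py): increasingwindows() above
# yields window sizes that double on every step until they reach sizelimit
# and then stay there. A standalone copy with a quick demonstration:
from itertools import islice

def _increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2

print(list(islice(_increasingwindows(), 9)))
# [8, 16, 32, 64, 128, 256, 512, 512, 512]
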
1623 class FileWalkError(Exception):
1730 class FileWalkError(Exception):
1624 pass
1731 pass
1625
1732
1626 def walkfilerevs(repo, match, follow, revs, fncache):
1733 def walkfilerevs(repo, match, follow, revs, fncache):
1627 '''Walks the file history for the matched files.
1734 '''Walks the file history for the matched files.
1628
1735
1629 Returns the changeset revs that are involved in the file history.
1736 Returns the changeset revs that are involved in the file history.
1630
1737
1631 Throws FileWalkError if the file history can't be walked using
1738 Throws FileWalkError if the file history can't be walked using
1632 filelogs alone.
1739 filelogs alone.
1633 '''
1740 '''
1634 wanted = set()
1741 wanted = set()
1635 copies = []
1742 copies = []
1636 minrev, maxrev = min(revs), max(revs)
1743 minrev, maxrev = min(revs), max(revs)
1637 def filerevgen(filelog, last):
1744 def filerevgen(filelog, last):
1638 """
1745 """
1639 Only files, no patterns. Check the history of each file.
1746 Only files, no patterns. Check the history of each file.
1640
1747
1641 Examines filelog entries within the minrev, maxrev linkrev range.
1748 Examines filelog entries within the minrev, maxrev linkrev range.
1642 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1749 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1643 tuples in backwards order.
1750 tuples in backwards order.
1644 """
1751 """
1645 cl_count = len(repo)
1752 cl_count = len(repo)
1646 revs = []
1753 revs = []
1647 for j in xrange(0, last + 1):
1754 for j in xrange(0, last + 1):
1648 linkrev = filelog.linkrev(j)
1755 linkrev = filelog.linkrev(j)
1649 if linkrev < minrev:
1756 if linkrev < minrev:
1650 continue
1757 continue
1651 # only yield revs for which we have the changelog; this can
1758 # only yield revs for which we have the changelog; this can
1652 # happen while doing "hg log" during a pull or commit
1759 # happen while doing "hg log" during a pull or commit
1653 if linkrev >= cl_count:
1760 if linkrev >= cl_count:
1654 break
1761 break
1655
1762
1656 parentlinkrevs = []
1763 parentlinkrevs = []
1657 for p in filelog.parentrevs(j):
1764 for p in filelog.parentrevs(j):
1658 if p != nullrev:
1765 if p != nullrev:
1659 parentlinkrevs.append(filelog.linkrev(p))
1766 parentlinkrevs.append(filelog.linkrev(p))
1660 n = filelog.node(j)
1767 n = filelog.node(j)
1661 revs.append((linkrev, parentlinkrevs,
1768 revs.append((linkrev, parentlinkrevs,
1662 follow and filelog.renamed(n)))
1769 follow and filelog.renamed(n)))
1663
1770
1664 return reversed(revs)
1771 return reversed(revs)
1665 def iterfiles():
1772 def iterfiles():
1666 pctx = repo['.']
1773 pctx = repo['.']
1667 for filename in match.files():
1774 for filename in match.files():
1668 if follow:
1775 if follow:
1669 if filename not in pctx:
1776 if filename not in pctx:
1670 raise error.Abort(_('cannot follow file not in parent '
1777 raise error.Abort(_('cannot follow file not in parent '
1671 'revision: "%s"') % filename)
1778 'revision: "%s"') % filename)
1672 yield filename, pctx[filename].filenode()
1779 yield filename, pctx[filename].filenode()
1673 else:
1780 else:
1674 yield filename, None
1781 yield filename, None
1675 for filename_node in copies:
1782 for filename_node in copies:
1676 yield filename_node
1783 yield filename_node
1677
1784
1678 for file_, node in iterfiles():
1785 for file_, node in iterfiles():
1679 filelog = repo.file(file_)
1786 filelog = repo.file(file_)
1680 if not len(filelog):
1787 if not len(filelog):
1681 if node is None:
1788 if node is None:
1682 # A zero count may be a directory or deleted file, so
1789 # A zero count may be a directory or deleted file, so
1683 # try to find matching entries on the slow path.
1790 # try to find matching entries on the slow path.
1684 if follow:
1791 if follow:
1685 raise error.Abort(
1792 raise error.Abort(
1686 _('cannot follow nonexistent file: "%s"') % file_)
1793 _('cannot follow nonexistent file: "%s"') % file_)
1687 raise FileWalkError("Cannot walk via filelog")
1794 raise FileWalkError("Cannot walk via filelog")
1688 else:
1795 else:
1689 continue
1796 continue
1690
1797
1691 if node is None:
1798 if node is None:
1692 last = len(filelog) - 1
1799 last = len(filelog) - 1
1693 else:
1800 else:
1694 last = filelog.rev(node)
1801 last = filelog.rev(node)
1695
1802
1696 # keep track of all ancestors of the file
1803 # keep track of all ancestors of the file
1697 ancestors = {filelog.linkrev(last)}
1804 ancestors = {filelog.linkrev(last)}
1698
1805
1699 # iterate from latest to oldest revision
1806 # iterate from latest to oldest revision
1700 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1807 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1701 if not follow:
1808 if not follow:
1702 if rev > maxrev:
1809 if rev > maxrev:
1703 continue
1810 continue
1704 else:
1811 else:
1705 # Note that last might not be the first interesting
1812 # Note that last might not be the first interesting
1706 # rev to us:
1813 # rev to us:
1707 # if the file has been changed after maxrev, we'll
1814 # if the file has been changed after maxrev, we'll
1708 # have linkrev(last) > maxrev, and we still need
1815 # have linkrev(last) > maxrev, and we still need
1709 # to explore the file graph
1816 # to explore the file graph
1710 if rev not in ancestors:
1817 if rev not in ancestors:
1711 continue
1818 continue
1712 # XXX insert 1327 fix here
1819 # XXX insert 1327 fix here
1713 if flparentlinkrevs:
1820 if flparentlinkrevs:
1714 ancestors.update(flparentlinkrevs)
1821 ancestors.update(flparentlinkrevs)
1715
1822
1716 fncache.setdefault(rev, []).append(file_)
1823 fncache.setdefault(rev, []).append(file_)
1717 wanted.add(rev)
1824 wanted.add(rev)
1718 if copied:
1825 if copied:
1719 copies.append(copied)
1826 copies.append(copied)
1720
1827
1721 return wanted
1828 return wanted
1722
1829
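# Illustrative sketch (not part of cmdutil.py): on its fast path,
# walkfilerevs() above reads each matched file's filelog and collects the
# changelog revisions (linkrevs) that touched the file, filling both the
# wanted set and the fncache mapping. A heavily reduced version of that
# idea, with a filelog modelled as just a list of linkrevs and every number
# invented for illustration:
fakefilelogs = {
    'a.txt': [0, 2, 5, 9],
    'b.txt': [1, 5, 7],
}

def wantedrevs(files, minrev, maxrev):
    wanted = set()
    fncache = {}
    for f in files:
        for linkrev in fakefilelogs[f]:
            if minrev <= linkrev <= maxrev:
                wanted.add(linkrev)
                fncache.setdefault(linkrev, []).append(f)
    return wanted, fncache

wanted, fncache = wantedrevs(['a.txt', 'b.txt'], 1, 7)
print(sorted(wanted))  # [1, 2, 5, 7]
print(fncache[5])      # ['a.txt', 'b.txt']
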
1723 class _followfilter(object):
1830 class _followfilter(object):
1724 def __init__(self, repo, onlyfirst=False):
1831 def __init__(self, repo, onlyfirst=False):
1725 self.repo = repo
1832 self.repo = repo
1726 self.startrev = nullrev
1833 self.startrev = nullrev
1727 self.roots = set()
1834 self.roots = set()
1728 self.onlyfirst = onlyfirst
1835 self.onlyfirst = onlyfirst
1729
1836
1730 def match(self, rev):
1837 def match(self, rev):
1731 def realparents(rev):
1838 def realparents(rev):
1732 if self.onlyfirst:
1839 if self.onlyfirst:
1733 return self.repo.changelog.parentrevs(rev)[0:1]
1840 return self.repo.changelog.parentrevs(rev)[0:1]
1734 else:
1841 else:
1735 return filter(lambda x: x != nullrev,
1842 return filter(lambda x: x != nullrev,
1736 self.repo.changelog.parentrevs(rev))
1843 self.repo.changelog.parentrevs(rev))
1737
1844
1738 if self.startrev == nullrev:
1845 if self.startrev == nullrev:
1739 self.startrev = rev
1846 self.startrev = rev
1740 return True
1847 return True
1741
1848
1742 if rev > self.startrev:
1849 if rev > self.startrev:
1743 # forward: all descendants
1850 # forward: all descendants
1744 if not self.roots:
1851 if not self.roots:
1745 self.roots.add(self.startrev)
1852 self.roots.add(self.startrev)
1746 for parent in realparents(rev):
1853 for parent in realparents(rev):
1747 if parent in self.roots:
1854 if parent in self.roots:
1748 self.roots.add(rev)
1855 self.roots.add(rev)
1749 return True
1856 return True
1750 else:
1857 else:
1751 # backwards: all parents
1858 # backwards: all parents
1752 if not self.roots:
1859 if not self.roots:
1753 self.roots.update(realparents(self.startrev))
1860 self.roots.update(realparents(self.startrev))
1754 if rev in self.roots:
1861 if rev in self.roots:
1755 self.roots.remove(rev)
1862 self.roots.remove(rev)
1756 self.roots.update(realparents(rev))
1863 self.roots.update(realparents(rev))
1757 return True
1864 return True
1758
1865
1759 return False
1866 return False
1760
1867
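# Illustrative sketch (not part of cmdutil.py): when walking from newer to
# older revisions, _followfilter above keeps a moving "roots" set of pending
# ancestors of the first revision it saw and lets a revision through only if
# it is in that set. A standalone version of the backwards (ancestors) case
# over a tiny invented history described by a parent table:
NULL = -1
parentrevs = {4: (2, NULL), 3: (0, NULL), 2: (1, NULL),
              1: (0, NULL), 0: (NULL, NULL)}

class backwardsfilter(object):
    def __init__(self):
        self.startrev = None
        self.roots = set()

    def match(self, rev):
        if self.startrev is None:
            self.startrev = rev
            return True
        if not self.roots:
            self.roots.update(p for p in parentrevs[self.startrev]
                              if p != NULL)
        if rev in self.roots:
            # consume the pending ancestor and queue its own parents
            self.roots.remove(rev)
            self.roots.update(p for p in parentrevs[rev] if p != NULL)
            return True
        return False

ff = backwardsfilter()
print([r for r in (4, 3, 2, 1, 0) if ff.match(r)])
# [4, 2, 1, 0] -- rev 3 is not an ancestor of rev 4 in this toy graph
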
1761 def walkchangerevs(repo, match, opts, prepare):
1868 def walkchangerevs(repo, match, opts, prepare):
1762 '''Iterate over files and the revs in which they changed.
1869 '''Iterate over files and the revs in which they changed.
1763
1870
1764 Callers most commonly need to iterate backwards over the history
1871 Callers most commonly need to iterate backwards over the history
1765 in which they are interested. Doing so has awful (quadratic-looking)
1872 in which they are interested. Doing so has awful (quadratic-looking)
1766 performance, so we use iterators in a "windowed" way.
1873 performance, so we use iterators in a "windowed" way.
1767
1874
1768 We walk a window of revisions in the desired order. Within the
1875 We walk a window of revisions in the desired order. Within the
1769 window, we first walk forwards to gather data, then in the desired
1876 window, we first walk forwards to gather data, then in the desired
1770 order (usually backwards) to display it.
1877 order (usually backwards) to display it.
1771
1878
1772 This function returns an iterator yielding contexts. Before
1879 This function returns an iterator yielding contexts. Before
1773 yielding each context, the iterator will first call the prepare
1880 yielding each context, the iterator will first call the prepare
1774 function on each context in the window in forward order.'''
1881 function on each context in the window in forward order.'''
1775
1882
1776 follow = opts.get('follow') or opts.get('follow_first')
1883 follow = opts.get('follow') or opts.get('follow_first')
1777 revs = _logrevs(repo, opts)
1884 revs = _logrevs(repo, opts)
1778 if not revs:
1885 if not revs:
1779 return []
1886 return []
1780 wanted = set()
1887 wanted = set()
1781 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1888 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1782 opts.get('removed'))
1889 opts.get('removed'))
1783 fncache = {}
1890 fncache = {}
1784 change = repo.changectx
1891 change = repo.changectx
1785
1892
1786 # First step is to fill wanted, the set of revisions that we want to yield.
1893 # First step is to fill wanted, the set of revisions that we want to yield.
1787 # When it does not induce extra cost, we also fill fncache for revisions in
1894 # When it does not induce extra cost, we also fill fncache for revisions in
1788 # wanted: a cache of filenames that were changed (ctx.files()) and that
1895 # wanted: a cache of filenames that were changed (ctx.files()) and that
1789 # match the file filtering conditions.
1896 # match the file filtering conditions.
1790
1897
1791 if match.always():
1898 if match.always():
1792 # No files, no patterns. Display all revs.
1899 # No files, no patterns. Display all revs.
1793 wanted = revs
1900 wanted = revs
1794 elif not slowpath:
1901 elif not slowpath:
1795 # We only have to read through the filelog to find wanted revisions
1902 # We only have to read through the filelog to find wanted revisions
1796
1903
1797 try:
1904 try:
1798 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1905 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1799 except FileWalkError:
1906 except FileWalkError:
1800 slowpath = True
1907 slowpath = True
1801
1908
1802 # We decided to fall back to the slowpath because at least one
1909 # We decided to fall back to the slowpath because at least one
1803 # of the paths was not a file. Check to see if at least one of them
1910 # of the paths was not a file. Check to see if at least one of them
1804 # existed in history; otherwise, simply return
1911 # existed in history; otherwise, simply return
1805 for path in match.files():
1912 for path in match.files():
1806 if path == '.' or path in repo.store:
1913 if path == '.' or path in repo.store:
1807 break
1914 break
1808 else:
1915 else:
1809 return []
1916 return []
1810
1917
1811 if slowpath:
1918 if slowpath:
1812 # We have to read the changelog to match filenames against
1919 # We have to read the changelog to match filenames against
1813 # changed files
1920 # changed files
1814
1921
1815 if follow:
1922 if follow:
1816 raise error.Abort(_('can only follow copies/renames for explicit '
1923 raise error.Abort(_('can only follow copies/renames for explicit '
1817 'filenames'))
1924 'filenames'))
1818
1925
1819 # The slow path checks files modified in every changeset.
1926 # The slow path checks files modified in every changeset.
1820 # This is really slow on large repos, so compute the set lazily.
1927 # This is really slow on large repos, so compute the set lazily.
1821 class lazywantedset(object):
1928 class lazywantedset(object):
1822 def __init__(self):
1929 def __init__(self):
1823 self.set = set()
1930 self.set = set()
1824 self.revs = set(revs)
1931 self.revs = set(revs)
1825
1932
1826 # No need to worry about locality here because it will be accessed
1933 # No need to worry about locality here because it will be accessed
1827 # in the same order as the increasing window below.
1934 # in the same order as the increasing window below.
1828 def __contains__(self, value):
1935 def __contains__(self, value):
1829 if value in self.set:
1936 if value in self.set:
1830 return True
1937 return True
1831 elif not value in self.revs:
1938 elif not value in self.revs:
1832 return False
1939 return False
1833 else:
1940 else:
1834 self.revs.discard(value)
1941 self.revs.discard(value)
1835 ctx = change(value)
1942 ctx = change(value)
1836 matches = filter(match, ctx.files())
1943 matches = filter(match, ctx.files())
1837 if matches:
1944 if matches:
1838 fncache[value] = matches
1945 fncache[value] = matches
1839 self.set.add(value)
1946 self.set.add(value)
1840 return True
1947 return True
1841 return False
1948 return False
1842
1949
1843 def discard(self, value):
1950 def discard(self, value):
1844 self.revs.discard(value)
1951 self.revs.discard(value)
1845 self.set.discard(value)
1952 self.set.discard(value)
1846
1953
1847 wanted = lazywantedset()
1954 wanted = lazywantedset()
1848
1955
1849 # it might be worthwhile to do this in the iterator if the rev range
1956 # it might be worthwhile to do this in the iterator if the rev range
1850 # is descending and the prune args are all within that range
1957 # is descending and the prune args are all within that range
1851 for rev in opts.get('prune', ()):
1958 for rev in opts.get('prune', ()):
1852 rev = repo[rev].rev()
1959 rev = repo[rev].rev()
1853 ff = _followfilter(repo)
1960 ff = _followfilter(repo)
1854 stop = min(revs[0], revs[-1])
1961 stop = min(revs[0], revs[-1])
1855 for x in xrange(rev, stop - 1, -1):
1962 for x in xrange(rev, stop - 1, -1):
1856 if ff.match(x):
1963 if ff.match(x):
1857 wanted = wanted - [x]
1964 wanted = wanted - [x]
1858
1965
1859 # Now that wanted is correctly initialized, we can iterate over the
1966 # Now that wanted is correctly initialized, we can iterate over the
1860 # revision range, yielding only revisions in wanted.
1967 # revision range, yielding only revisions in wanted.
1861 def iterate():
1968 def iterate():
1862 if follow and match.always():
1969 if follow and match.always():
1863 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1970 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1864 def want(rev):
1971 def want(rev):
1865 return ff.match(rev) and rev in wanted
1972 return ff.match(rev) and rev in wanted
1866 else:
1973 else:
1867 def want(rev):
1974 def want(rev):
1868 return rev in wanted
1975 return rev in wanted
1869
1976
1870 it = iter(revs)
1977 it = iter(revs)
1871 stopiteration = False
1978 stopiteration = False
1872 for windowsize in increasingwindows():
1979 for windowsize in increasingwindows():
1873 nrevs = []
1980 nrevs = []
1874 for i in xrange(windowsize):
1981 for i in xrange(windowsize):
1875 rev = next(it, None)
1982 rev = next(it, None)
1876 if rev is None:
1983 if rev is None:
1877 stopiteration = True
1984 stopiteration = True
1878 break
1985 break
1879 elif want(rev):
1986 elif want(rev):
1880 nrevs.append(rev)
1987 nrevs.append(rev)
1881 for rev in sorted(nrevs):
1988 for rev in sorted(nrevs):
1882 fns = fncache.get(rev)
1989 fns = fncache.get(rev)
1883 ctx = change(rev)
1990 ctx = change(rev)
1884 if not fns:
1991 if not fns:
1885 def fns_generator():
1992 def fns_generator():
1886 for f in ctx.files():
1993 for f in ctx.files():
1887 if match(f):
1994 if match(f):
1888 yield f
1995 yield f
1889 fns = fns_generator()
1996 fns = fns_generator()
1890 prepare(ctx, fns)
1997 prepare(ctx, fns)
1891 for rev in nrevs:
1998 for rev in nrevs:
1892 yield change(rev)
1999 yield change(rev)
1893
2000
1894 if stopiteration:
2001 if stopiteration:
1895 break
2002 break
1896
2003
1897 return iterate()
2004 return iterate()
1898
2005
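# Illustrative sketch (not part of cmdutil.py): the iterate() closure above
# consumes the revision iterator in growing windows, calls prepare() on each
# window in ascending order, and then yields the window in the requested
# order. The same two-pass-per-window pattern over a plain list, with
# invented revision numbers:
def windowed(revs, prepare, windowsizes=(2, 4, 8)):
    it = iter(revs)
    for size in windowsizes:
        window = []
        for _ in range(size):
            rev = next(it, None)
            if rev is None:
                break
            window.append(rev)
        for rev in sorted(window):
            prepare(rev)          # forward (ascending) order
        for rev in window:
            yield rev             # requested (here: descending) order
        if len(window) < size:
            break

prepared = []
out = list(windowed([9, 8, 7, 6, 5, 4], prepared.append))
print(prepared)  # [8, 9, 4, 5, 6, 7]
print(out)       # [9, 8, 7, 6, 5, 4]
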
1899 def _makefollowlogfilematcher(repo, files, followfirst):
2006 def _makefollowlogfilematcher(repo, files, followfirst):
1900 # When displaying a revision with --patch --follow FILE, we have
2007 # When displaying a revision with --patch --follow FILE, we have
1901 # to know which file of the revision must be diffed. With
2008 # to know which file of the revision must be diffed. With
1902 # --follow, we want the names of the ancestors of FILE in the
2009 # --follow, we want the names of the ancestors of FILE in the
1903 # revision, stored in "fcache". "fcache" is populated by
2010 # revision, stored in "fcache". "fcache" is populated by
1904 # reproducing the graph traversal already done by --follow revset
2011 # reproducing the graph traversal already done by --follow revset
1905 # and relating revs to file names (which is not "correct" but
2012 # and relating revs to file names (which is not "correct" but
1906 # good enough).
2013 # good enough).
1907 fcache = {}
2014 fcache = {}
1908 fcacheready = [False]
2015 fcacheready = [False]
1909 pctx = repo['.']
2016 pctx = repo['.']
1910
2017
1911 def populate():
2018 def populate():
1912 for fn in files:
2019 for fn in files:
1913 fctx = pctx[fn]
2020 fctx = pctx[fn]
1914 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2021 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
1915 for c in fctx.ancestors(followfirst=followfirst):
2022 for c in fctx.ancestors(followfirst=followfirst):
1916 fcache.setdefault(c.rev(), set()).add(c.path())
2023 fcache.setdefault(c.rev(), set()).add(c.path())
1917
2024
1918 def filematcher(rev):
2025 def filematcher(rev):
1919 if not fcacheready[0]:
2026 if not fcacheready[0]:
1920 # Lazy initialization
2027 # Lazy initialization
1921 fcacheready[0] = True
2028 fcacheready[0] = True
1922 populate()
2029 populate()
1923 return scmutil.matchfiles(repo, fcache.get(rev, []))
2030 return scmutil.matchfiles(repo, fcache.get(rev, []))
1924
2031
1925 return filematcher
2032 return filematcher
1926
2033
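# Illustrative sketch (not part of cmdutil.py): _makefollowlogfilematcher()
# above delays the expensive fcache population until the returned matcher is
# first called, using a one-element list (fcacheready) as a mutable flag the
# nested function can rewrite -- Python 2 has no 'nonlocal'. The same lazy
# initialization pattern with invented names and data:
def makelazymatcher(compute):
    cache = {}
    ready = [False]  # mutable cell shared with the closure

    def matcher(key):
        if not ready[0]:
            ready[0] = True
            cache.update(compute())  # populate once, on first use
        return cache.get(key, set())

    return matcher

calls = []
def compute():
    calls.append(1)
    return {1: {'a.txt'}, 2: {'a.txt', 'b.txt'}}

m = makelazymatcher(compute)
print(sorted(m(2)), sorted(m(1)), m(99))  # ['a.txt', 'b.txt'] ['a.txt'] set()
print(len(calls))                         # 1 -- compute() ran only once
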
1927 def _makenofollowlogfilematcher(repo, pats, opts):
2034 def _makenofollowlogfilematcher(repo, pats, opts):
1928 '''hook for extensions to override the filematcher for non-follow cases'''
2035 '''hook for extensions to override the filematcher for non-follow cases'''
1929 return None
2036 return None
1930
2037
1931 def _makelogrevset(repo, pats, opts, revs):
2038 def _makelogrevset(repo, pats, opts, revs):
1932 """Return (expr, filematcher) where expr is a revset string built
2039 """Return (expr, filematcher) where expr is a revset string built
1933 from log options and file patterns, or None. If --stat or --patch
2040 from log options and file patterns, or None. If --stat or --patch
1934 are not passed, filematcher is None. Otherwise it is a callable
2041 are not passed, filematcher is None. Otherwise it is a callable
1935 taking a revision number and returning a match object filtering
2042 taking a revision number and returning a match object filtering
1936 the files to be detailed when displaying the revision.
2043 the files to be detailed when displaying the revision.
1937 """
2044 """
1938 opt2revset = {
2045 opt2revset = {
1939 'no_merges': ('not merge()', None),
2046 'no_merges': ('not merge()', None),
1940 'only_merges': ('merge()', None),
2047 'only_merges': ('merge()', None),
1941 '_ancestors': ('ancestors(%(val)s)', None),
2048 '_ancestors': ('ancestors(%(val)s)', None),
1942 '_fancestors': ('_firstancestors(%(val)s)', None),
2049 '_fancestors': ('_firstancestors(%(val)s)', None),
1943 '_descendants': ('descendants(%(val)s)', None),
2050 '_descendants': ('descendants(%(val)s)', None),
1944 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2051 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1945 '_matchfiles': ('_matchfiles(%(val)s)', None),
2052 '_matchfiles': ('_matchfiles(%(val)s)', None),
1946 'date': ('date(%(val)r)', None),
2053 'date': ('date(%(val)r)', None),
1947 'branch': ('branch(%(val)r)', ' or '),
2054 'branch': ('branch(%(val)r)', ' or '),
1948 '_patslog': ('filelog(%(val)r)', ' or '),
2055 '_patslog': ('filelog(%(val)r)', ' or '),
1949 '_patsfollow': ('follow(%(val)r)', ' or '),
2056 '_patsfollow': ('follow(%(val)r)', ' or '),
1950 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2057 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1951 'keyword': ('keyword(%(val)r)', ' or '),
2058 'keyword': ('keyword(%(val)r)', ' or '),
1952 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2059 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1953 'user': ('user(%(val)r)', ' or '),
2060 'user': ('user(%(val)r)', ' or '),
1954 }
2061 }
1955
2062
1956 opts = dict(opts)
2063 opts = dict(opts)
1957 # follow or not follow?
2064 # follow or not follow?
1958 follow = opts.get('follow') or opts.get('follow_first')
2065 follow = opts.get('follow') or opts.get('follow_first')
1959 if opts.get('follow_first'):
2066 if opts.get('follow_first'):
1960 followfirst = 1
2067 followfirst = 1
1961 else:
2068 else:
1962 followfirst = 0
2069 followfirst = 0
1963 # --follow with FILE behavior depends on revs...
2070 # --follow with FILE behavior depends on revs...
1964 it = iter(revs)
2071 it = iter(revs)
1965 startrev = next(it)
2072 startrev = next(it)
1966 followdescendants = startrev < next(it, startrev)
2073 followdescendants = startrev < next(it, startrev)
1967
2074
1968 # branch and only_branch are really aliases and must be handled at
2075 # branch and only_branch are really aliases and must be handled at
1969 # the same time
2076 # the same time
1970 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2077 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1971 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2078 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1972 # pats/include/exclude are passed to match.match() directly in
2079 # pats/include/exclude are passed to match.match() directly in
1973 # _matchfiles() revset but walkchangerevs() builds its matcher with
2080 # _matchfiles() revset but walkchangerevs() builds its matcher with
1974 # scmutil.match(). The difference is that input pats are globbed on
2081 # scmutil.match(). The difference is that input pats are globbed on
1975 # platforms without shell expansion (Windows).
2082 # platforms without shell expansion (Windows).
1976 wctx = repo[None]
2083 wctx = repo[None]
1977 match, pats = scmutil.matchandpats(wctx, pats, opts)
2084 match, pats = scmutil.matchandpats(wctx, pats, opts)
1978 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2085 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1979 opts.get('removed'))
2086 opts.get('removed'))
1980 if not slowpath:
2087 if not slowpath:
1981 for f in match.files():
2088 for f in match.files():
1982 if follow and f not in wctx:
2089 if follow and f not in wctx:
1983 # If the file exists, it may be a directory, so let it
2090 # If the file exists, it may be a directory, so let it
1984 # take the slow path.
2091 # take the slow path.
1985 if os.path.exists(repo.wjoin(f)):
2092 if os.path.exists(repo.wjoin(f)):
1986 slowpath = True
2093 slowpath = True
1987 continue
2094 continue
1988 else:
2095 else:
1989 raise error.Abort(_('cannot follow file not in parent '
2096 raise error.Abort(_('cannot follow file not in parent '
1990 'revision: "%s"') % f)
2097 'revision: "%s"') % f)
1991 filelog = repo.file(f)
2098 filelog = repo.file(f)
1992 if not filelog:
2099 if not filelog:
1993 # A zero count may be a directory or deleted file, so
2100 # A zero count may be a directory or deleted file, so
1994 # try to find matching entries on the slow path.
2101 # try to find matching entries on the slow path.
1995 if follow:
2102 if follow:
1996 raise error.Abort(
2103 raise error.Abort(
1997 _('cannot follow nonexistent file: "%s"') % f)
2104 _('cannot follow nonexistent file: "%s"') % f)
1998 slowpath = True
2105 slowpath = True
1999
2106
2000 # We decided to fall back to the slowpath because at least one
2107 # We decided to fall back to the slowpath because at least one
2001 # of the paths was not a file. Check to see if at least one of them
2108 # of the paths was not a file. Check to see if at least one of them
2002 # existed in history - in that case, we'll continue down the
2109 # existed in history - in that case, we'll continue down the
2003 # slowpath; otherwise, we can turn off the slowpath
2110 # slowpath; otherwise, we can turn off the slowpath
2004 if slowpath:
2111 if slowpath:
2005 for path in match.files():
2112 for path in match.files():
2006 if path == '.' or path in repo.store:
2113 if path == '.' or path in repo.store:
2007 break
2114 break
2008 else:
2115 else:
2009 slowpath = False
2116 slowpath = False
2010
2117
2011 fpats = ('_patsfollow', '_patsfollowfirst')
2118 fpats = ('_patsfollow', '_patsfollowfirst')
2012 fnopats = (('_ancestors', '_fancestors'),
2119 fnopats = (('_ancestors', '_fancestors'),
2013 ('_descendants', '_fdescendants'))
2120 ('_descendants', '_fdescendants'))
2014 if slowpath:
2121 if slowpath:
2015 # See walkchangerevs() slow path.
2122 # See walkchangerevs() slow path.
2016 #
2123 #
2017 # pats/include/exclude cannot be represented as separate
2124 # pats/include/exclude cannot be represented as separate
2018 # revset expressions as their filtering logic applies at file
2125 # revset expressions as their filtering logic applies at file
2019 # level. For instance "-I a -X a" matches a revision touching
2126 # level. For instance "-I a -X a" matches a revision touching
2020 # "a" and "b" while "file(a) and not file(b)" does
2127 # "a" and "b" while "file(a) and not file(b)" does
2021 # not. Besides, filesets are evaluated against the working
2128 # not. Besides, filesets are evaluated against the working
2022 # directory.
2129 # directory.
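# For illustration: pats=['sub'] with -I '*.py' -X 'docs' builds roughly
# _matchfiles('r:', 'd:relpath', 'p:sub', 'i:*.py', 'x:docs').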
2023 matchargs = ['r:', 'd:relpath']
2130 matchargs = ['r:', 'd:relpath']
2024 for p in pats:
2131 for p in pats:
2025 matchargs.append('p:' + p)
2132 matchargs.append('p:' + p)
2026 for p in opts.get('include', []):
2133 for p in opts.get('include', []):
2027 matchargs.append('i:' + p)
2134 matchargs.append('i:' + p)
2028 for p in opts.get('exclude', []):
2135 for p in opts.get('exclude', []):
2029 matchargs.append('x:' + p)
2136 matchargs.append('x:' + p)
2030 matchargs = ','.join(('%r' % p) for p in matchargs)
2137 matchargs = ','.join(('%r' % p) for p in matchargs)
2031 opts['_matchfiles'] = matchargs
2138 opts['_matchfiles'] = matchargs
2032 if follow:
2139 if follow:
2033 opts[fnopats[0][followfirst]] = '.'
2140 opts[fnopats[0][followfirst]] = '.'
2034 else:
2141 else:
2035 if follow:
2142 if follow:
2036 if pats:
2143 if pats:
2037 # follow() revset interprets its file argument as a
2144 # follow() revset interprets its file argument as a
2038 # manifest entry, so use match.files(), not pats.
2145 # manifest entry, so use match.files(), not pats.
2039 opts[fpats[followfirst]] = list(match.files())
2146 opts[fpats[followfirst]] = list(match.files())
2040 else:
2147 else:
2041 op = fnopats[followdescendants][followfirst]
2148 op = fnopats[followdescendants][followfirst]
2042 opts[op] = 'rev(%d)' % startrev
2149 opts[op] = 'rev(%d)' % startrev
2043 else:
2150 else:
2044 opts['_patslog'] = list(pats)
2151 opts['_patslog'] = list(pats)
2045
2152
2046 filematcher = None
2153 filematcher = None
2047 if opts.get('patch') or opts.get('stat'):
2154 if opts.get('patch') or opts.get('stat'):
2048 # When following files, track renames via a special matcher.
2155 # When following files, track renames via a special matcher.
2049 # If we're forced to take the slowpath it means we're following
2156 # If we're forced to take the slowpath it means we're following
2050 # at least one pattern/directory, so don't bother with rename tracking.
2157 # at least one pattern/directory, so don't bother with rename tracking.
2051 if follow and not match.always() and not slowpath:
2158 if follow and not match.always() and not slowpath:
2052 # _makefollowlogfilematcher expects its files argument to be
2159 # _makefollowlogfilematcher expects its files argument to be
2053 # relative to the repo root, so use match.files(), not pats.
2160 # relative to the repo root, so use match.files(), not pats.
2054 filematcher = _makefollowlogfilematcher(repo, match.files(),
2161 filematcher = _makefollowlogfilematcher(repo, match.files(),
2055 followfirst)
2162 followfirst)
2056 else:
2163 else:
2057 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2164 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2058 if filematcher is None:
2165 if filematcher is None:
2059 filematcher = lambda rev: match
2166 filematcher = lambda rev: match
2060
2167
2061 expr = []
2168 expr = []
2062 for op, val in sorted(opts.iteritems()):
2169 for op, val in sorted(opts.iteritems()):
2063 if not val:
2170 if not val:
2064 continue
2171 continue
2065 if op not in opt2revset:
2172 if op not in opt2revset:
2066 continue
2173 continue
2067 revop, andor = opt2revset[op]
2174 revop, andor = opt2revset[op]
2068 if '%(val)' not in revop:
2175 if '%(val)' not in revop:
2069 expr.append(revop)
2176 expr.append(revop)
2070 else:
2177 else:
2071 if not isinstance(val, list):
2178 if not isinstance(val, list):
2072 e = revop % {'val': val}
2179 e = revop % {'val': val}
2073 else:
2180 else:
2074 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2181 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2075 expr.append(e)
2182 expr.append(e)
2076
2183
2077 if expr:
2184 if expr:
2078 expr = '(' + ' and '.join(expr) + ')'
2185 expr = '(' + ' and '.join(expr) + ')'
2079 else:
2186 else:
2080 expr = None
2187 expr = None
2081 return expr, filematcher
2188 return expr, filematcher
2082
2189
2083 def _logrevs(repo, opts):
2190 def _logrevs(repo, opts):
2084 # Default --rev value depends on --follow but --follow behavior
2191 # Default --rev value depends on --follow but --follow behavior
2085 # depends on revisions resolved from --rev...
2192 # depends on revisions resolved from --rev...
2086 follow = opts.get('follow') or opts.get('follow_first')
2193 follow = opts.get('follow') or opts.get('follow_first')
2087 if opts.get('rev'):
2194 if opts.get('rev'):
2088 revs = scmutil.revrange(repo, opts['rev'])
2195 revs = scmutil.revrange(repo, opts['rev'])
2089 elif follow and repo.dirstate.p1() == nullid:
2196 elif follow and repo.dirstate.p1() == nullid:
2090 revs = smartset.baseset()
2197 revs = smartset.baseset()
2091 elif follow:
2198 elif follow:
2092 revs = repo.revs('reverse(:.)')
2199 revs = repo.revs('reverse(:.)')
2093 else:
2200 else:
2094 revs = smartset.spanset(repo)
2201 revs = smartset.spanset(repo)
2095 revs.reverse()
2202 revs.reverse()
2096 return revs
2203 return revs
2097
2204
2098 def getgraphlogrevs(repo, pats, opts):
2205 def getgraphlogrevs(repo, pats, opts):
2099 """Return (revs, expr, filematcher) where revs is an iterable of
2206 """Return (revs, expr, filematcher) where revs is an iterable of
2100 revision numbers, expr is a revset string built from log options
2207 revision numbers, expr is a revset string built from log options
2101 and file patterns (or None), and is used to filter 'revs'. If --stat or
2208 and file patterns (or None), and is used to filter 'revs'. If --stat or
2102 --patch are not passed, filematcher is None. Otherwise it is a
2209 --patch are not passed, filematcher is None. Otherwise it is a
2103 callable taking a revision number and returning a match object
2210 callable taking a revision number and returning a match object
2104 filtering the files to be detailed when displaying the revision.
2211 filtering the files to be detailed when displaying the revision.
2105 """
2212 """
2106 limit = loglimit(opts)
2213 limit = loglimit(opts)
2107 revs = _logrevs(repo, opts)
2214 revs = _logrevs(repo, opts)
2108 if not revs:
2215 if not revs:
2109 return smartset.baseset(), None, None
2216 return smartset.baseset(), None, None
2110 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2217 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2111 if opts.get('rev'):
2218 if opts.get('rev'):
2112 # User-specified revs might be unsorted, but don't sort before
2219 # User-specified revs might be unsorted, but don't sort before
2113 # _makelogrevset because it might depend on the order of revs
2220 # _makelogrevset because it might depend on the order of revs
2114 if not (revs.isdescending() or revs.istopo()):
2221 if not (revs.isdescending() or revs.istopo()):
2115 revs.sort(reverse=True)
2222 revs.sort(reverse=True)
2116 if expr:
2223 if expr:
2117 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2224 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2118 revs = matcher(repo, revs)
2225 revs = matcher(repo, revs)
2119 if limit is not None:
2226 if limit is not None:
2120 limitedrevs = []
2227 limitedrevs = []
2121 for idx, rev in enumerate(revs):
2228 for idx, rev in enumerate(revs):
2122 if idx >= limit:
2229 if idx >= limit:
2123 break
2230 break
2124 limitedrevs.append(rev)
2231 limitedrevs.append(rev)
2125 revs = smartset.baseset(limitedrevs)
2232 revs = smartset.baseset(limitedrevs)
2126
2233
2127 return revs, expr, filematcher
2234 return revs, expr, filematcher
2128
2235
2129 def getlogrevs(repo, pats, opts):
2236 def getlogrevs(repo, pats, opts):
2130 """Return (revs, expr, filematcher) where revs is an iterable of
2237 """Return (revs, expr, filematcher) where revs is an iterable of
2131 revision numbers, expr is a revset string built from log options
2238 revision numbers, expr is a revset string built from log options
2132 and file patterns (or None), and is used to filter 'revs'. If --stat or
2239 and file patterns (or None), and is used to filter 'revs'. If --stat or
2133 --patch are not passed, filematcher is None. Otherwise it is a
2240 --patch are not passed, filematcher is None. Otherwise it is a
2134 callable taking a revision number and returning a match object
2241 callable taking a revision number and returning a match object
2135 filtering the files to be detailed when displaying the revision.
2242 filtering the files to be detailed when displaying the revision.
2136 """
2243 """
2137 limit = loglimit(opts)
2244 limit = loglimit(opts)
2138 revs = _logrevs(repo, opts)
2245 revs = _logrevs(repo, opts)
2139 if not revs:
2246 if not revs:
2140 return smartset.baseset([]), None, None
2247 return smartset.baseset([]), None, None
2141 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2248 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2142 if expr:
2249 if expr:
2143 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2250 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2144 revs = matcher(repo, revs)
2251 revs = matcher(repo, revs)
2145 if limit is not None:
2252 if limit is not None:
2146 limitedrevs = []
2253 limitedrevs = []
2147 for idx, r in enumerate(revs):
2254 for idx, r in enumerate(revs):
2148 if limit <= idx:
2255 if limit <= idx:
2149 break
2256 break
2150 limitedrevs.append(r)
2257 limitedrevs.append(r)
2151 revs = smartset.baseset(limitedrevs)
2258 revs = smartset.baseset(limitedrevs)
2152
2259
2153 return revs, expr, filematcher
2260 return revs, expr, filematcher
2154
2261
2155 def _graphnodeformatter(ui, displayer):
2262 def _graphnodeformatter(ui, displayer):
2156 spec = ui.config('ui', 'graphnodetemplate')
2263 spec = ui.config('ui', 'graphnodetemplate')
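# e.g. a config such as
#   [ui] graphnodetemplate = {ifcontains(rev, revset('.'), '@', graphnode)}
# would mark the working-directory parent with '@' (illustrative setting only).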
2157 if not spec:
2264 if not spec:
2158 return templatekw.showgraphnode # fast path for "{graphnode}"
2265 return templatekw.showgraphnode # fast path for "{graphnode}"
2159
2266
2160 spec = templater.unquotestring(spec)
2267 spec = templater.unquotestring(spec)
2161 templ = formatter.gettemplater(ui, 'graphnode', spec)
2268 templ = formatter.gettemplater(ui, 'graphnode', spec)
2162 cache = {}
2269 cache = {}
2163 if isinstance(displayer, changeset_templater):
2270 if isinstance(displayer, changeset_templater):
2164 cache = displayer.cache # reuse cache of slow templates
2271 cache = displayer.cache # reuse cache of slow templates
2165 props = templatekw.keywords.copy()
2272 props = templatekw.keywords.copy()
2166 props['templ'] = templ
2273 props['templ'] = templ
2167 props['cache'] = cache
2274 props['cache'] = cache
2168 def formatnode(repo, ctx):
2275 def formatnode(repo, ctx):
2169 props['ctx'] = ctx
2276 props['ctx'] = ctx
2170 props['repo'] = repo
2277 props['repo'] = repo
2171 props['ui'] = repo.ui
2278 props['ui'] = repo.ui
2172 props['revcache'] = {}
2279 props['revcache'] = {}
2173 return templater.stringify(templ('graphnode', **props))
2280 return templater.stringify(templ('graphnode', **props))
2174 return formatnode
2281 return formatnode
2175
2282
2176 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2283 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2177 filematcher=None):
2284 filematcher=None):
2178 formatnode = _graphnodeformatter(ui, displayer)
2285 formatnode = _graphnodeformatter(ui, displayer)
2179 state = graphmod.asciistate()
2286 state = graphmod.asciistate()
2180 styles = state['styles']
2287 styles = state['styles']
2181
2288
2182 # only set graph styling if HGPLAIN is not set.
2289 # only set graph styling if HGPLAIN is not set.
2183 if ui.plain('graph'):
2290 if ui.plain('graph'):
2184 # set all edge styles to |, the default pre-3.8 behaviour
2291 # set all edge styles to |, the default pre-3.8 behaviour
2185 styles.update(dict.fromkeys(styles, '|'))
2292 styles.update(dict.fromkeys(styles, '|'))
2186 else:
2293 else:
2187 edgetypes = {
2294 edgetypes = {
2188 'parent': graphmod.PARENT,
2295 'parent': graphmod.PARENT,
2189 'grandparent': graphmod.GRANDPARENT,
2296 'grandparent': graphmod.GRANDPARENT,
2190 'missing': graphmod.MISSINGPARENT
2297 'missing': graphmod.MISSINGPARENT
2191 }
2298 }
2192 for name, key in edgetypes.items():
2299 for name, key in edgetypes.items():
2193 # experimental config: experimental.graphstyle.*
2300 # experimental config: experimental.graphstyle.*
2194 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2301 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2195 styles[key])
2302 styles[key])
2196 if not styles[key]:
2303 if not styles[key]:
2197 styles[key] = None
2304 styles[key] = None
2198
2305
2199 # experimental config: experimental.graphshorten
2306 # experimental config: experimental.graphshorten
2200 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2307 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2201
2308
2202 for rev, type, ctx, parents in dag:
2309 for rev, type, ctx, parents in dag:
2203 char = formatnode(repo, ctx)
2310 char = formatnode(repo, ctx)
2204 copies = None
2311 copies = None
2205 if getrenamed and ctx.rev():
2312 if getrenamed and ctx.rev():
2206 copies = []
2313 copies = []
2207 for fn in ctx.files():
2314 for fn in ctx.files():
2208 rename = getrenamed(fn, ctx.rev())
2315 rename = getrenamed(fn, ctx.rev())
2209 if rename:
2316 if rename:
2210 copies.append((fn, rename[0]))
2317 copies.append((fn, rename[0]))
2211 revmatchfn = None
2318 revmatchfn = None
2212 if filematcher is not None:
2319 if filematcher is not None:
2213 revmatchfn = filematcher(ctx.rev())
2320 revmatchfn = filematcher(ctx.rev())
2214 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2321 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2215 lines = displayer.hunk.pop(rev).split('\n')
2322 lines = displayer.hunk.pop(rev).split('\n')
2216 if not lines[-1]:
2323 if not lines[-1]:
2217 del lines[-1]
2324 del lines[-1]
2218 displayer.flush(ctx)
2325 displayer.flush(ctx)
2219 edges = edgefn(type, char, lines, state, rev, parents)
2326 edges = edgefn(type, char, lines, state, rev, parents)
2220 for type, char, lines, coldata in edges:
2327 for type, char, lines, coldata in edges:
2221 graphmod.ascii(ui, state, type, char, lines, coldata)
2328 graphmod.ascii(ui, state, type, char, lines, coldata)
2222 displayer.close()
2329 displayer.close()
2223
2330
2224 def graphlog(ui, repo, pats, opts):
2331 def graphlog(ui, repo, pats, opts):
2225 # Parameters are identical to log command ones
2332 # Parameters are identical to log command ones
2226 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2333 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2227 revdag = graphmod.dagwalker(repo, revs)
2334 revdag = graphmod.dagwalker(repo, revs)
2228
2335
2229 getrenamed = None
2336 getrenamed = None
2230 if opts.get('copies'):
2337 if opts.get('copies'):
2231 endrev = None
2338 endrev = None
2232 if opts.get('rev'):
2339 if opts.get('rev'):
2233 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2340 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2234 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2341 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2235
2342
2236 ui.pager('log')
2343 ui.pager('log')
2237 displayer = show_changeset(ui, repo, opts, buffered=True)
2344 displayer = show_changeset(ui, repo, opts, buffered=True)
2238 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2345 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2239 filematcher)
2346 filematcher)
2240
2347
2241 def checkunsupportedgraphflags(pats, opts):
2348 def checkunsupportedgraphflags(pats, opts):
2242 for op in ["newest_first"]:
2349 for op in ["newest_first"]:
2243 if op in opts and opts[op]:
2350 if op in opts and opts[op]:
2244 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2351 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2245 % op.replace("_", "-"))
2352 % op.replace("_", "-"))
2246
2353
2247 def graphrevs(repo, nodes, opts):
2354 def graphrevs(repo, nodes, opts):
2248 limit = loglimit(opts)
2355 limit = loglimit(opts)
2249 nodes.reverse()
2356 nodes.reverse()
2250 if limit is not None:
2357 if limit is not None:
2251 nodes = nodes[:limit]
2358 nodes = nodes[:limit]
2252 return graphmod.nodes(repo, nodes)
2359 return graphmod.nodes(repo, nodes)
2253
2360
2254 def add(ui, repo, match, prefix, explicitonly, **opts):
2361 def add(ui, repo, match, prefix, explicitonly, **opts):
2255 join = lambda f: os.path.join(prefix, f)
2362 join = lambda f: os.path.join(prefix, f)
2256 bad = []
2363 bad = []
2257
2364
2258 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2365 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2259 names = []
2366 names = []
2260 wctx = repo[None]
2367 wctx = repo[None]
2261 cca = None
2368 cca = None
2262 abort, warn = scmutil.checkportabilityalert(ui)
2369 abort, warn = scmutil.checkportabilityalert(ui)
2263 if abort or warn:
2370 if abort or warn:
2264 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2371 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2265
2372
2266 badmatch = matchmod.badmatch(match, badfn)
2373 badmatch = matchmod.badmatch(match, badfn)
2267 dirstate = repo.dirstate
2374 dirstate = repo.dirstate
2268 # We don't want to just call wctx.walk here, since it would return a lot of
2375 # We don't want to just call wctx.walk here, since it would return a lot of
2269 # clean files, which we aren't interested in, and doing so takes time.
2376 # clean files, which we aren't interested in, and doing so takes time.
2270 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2377 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2271 True, False, full=False)):
2378 True, False, full=False)):
2272 exact = match.exact(f)
2379 exact = match.exact(f)
2273 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2380 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2274 if cca:
2381 if cca:
2275 cca(f)
2382 cca(f)
2276 names.append(f)
2383 names.append(f)
2277 if ui.verbose or not exact:
2384 if ui.verbose or not exact:
2278 ui.status(_('adding %s\n') % match.rel(f))
2385 ui.status(_('adding %s\n') % match.rel(f))
2279
2386
2280 for subpath in sorted(wctx.substate):
2387 for subpath in sorted(wctx.substate):
2281 sub = wctx.sub(subpath)
2388 sub = wctx.sub(subpath)
2282 try:
2389 try:
2283 submatch = matchmod.subdirmatcher(subpath, match)
2390 submatch = matchmod.subdirmatcher(subpath, match)
2284 if opts.get(r'subrepos'):
2391 if opts.get(r'subrepos'):
2285 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2392 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2286 else:
2393 else:
2287 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2394 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2288 except error.LookupError:
2395 except error.LookupError:
2289 ui.status(_("skipping missing subrepository: %s\n")
2396 ui.status(_("skipping missing subrepository: %s\n")
2290 % join(subpath))
2397 % join(subpath))
2291
2398
2292 if not opts.get(r'dry_run'):
2399 if not opts.get(r'dry_run'):
2293 rejected = wctx.add(names, prefix)
2400 rejected = wctx.add(names, prefix)
2294 bad.extend(f for f in rejected if f in match.files())
2401 bad.extend(f for f in rejected if f in match.files())
2295 return bad
2402 return bad
2296
2403
2297 def addwebdirpath(repo, serverpath, webconf):
2404 def addwebdirpath(repo, serverpath, webconf):
2298 webconf[serverpath] = repo.root
2405 webconf[serverpath] = repo.root
2299 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2406 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2300
2407
2301 for r in repo.revs('filelog("path:.hgsub")'):
2408 for r in repo.revs('filelog("path:.hgsub")'):
2302 ctx = repo[r]
2409 ctx = repo[r]
2303 for subpath in ctx.substate:
2410 for subpath in ctx.substate:
2304 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2411 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2305
2412
2306 def forget(ui, repo, match, prefix, explicitonly):
2413 def forget(ui, repo, match, prefix, explicitonly):
2307 join = lambda f: os.path.join(prefix, f)
2414 join = lambda f: os.path.join(prefix, f)
2308 bad = []
2415 bad = []
2309 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2416 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2310 wctx = repo[None]
2417 wctx = repo[None]
2311 forgot = []
2418 forgot = []
2312
2419
2313 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2420 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2314 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2421 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2315 if explicitonly:
2422 if explicitonly:
2316 forget = [f for f in forget if match.exact(f)]
2423 forget = [f for f in forget if match.exact(f)]
2317
2424
2318 for subpath in sorted(wctx.substate):
2425 for subpath in sorted(wctx.substate):
2319 sub = wctx.sub(subpath)
2426 sub = wctx.sub(subpath)
2320 try:
2427 try:
2321 submatch = matchmod.subdirmatcher(subpath, match)
2428 submatch = matchmod.subdirmatcher(subpath, match)
2322 subbad, subforgot = sub.forget(submatch, prefix)
2429 subbad, subforgot = sub.forget(submatch, prefix)
2323 bad.extend([subpath + '/' + f for f in subbad])
2430 bad.extend([subpath + '/' + f for f in subbad])
2324 forgot.extend([subpath + '/' + f for f in subforgot])
2431 forgot.extend([subpath + '/' + f for f in subforgot])
2325 except error.LookupError:
2432 except error.LookupError:
2326 ui.status(_("skipping missing subrepository: %s\n")
2433 ui.status(_("skipping missing subrepository: %s\n")
2327 % join(subpath))
2434 % join(subpath))
2328
2435
2329 if not explicitonly:
2436 if not explicitonly:
2330 for f in match.files():
2437 for f in match.files():
2331 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2438 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2332 if f not in forgot:
2439 if f not in forgot:
2333 if repo.wvfs.exists(f):
2440 if repo.wvfs.exists(f):
2334 # Don't complain if the exact case match wasn't given.
2441 # Don't complain if the exact case match wasn't given.
2335 # But don't do this until after checking 'forgot', so
2442 # But don't do this until after checking 'forgot', so
2336 # that subrepo files aren't normalized, and this op is
2443 # that subrepo files aren't normalized, and this op is
2337 # purely from data cached by the status walk above.
2444 # purely from data cached by the status walk above.
2338 if repo.dirstate.normalize(f) in repo.dirstate:
2445 if repo.dirstate.normalize(f) in repo.dirstate:
2339 continue
2446 continue
2340 ui.warn(_('not removing %s: '
2447 ui.warn(_('not removing %s: '
2341 'file is already untracked\n')
2448 'file is already untracked\n')
2342 % match.rel(f))
2449 % match.rel(f))
2343 bad.append(f)
2450 bad.append(f)
2344
2451
2345 for f in forget:
2452 for f in forget:
2346 if ui.verbose or not match.exact(f):
2453 if ui.verbose or not match.exact(f):
2347 ui.status(_('removing %s\n') % match.rel(f))
2454 ui.status(_('removing %s\n') % match.rel(f))
2348
2455
2349 rejected = wctx.forget(forget, prefix)
2456 rejected = wctx.forget(forget, prefix)
2350 bad.extend(f for f in rejected if f in match.files())
2457 bad.extend(f for f in rejected if f in match.files())
2351 forgot.extend(f for f in forget if f not in rejected)
2458 forgot.extend(f for f in forget if f not in rejected)
2352 return bad, forgot
2459 return bad, forgot
2353
2460
2354 def files(ui, ctx, m, fm, fmt, subrepos):
2461 def files(ui, ctx, m, fm, fmt, subrepos):
2355 rev = ctx.rev()
2462 rev = ctx.rev()
2356 ret = 1
2463 ret = 1
2357 ds = ctx.repo().dirstate
2464 ds = ctx.repo().dirstate
2358
2465
2359 for f in ctx.matches(m):
2466 for f in ctx.matches(m):
2360 if rev is None and ds[f] == 'r':
2467 if rev is None and ds[f] == 'r':
2361 continue
2468 continue
2362 fm.startitem()
2469 fm.startitem()
2363 if ui.verbose:
2470 if ui.verbose:
2364 fc = ctx[f]
2471 fc = ctx[f]
2365 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2472 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2366 fm.data(abspath=f)
2473 fm.data(abspath=f)
2367 fm.write('path', fmt, m.rel(f))
2474 fm.write('path', fmt, m.rel(f))
2368 ret = 0
2475 ret = 0
2369
2476
2370 for subpath in sorted(ctx.substate):
2477 for subpath in sorted(ctx.substate):
2371 submatch = matchmod.subdirmatcher(subpath, m)
2478 submatch = matchmod.subdirmatcher(subpath, m)
2372 if (subrepos or m.exact(subpath) or any(submatch.files())):
2479 if (subrepos or m.exact(subpath) or any(submatch.files())):
2373 sub = ctx.sub(subpath)
2480 sub = ctx.sub(subpath)
2374 try:
2481 try:
2375 recurse = m.exact(subpath) or subrepos
2482 recurse = m.exact(subpath) or subrepos
2376 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2483 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2377 ret = 0
2484 ret = 0
2378 except error.LookupError:
2485 except error.LookupError:
2379 ui.status(_("skipping missing subrepository: %s\n")
2486 ui.status(_("skipping missing subrepository: %s\n")
2380 % m.abs(subpath))
2487 % m.abs(subpath))
2381
2488
2382 return ret
2489 return ret
2383
2490
2384 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2491 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2385 join = lambda f: os.path.join(prefix, f)
2492 join = lambda f: os.path.join(prefix, f)
2386 ret = 0
2493 ret = 0
2387 s = repo.status(match=m, clean=True)
2494 s = repo.status(match=m, clean=True)
2388 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2495 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2389
2496
2390 wctx = repo[None]
2497 wctx = repo[None]
2391
2498
2392 if warnings is None:
2499 if warnings is None:
2393 warnings = []
2500 warnings = []
2394 warn = True
2501 warn = True
2395 else:
2502 else:
2396 warn = False
2503 warn = False
2397
2504
2398 subs = sorted(wctx.substate)
2505 subs = sorted(wctx.substate)
2399 total = len(subs)
2506 total = len(subs)
2400 count = 0
2507 count = 0
2401 for subpath in subs:
2508 for subpath in subs:
2402 count += 1
2509 count += 1
2403 submatch = matchmod.subdirmatcher(subpath, m)
2510 submatch = matchmod.subdirmatcher(subpath, m)
2404 if subrepos or m.exact(subpath) or any(submatch.files()):
2511 if subrepos or m.exact(subpath) or any(submatch.files()):
2405 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2512 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2406 sub = wctx.sub(subpath)
2513 sub = wctx.sub(subpath)
2407 try:
2514 try:
2408 if sub.removefiles(submatch, prefix, after, force, subrepos,
2515 if sub.removefiles(submatch, prefix, after, force, subrepos,
2409 warnings):
2516 warnings):
2410 ret = 1
2517 ret = 1
2411 except error.LookupError:
2518 except error.LookupError:
2412 warnings.append(_("skipping missing subrepository: %s\n")
2519 warnings.append(_("skipping missing subrepository: %s\n")
2413 % join(subpath))
2520 % join(subpath))
2414 ui.progress(_('searching'), None)
2521 ui.progress(_('searching'), None)
2415
2522
2416 # warn about failure to delete explicit files/dirs
2523 # warn about failure to delete explicit files/dirs
2417 deleteddirs = util.dirs(deleted)
2524 deleteddirs = util.dirs(deleted)
2418 files = m.files()
2525 files = m.files()
2419 total = len(files)
2526 total = len(files)
2420 count = 0
2527 count = 0
2421 for f in files:
2528 for f in files:
2422 def insubrepo():
2529 def insubrepo():
2423 for subpath in wctx.substate:
2530 for subpath in wctx.substate:
2424 if f.startswith(subpath + '/'):
2531 if f.startswith(subpath + '/'):
2425 return True
2532 return True
2426 return False
2533 return False
2427
2534
2428 count += 1
2535 count += 1
2429 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2536 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2430 isdir = f in deleteddirs or wctx.hasdir(f)
2537 isdir = f in deleteddirs or wctx.hasdir(f)
2431 if (f in repo.dirstate or isdir or f == '.'
2538 if (f in repo.dirstate or isdir or f == '.'
2432 or insubrepo() or f in subs):
2539 or insubrepo() or f in subs):
2433 continue
2540 continue
2434
2541
2435 if repo.wvfs.exists(f):
2542 if repo.wvfs.exists(f):
2436 if repo.wvfs.isdir(f):
2543 if repo.wvfs.isdir(f):
2437 warnings.append(_('not removing %s: no tracked files\n')
2544 warnings.append(_('not removing %s: no tracked files\n')
2438 % m.rel(f))
2545 % m.rel(f))
2439 else:
2546 else:
2440 warnings.append(_('not removing %s: file is untracked\n')
2547 warnings.append(_('not removing %s: file is untracked\n')
2441 % m.rel(f))
2548 % m.rel(f))
2442 # missing files will generate a warning elsewhere
2549 # missing files will generate a warning elsewhere
2443 ret = 1
2550 ret = 1
2444 ui.progress(_('deleting'), None)
2551 ui.progress(_('deleting'), None)
2445
2552
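# Select what actually gets untracked: --force takes everything matched,
# --after only records files already deleted from disk, and the default case
# removes deleted and clean files while warning about modified/added ones.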
2446 if force:
2553 if force:
2447 list = modified + deleted + clean + added
2554 list = modified + deleted + clean + added
2448 elif after:
2555 elif after:
2449 list = deleted
2556 list = deleted
2450 remaining = modified + added + clean
2557 remaining = modified + added + clean
2451 total = len(remaining)
2558 total = len(remaining)
2452 count = 0
2559 count = 0
2453 for f in remaining:
2560 for f in remaining:
2454 count += 1
2561 count += 1
2455 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2562 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2456 warnings.append(_('not removing %s: file still exists\n')
2563 warnings.append(_('not removing %s: file still exists\n')
2457 % m.rel(f))
2564 % m.rel(f))
2458 ret = 1
2565 ret = 1
2459 ui.progress(_('skipping'), None)
2566 ui.progress(_('skipping'), None)
2460 else:
2567 else:
2461 list = deleted + clean
2568 list = deleted + clean
2462 total = len(modified) + len(added)
2569 total = len(modified) + len(added)
2463 count = 0
2570 count = 0
2464 for f in modified:
2571 for f in modified:
2465 count += 1
2572 count += 1
2466 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2573 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2467 warnings.append(_('not removing %s: file is modified (use -f'
2574 warnings.append(_('not removing %s: file is modified (use -f'
2468 ' to force removal)\n') % m.rel(f))
2575 ' to force removal)\n') % m.rel(f))
2469 ret = 1
2576 ret = 1
2470 for f in added:
2577 for f in added:
2471 count += 1
2578 count += 1
2472 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2579 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2473 warnings.append(_("not removing %s: file has been marked for add"
2580 warnings.append(_("not removing %s: file has been marked for add"
2474 " (use 'hg forget' to undo add)\n") % m.rel(f))
2581 " (use 'hg forget' to undo add)\n") % m.rel(f))
2475 ret = 1
2582 ret = 1
2476 ui.progress(_('skipping'), None)
2583 ui.progress(_('skipping'), None)
2477
2584
2478 list = sorted(list)
2585 list = sorted(list)
2479 total = len(list)
2586 total = len(list)
2480 count = 0
2587 count = 0
2481 for f in list:
2588 for f in list:
2482 count += 1
2589 count += 1
2483 if ui.verbose or not m.exact(f):
2590 if ui.verbose or not m.exact(f):
2484 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2591 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2485 ui.status(_('removing %s\n') % m.rel(f))
2592 ui.status(_('removing %s\n') % m.rel(f))
2486 ui.progress(_('deleting'), None)
2593 ui.progress(_('deleting'), None)
2487
2594
2488 with repo.wlock():
2595 with repo.wlock():
2489 if not after:
2596 if not after:
2490 for f in list:
2597 for f in list:
2491 if f in added:
2598 if f in added:
2492 continue # we never unlink added files on remove
2599 continue # we never unlink added files on remove
2493 repo.wvfs.unlinkpath(f, ignoremissing=True)
2600 repo.wvfs.unlinkpath(f, ignoremissing=True)
2494 repo[None].forget(list)
2601 repo[None].forget(list)
2495
2602
2496 if warn:
2603 if warn:
2497 for warning in warnings:
2604 for warning in warnings:
2498 ui.warn(warning)
2605 ui.warn(warning)
2499
2606
2500 return ret
2607 return ret
2501
2608
2502 def cat(ui, repo, ctx, matcher, prefix, **opts):
2609 def cat(ui, repo, ctx, matcher, prefix, **opts):
2503 err = 1
2610 err = 1
2504
2611
2505 def write(path):
2612 def write(path):
2506 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2613 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2507 pathname=os.path.join(prefix, path))
2614 pathname=os.path.join(prefix, path))
2508 data = ctx[path].data()
2615 data = ctx[path].data()
2509 if opts.get('decode'):
2616 if opts.get('decode'):
2510 data = repo.wwritedata(path, data)
2617 data = repo.wwritedata(path, data)
2511 fp.write(data)
2618 fp.write(data)
2512 fp.close()
2619 fp.close()
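# write(path) resolves --output via makefileobj (stdout when unset), reads the
# file data from ctx and optionally applies --decode filters before writing.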
2513
2620
2514 # Automation often uses hg cat on single files, so special case it
2621 # Automation often uses hg cat on single files, so special case it
2515 # for performance to avoid the cost of parsing the manifest.
2622 # for performance to avoid the cost of parsing the manifest.
2516 if len(matcher.files()) == 1 and not matcher.anypats():
2623 if len(matcher.files()) == 1 and not matcher.anypats():
2517 file = matcher.files()[0]
2624 file = matcher.files()[0]
2518 mfl = repo.manifestlog
2625 mfl = repo.manifestlog
2519 mfnode = ctx.manifestnode()
2626 mfnode = ctx.manifestnode()
2520 try:
2627 try:
2521 if mfnode and mfl[mfnode].find(file)[0]:
2628 if mfnode and mfl[mfnode].find(file)[0]:
2522 write(file)
2629 write(file)
2523 return 0
2630 return 0
2524 except KeyError:
2631 except KeyError:
2525 pass
2632 pass
2526
2633
2527 for abs in ctx.walk(matcher):
2634 for abs in ctx.walk(matcher):
2528 write(abs)
2635 write(abs)
2529 err = 0
2636 err = 0
2530
2637
2531 for subpath in sorted(ctx.substate):
2638 for subpath in sorted(ctx.substate):
2532 sub = ctx.sub(subpath)
2639 sub = ctx.sub(subpath)
2533 try:
2640 try:
2534 submatch = matchmod.subdirmatcher(subpath, matcher)
2641 submatch = matchmod.subdirmatcher(subpath, matcher)
2535
2642
2536 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2643 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2537 **opts):
2644 **opts):
2538 err = 0
2645 err = 0
2539 except error.RepoLookupError:
2646 except error.RepoLookupError:
2540 ui.status(_("skipping missing subrepository: %s\n")
2647 ui.status(_("skipping missing subrepository: %s\n")
2541 % os.path.join(prefix, subpath))
2648 % os.path.join(prefix, subpath))
2542
2649
2543 return err
2650 return err
2544
2651
2545 def commit(ui, repo, commitfunc, pats, opts):
2652 def commit(ui, repo, commitfunc, pats, opts):
2546 '''commit the specified files or all outstanding changes'''
2653 '''commit the specified files or all outstanding changes'''
2547 date = opts.get('date')
2654 date = opts.get('date')
2548 if date:
2655 if date:
2549 opts['date'] = util.parsedate(date)
2656 opts['date'] = util.parsedate(date)
2550 message = logmessage(ui, opts)
2657 message = logmessage(ui, opts)
2551 matcher = scmutil.match(repo[None], pats, opts)
2658 matcher = scmutil.match(repo[None], pats, opts)
2552
2659
2553 # extract addremove carefully -- this function can be called from a command
2660 # extract addremove carefully -- this function can be called from a command
2554 # that doesn't support addremove
2661 # that doesn't support addremove
2555 if opts.get('addremove'):
2662 if opts.get('addremove'):
2556 if scmutil.addremove(repo, matcher, "", opts) != 0:
2663 if scmutil.addremove(repo, matcher, "", opts) != 0:
2557 raise error.Abort(
2664 raise error.Abort(
2558 _("failed to mark all new/missing files as added/removed"))
2665 _("failed to mark all new/missing files as added/removed"))
2559
2666
2560 return commitfunc(ui, repo, message, matcher, opts)
2667 return commitfunc(ui, repo, message, matcher, opts)
2561
2668
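# samefile(): True when f has identical content and flags in ctx1 and ctx2, or
# is absent from both; amend uses it below to prune files that ended up
# unchanged relative to base.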
2562 def samefile(f, ctx1, ctx2):
2669 def samefile(f, ctx1, ctx2):
2563 if f in ctx1.manifest():
2670 if f in ctx1.manifest():
2564 a = ctx1.filectx(f)
2671 a = ctx1.filectx(f)
2565 if f in ctx2.manifest():
2672 if f in ctx2.manifest():
2566 b = ctx2.filectx(f)
2673 b = ctx2.filectx(f)
2567 return (not a.cmp(b)
2674 return (not a.cmp(b)
2568 and a.flags() == b.flags())
2675 and a.flags() == b.flags())
2569 else:
2676 else:
2570 return False
2677 return False
2571 else:
2678 else:
2572 return f not in ctx2.manifest()
2679 return f not in ctx2.manifest()
2573
2680
2574 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2681 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2575 # avoid cycle context -> subrepo -> cmdutil
2682 # avoid cycle context -> subrepo -> cmdutil
2576 from . import context
2683 from . import context
2577
2684
2578 # amend will reuse the existing user if not specified, but the obsolete
2685 # amend will reuse the existing user if not specified, but the obsolete
2579 # marker creation requires that the current user's name is specified.
2686 # marker creation requires that the current user's name is specified.
2580 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2687 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2581 ui.username() # raise exception if username not set
2688 ui.username() # raise exception if username not set
2582
2689
2583 ui.note(_('amending changeset %s\n') % old)
2690 ui.note(_('amending changeset %s\n') % old)
2584 base = old.p1()
2691 base = old.p1()
2585 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2692 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2586
2693
2587 wlock = lock = newid = None
2694 wlock = lock = newid = None
2588 try:
2695 try:
2589 wlock = repo.wlock()
2696 wlock = repo.wlock()
2590 lock = repo.lock()
2697 lock = repo.lock()
2591 with repo.transaction('amend') as tr:
2698 with repo.transaction('amend') as tr:
2592 # See if we got a message from -m or -l, if not, open the editor
2699 # See if we got a message from -m or -l, if not, open the editor
2593 # with the message of the changeset to amend
2700 # with the message of the changeset to amend
2594 message = logmessage(ui, opts)
2701 message = logmessage(ui, opts)
2595 # ensure logfile does not conflict with later enforcement of the
2702 # ensure logfile does not conflict with later enforcement of the
2596 # message. potential logfile content has been processed by
2703 # message. potential logfile content has been processed by
2597 # `logmessage` anyway.
2704 # `logmessage` anyway.
2598 opts.pop('logfile')
2705 opts.pop('logfile')
2599 # First, do a regular commit to record all changes in the working
2706 # First, do a regular commit to record all changes in the working
2600 # directory (if there are any)
2707 # directory (if there are any)
2601 ui.callhooks = False
2708 ui.callhooks = False
2602 activebookmark = repo._bookmarks.active
2709 activebookmark = repo._bookmarks.active
2603 try:
2710 try:
2604 repo._bookmarks.active = None
2711 repo._bookmarks.active = None
2605 opts['message'] = 'temporary amend commit for %s' % old
2712 opts['message'] = 'temporary amend commit for %s' % old
2606 node = commit(ui, repo, commitfunc, pats, opts)
2713 node = commit(ui, repo, commitfunc, pats, opts)
2607 finally:
2714 finally:
2608 repo._bookmarks.active = activebookmark
2715 repo._bookmarks.active = activebookmark
2609 repo._bookmarks.recordchange(tr)
2716 repo._bookmarks.recordchange(tr)
2610 ui.callhooks = True
2717 ui.callhooks = True
2611 ctx = repo[node]
2718 ctx = repo[node]
2612
2719
2613 # Participating changesets:
2720 # Participating changesets:
2614 #
2721 #
2615 # node/ctx o - new (intermediate) commit that contains changes
2722 # node/ctx o - new (intermediate) commit that contains changes
2616 # | from working dir to go into amending commit
2723 # | from working dir to go into amending commit
2617 # | (or a workingctx if there were no changes)
2724 # | (or a workingctx if there were no changes)
2618 # |
2725 # |
2619 # old o - changeset to amend
2726 # old o - changeset to amend
2620 # |
2727 # |
2621 # base o - parent of amending changeset
2728 # base o - parent of amending changeset
2622
2729
2623 # Update extra dict from amended commit (e.g. to preserve graft
2730 # Update extra dict from amended commit (e.g. to preserve graft
2624 # source)
2731 # source)
2625 extra.update(old.extra())
2732 extra.update(old.extra())
2626
2733
2627 # Also update it from the intermediate commit or from the wctx
2734 # Also update it from the intermediate commit or from the wctx
2628 extra.update(ctx.extra())
2735 extra.update(ctx.extra())
2629
2736
2630 if len(old.parents()) > 1:
2737 if len(old.parents()) > 1:
2631 # ctx.files() isn't reliable for merges, so fall back to the
2738 # ctx.files() isn't reliable for merges, so fall back to the
2632 # slower repo.status() method
2739 # slower repo.status() method
2633 files = set([fn for st in repo.status(base, old)[:3]
2740 files = set([fn for st in repo.status(base, old)[:3]
2634 for fn in st])
2741 for fn in st])
2635 else:
2742 else:
2636 files = set(old.files())
2743 files = set(old.files())
2637
2744
2638 # Second, we use either the commit we just did or, if there were no
2745 # Second, we use either the commit we just did or, if there were no
2639 # changes, the parent of the working directory as the version of the
2746 # changes, the parent of the working directory as the version of the
2640 # files in the final amend commit
2747 # files in the final amend commit
2641 if node:
2748 if node:
2642 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2749 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2643
2750
2644 user = ctx.user()
2751 user = ctx.user()
2645 date = ctx.date()
2752 date = ctx.date()
2646 # Recompute copies (avoid recording a -> b -> a)
2753 # Recompute copies (avoid recording a -> b -> a)
2647 copied = copies.pathcopies(base, ctx)
2754 copied = copies.pathcopies(base, ctx)
2648 if old.p2().node() != nullid:  # p2 is a method, so the bare attribute is always truthy
2755 if old.p2().node() != nullid:  # p2 is a method, so the bare attribute is always truthy
2649 copied.update(copies.pathcopies(old.p2(), ctx))
2756 copied.update(copies.pathcopies(old.p2(), ctx))
2650
2757
2651 # Prune files which were reverted by the updates: if old
2758 # Prune files which were reverted by the updates: if old
2652 # introduced file X and our intermediate commit, node,
2759 # introduced file X and our intermediate commit, node,
2653 # renamed that file, then those two files are the same and
2760 # renamed that file, then those two files are the same and
2654 # we can discard X from our list of files. Likewise if X
2761 # we can discard X from our list of files. Likewise if X
2655 # was deleted, it's no longer relevant
2762 # was deleted, it's no longer relevant
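# e.g. if old added X and the intermediate commit reverted X back to its state
# in base, samefile() is true for X and it drops out of the file list.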
2656 files.update(ctx.files())
2763 files.update(ctx.files())
2657 files = [f for f in files if not samefile(f, ctx, base)]
2764 files = [f for f in files if not samefile(f, ctx, base)]
2658
2765
2659 def filectxfn(repo, ctx_, path):
2766 def filectxfn(repo, ctx_, path):
2660 try:
2767 try:
2661 fctx = ctx[path]
2768 fctx = ctx[path]
2662 flags = fctx.flags()
2769 flags = fctx.flags()
2663 mctx = context.memfilectx(repo,
2770 mctx = context.memfilectx(repo,
2664 fctx.path(), fctx.data(),
2771 fctx.path(), fctx.data(),
2665 islink='l' in flags,
2772 islink='l' in flags,
2666 isexec='x' in flags,
2773 isexec='x' in flags,
2667 copied=copied.get(path))
2774 copied=copied.get(path))
2668 return mctx
2775 return mctx
2669 except KeyError:
2776 except KeyError:
2670 return None
2777 return None
2671 else:
2778 else:
2672 ui.note(_('copying changeset %s to %s\n') % (old, base))
2779 ui.note(_('copying changeset %s to %s\n') % (old, base))
2673
2780
2674 # Use version of files as in the old cset
2781 # Use version of files as in the old cset
2675 def filectxfn(repo, ctx_, path):
2782 def filectxfn(repo, ctx_, path):
2676 try:
2783 try:
2677 return old.filectx(path)
2784 return old.filectx(path)
2678 except KeyError:
2785 except KeyError:
2679 return None
2786 return None
2680
2787
2681 user = opts.get('user') or old.user()
2788 user = opts.get('user') or old.user()
2682 date = opts.get('date') or old.date()
2789 date = opts.get('date') or old.date()
2683 editform = mergeeditform(old, 'commit.amend')
2790 editform = mergeeditform(old, 'commit.amend')
2684 editor = getcommiteditor(editform=editform, **opts)
2791 editor = getcommiteditor(editform=editform, **opts)
2685 if not message:
2792 if not message:
2686 editor = getcommiteditor(edit=True, editform=editform)
2793 editor = getcommiteditor(edit=True, editform=editform)
2687 message = old.description()
2794 message = old.description()
2688
2795
2689 pureextra = extra.copy()
2796 pureextra = extra.copy()
2690 extra['amend_source'] = old.hex()
2797 extra['amend_source'] = old.hex()
2691
2798
2692 new = context.memctx(repo,
2799 new = context.memctx(repo,
2693 parents=[base.node(), old.p2().node()],
2800 parents=[base.node(), old.p2().node()],
2694 text=message,
2801 text=message,
2695 files=files,
2802 files=files,
2696 filectxfn=filectxfn,
2803 filectxfn=filectxfn,
2697 user=user,
2804 user=user,
2698 date=date,
2805 date=date,
2699 extra=extra,
2806 extra=extra,
2700 editor=editor)
2807 editor=editor)
2701
2808
2702 newdesc = changelog.stripdesc(new.description())
2809 newdesc = changelog.stripdesc(new.description())
2703 if ((not node)
2810 if ((not node)
2704 and newdesc == old.description()
2811 and newdesc == old.description()
2705 and user == old.user()
2812 and user == old.user()
2706 and date == old.date()
2813 and date == old.date()
2707 and pureextra == old.extra()):
2814 and pureextra == old.extra()):
2708 # nothing changed. continuing here would create a new node
2815 # nothing changed. continuing here would create a new node
2709 # anyway because of the amend_source noise.
2816 # anyway because of the amend_source noise.
2710 #
2817 #
2711 # This not what we expect from amend.
2818 # This not what we expect from amend.
2712 return old.node()
2819 return old.node()
2713
2820
2714 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2821 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2715 try:
2822 try:
2716 if opts.get('secret'):
2823 if opts.get('secret'):
2717 commitphase = 'secret'
2824 commitphase = 'secret'
2718 else:
2825 else:
2719 commitphase = old.phase()
2826 commitphase = old.phase()
2720 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2827 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2721 newid = repo.commitctx(new)
2828 newid = repo.commitctx(new)
2722 finally:
2829 finally:
2723 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2830 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2724 if newid != old.node():
2831 if newid != old.node():
2725 # Reroute the working copy parent to the new changeset
2832 # Reroute the working copy parent to the new changeset
2726 repo.setparents(newid, nullid)
2833 repo.setparents(newid, nullid)
2727
2834
2728 # Move bookmarks from old parent to amend commit
2835 # Move bookmarks from old parent to amend commit
2729 bms = repo.nodebookmarks(old.node())
2836 bms = repo.nodebookmarks(old.node())
2730 if bms:
2837 if bms:
2731 marks = repo._bookmarks
2838 marks = repo._bookmarks
2732 for bm in bms:
2839 for bm in bms:
2733 ui.debug('moving bookmarks %r from %s to %s\n' %
2840 ui.debug('moving bookmarks %r from %s to %s\n' %
2734 (marks, old.hex(), hex(newid)))
2841 (marks, old.hex(), hex(newid)))
2735 marks[bm] = newid
2842 marks[bm] = newid
2736 marks.recordchange(tr)
2843 marks.recordchange(tr)
2737 # commit the whole amend process
2844 # commit the whole amend process
2738 if createmarkers:
2845 if createmarkers:
2739 # mark the new changeset as successor of the rewritten one
2846 # mark the new changeset as successor of the rewritten one
2740 new = repo[newid]
2847 new = repo[newid]
2741 obs = [(old, (new,))]
2848 obs = [(old, (new,))]
2742 if node:
2849 if node:
2743 obs.append((ctx, ()))
2850 obs.append((ctx, ()))
2744
2851
2745 obsolete.createmarkers(repo, obs, operation='amend')
2852 obsolete.createmarkers(repo, obs, operation='amend')
2746 if not createmarkers and newid != old.node():
2853 if not createmarkers and newid != old.node():
2747 # Strip the intermediate commit (if there was one) and the amended
2854 # Strip the intermediate commit (if there was one) and the amended
2748 # commit
2855 # commit
2749 if node:
2856 if node:
2750 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2857 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2751 ui.note(_('stripping amended changeset %s\n') % old)
2858 ui.note(_('stripping amended changeset %s\n') % old)
2752 repair.strip(ui, repo, old.node(), topic='amend-backup')
2859 repair.strip(ui, repo, old.node(), topic='amend-backup')
2753 finally:
2860 finally:
2754 lockmod.release(lock, wlock)
2861 lockmod.release(lock, wlock)
2755 return newid
2862 return newid
2756
2863
2757 def commiteditor(repo, ctx, subs, editform=''):
2864 def commiteditor(repo, ctx, subs, editform=''):
2758 if ctx.description():
2865 if ctx.description():
2759 return ctx.description()
2866 return ctx.description()
2760 return commitforceeditor(repo, ctx, subs, editform=editform,
2867 return commitforceeditor(repo, ctx, subs, editform=editform,
2761 unchangedmessagedetection=True)
2868 unchangedmessagedetection=True)
2762
2869
2763 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2870 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2764 editform='', unchangedmessagedetection=False):
2871 editform='', unchangedmessagedetection=False):
2765 if not extramsg:
2872 if not extramsg:
2766 extramsg = _("Leave message empty to abort commit.")
2873 extramsg = _("Leave message empty to abort commit.")
2767
2874
2768 forms = [e for e in editform.split('.') if e]
2875 forms = [e for e in editform.split('.') if e]
2769 forms.insert(0, 'changeset')
2876 forms.insert(0, 'changeset')
2770 templatetext = None
2877 templatetext = None
2771 while forms:
2878 while forms:
2772 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2879 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2773 if tmpl:
2880 if tmpl:
2774 tmpl = templater.unquotestring(tmpl)
2881 tmpl = templater.unquotestring(tmpl)
2775 templatetext = committext = buildcommittemplate(
2882 templatetext = committext = buildcommittemplate(
2776 repo, ctx, subs, extramsg, tmpl)
2883 repo, ctx, subs, extramsg, tmpl)
2777 break
2884 break
2778 forms.pop()
2885 forms.pop()
2779 else:
2886 else:
2780 committext = buildcommittext(repo, ctx, subs, extramsg)
2887 committext = buildcommittext(repo, ctx, subs, extramsg)
2781
2888
2782 # run editor in the repository root
2889 # run editor in the repository root
2783 olddir = pycompat.getcwd()
2890 olddir = pycompat.getcwd()
2784 os.chdir(repo.root)
2891 os.chdir(repo.root)
2785
2892
2786 # make in-memory changes visible to external process
2893 # make in-memory changes visible to external process
2787 tr = repo.currenttransaction()
2894 tr = repo.currenttransaction()
2788 repo.dirstate.write(tr)
2895 repo.dirstate.write(tr)
2789 pending = tr and tr.writepending() and repo.root
2896 pending = tr and tr.writepending() and repo.root
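# writepending() flushes the in-progress transaction so external processes can
# see it; passing the repo root as 'pending' lets the spawned editor (and any
# hooks it triggers) read that pending state via the HG_PENDING environment variable.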
2790
2897
2791 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2898 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2792 editform=editform, pending=pending,
2899 editform=editform, pending=pending,
2793 repopath=repo.path)
2900 repopath=repo.path)
2794 text = editortext
2901 text = editortext
2795
2902
2796 # strip away anything below this special string (used for editors that want
2903 # strip away anything below this special string (used for editors that want
2797 # to display the diff)
2904 # to display the diff)
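# _linebelow matches a scissors-style marker ("HG: ... >8 ..."); everything the
# editor left below it, such as a diff shown for reference, is discarded from
# the final commit message.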
2798 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2905 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2799 if stripbelow:
2906 if stripbelow:
2800 text = text[:stripbelow.start()]
2907 text = text[:stripbelow.start()]
2801
2908
2802 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2909 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2803 os.chdir(olddir)
2910 os.chdir(olddir)
2804
2911
2805 if finishdesc:
2912 if finishdesc:
2806 text = finishdesc(text)
2913 text = finishdesc(text)
2807 if not text.strip():
2914 if not text.strip():
2808 raise error.Abort(_("empty commit message"))
2915 raise error.Abort(_("empty commit message"))
2809 if unchangedmessagedetection and editortext == templatetext:
2916 if unchangedmessagedetection and editortext == templatetext:
2810 raise error.Abort(_("commit message unchanged"))
2917 raise error.Abort(_("commit message unchanged"))
2811
2918
2812 return text
2919 return text
2813
2920
2814 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2921 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2815 ui = repo.ui
2922 ui = repo.ui
2816 tmpl, mapfile = gettemplate(ui, tmpl, None)
2923 tmpl, mapfile = gettemplate(ui, tmpl, None)
2817
2924
2818 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2925 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2819
2926
2820 for k, v in repo.ui.configitems('committemplate'):
2927 for k, v in repo.ui.configitems('committemplate'):
2821 if k != 'changeset':
2928 if k != 'changeset':
2822 t.t.cache[k] = v
2929 t.t.cache[k] = v
2823
2930
2824 if not extramsg:
2931 if not extramsg:
2825 extramsg = '' # ensure that extramsg is string
2932 extramsg = '' # ensure that extramsg is string
2826
2933
2827 ui.pushbuffer()
2934 ui.pushbuffer()
2828 t.show(ctx, extramsg=extramsg)
2935 t.show(ctx, extramsg=extramsg)
2829 return ui.popbuffer()
2936 return ui.popbuffer()
2830
2937
2831 def hgprefix(msg):
2938 def hgprefix(msg):
2832 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2939 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2833
2940
2834 def buildcommittext(repo, ctx, subs, extramsg):
2941 def buildcommittext(repo, ctx, subs, extramsg):
2835 edittext = []
2942 edittext = []
2836 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2943 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2837 if ctx.description():
2944 if ctx.description():
2838 edittext.append(ctx.description())
2945 edittext.append(ctx.description())
2839 edittext.append("")
2946 edittext.append("")
2840 edittext.append("") # Empty line between message and comments.
2947 edittext.append("") # Empty line between message and comments.
2841 edittext.append(hgprefix(_("Enter commit message."
2948 edittext.append(hgprefix(_("Enter commit message."
2842 " Lines beginning with 'HG:' are removed.")))
2949 " Lines beginning with 'HG:' are removed.")))
2843 edittext.append(hgprefix(extramsg))
2950 edittext.append(hgprefix(extramsg))
2844 edittext.append("HG: --")
2951 edittext.append("HG: --")
2845 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2952 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2846 if ctx.p2():
2953 if ctx.p2():
2847 edittext.append(hgprefix(_("branch merge")))
2954 edittext.append(hgprefix(_("branch merge")))
2848 if ctx.branch():
2955 if ctx.branch():
2849 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2956 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2850 if bookmarks.isactivewdirparent(repo):
2957 if bookmarks.isactivewdirparent(repo):
2851 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2958 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2852 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2959 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2853 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2960 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2854 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2961 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2855 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2962 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2856 if not added and not modified and not removed:
2963 if not added and not modified and not removed:
2857 edittext.append(hgprefix(_("no files changed")))
2964 edittext.append(hgprefix(_("no files changed")))
2858 edittext.append("")
2965 edittext.append("")
2859
2966
2860 return "\n".join(edittext)
2967 return "\n".join(edittext)
2861
2968
2862 def commitstatus(repo, node, branch, bheads=None, opts=None):
2969 def commitstatus(repo, node, branch, bheads=None, opts=None):
2863 if opts is None:
2970 if opts is None:
2864 opts = {}
2971 opts = {}
2865 ctx = repo[node]
2972 ctx = repo[node]
2866 parents = ctx.parents()
2973 parents = ctx.parents()
2867
2974
2868 if (not opts.get('amend') and bheads and node not in bheads and not
2975 if (not opts.get('amend') and bheads and node not in bheads and not
2869 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2976 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2870 repo.ui.status(_('created new head\n'))
2977 repo.ui.status(_('created new head\n'))
2871 # The message is not printed for initial roots. For the other
2978 # The message is not printed for initial roots. For the other
2872 # changesets, it is printed in the following situations:
2979 # changesets, it is printed in the following situations:
2873 #
2980 #
2874 # Par column: for the 2 parents with ...
2981 # Par column: for the 2 parents with ...
2875 # N: null or no parent
2982 # N: null or no parent
2876 # B: parent is on another named branch
2983 # B: parent is on another named branch
2877 # C: parent is a regular non-head changeset
2984 # C: parent is a regular non-head changeset
2878 # H: parent was a branch head of the current branch
2985 # H: parent was a branch head of the current branch
2879 # Msg column: whether we print "created new head" message
2986 # Msg column: whether we print "created new head" message
2880 # In the following, it is assumed that there already exists some
2987 # In the following, it is assumed that there already exists some
2881 # initial branch heads of the current branch, otherwise nothing is
2988 # initial branch heads of the current branch, otherwise nothing is
2882 # printed anyway.
2989 # printed anyway.
2883 #
2990 #
2884 # Par Msg Comment
2991 # Par Msg Comment
2885 # N N y additional topo root
2992 # N N y additional topo root
2886 #
2993 #
2887 # B N y additional branch root
2994 # B N y additional branch root
2888 # C N y additional topo head
2995 # C N y additional topo head
2889 # H N n usual case
2996 # H N n usual case
2890 #
2997 #
2891 # B B y weird additional branch root
2998 # B B y weird additional branch root
2892 # C B y branch merge
2999 # C B y branch merge
2893 # H B n merge with named branch
3000 # H B n merge with named branch
2894 #
3001 #
2895 # C C y additional head from merge
3002 # C C y additional head from merge
2896 # C H n merge with a head
3003 # C H n merge with a head
2897 #
3004 #
2898 # H H n head merge: head count decreases
3005 # H H n head merge: head count decreases
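# Editorial sketch (not part of this change): ignoring --amend, the check that
# prints "created new head" above boils down to the following predicate, with
# bheads being the branch heads known before the commit:
#
#   def _wouldprintnewhead(node, parents, bheads, branch):
#       return bool(bheads and node not in bheads and
#                   not any(p.node() in bheads and p.branch() == branch
#                           for p in parents))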
2899
3006
2900 if not opts.get('close_branch'):
3007 if not opts.get('close_branch'):
2901 for r in parents:
3008 for r in parents:
2902 if r.closesbranch() and r.branch() == branch:
3009 if r.closesbranch() and r.branch() == branch:
2903 repo.ui.status(_('reopening closed branch head %d\n') % r)
3010 repo.ui.status(_('reopening closed branch head %d\n') % r)
2904
3011
2905 if repo.ui.debugflag:
3012 if repo.ui.debugflag:
2906 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3013 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2907 elif repo.ui.verbose:
3014 elif repo.ui.verbose:
2908 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3015 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2909
3016
2910 def postcommitstatus(repo, pats, opts):
3017 def postcommitstatus(repo, pats, opts):
2911 return repo.status(match=scmutil.match(repo[None], pats, opts))
3018 return repo.status(match=scmutil.match(repo[None], pats, opts))
2912
3019
2913 def revert(ui, repo, ctx, parents, *pats, **opts):
3020 def revert(ui, repo, ctx, parents, *pats, **opts):
2914 parent, p2 = parents
3021 parent, p2 = parents
2915 node = ctx.node()
3022 node = ctx.node()
2916
3023
2917 mf = ctx.manifest()
3024 mf = ctx.manifest()
2918 if node == p2:
3025 if node == p2:
2919 parent = p2
3026 parent = p2
2920
3027
2921 # need all matching names in dirstate and manifest of target rev,
3028 # need all matching names in dirstate and manifest of target rev,
2922 # so have to walk both. do not print errors if files exist in one
3029 # so have to walk both. do not print errors if files exist in one
2923 # but not the other. in both cases, filesets should be evaluated against
3030 # but not the other. in both cases, filesets should be evaluated against
2924 # workingctx to get a consistent result (issue4497). this means 'set:**'
3031 # workingctx to get a consistent result (issue4497). this means 'set:**'
2925 # cannot be used to select missing files from target rev.
3032 # cannot be used to select missing files from target rev.
2926
3033
2927 # `names` is a mapping for all elements in working copy and target revision
3034 # `names` is a mapping for all elements in working copy and target revision
2928 # The mapping is in the form:
3035 # The mapping is in the form:
2929 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3036 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
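# Editorial example (hypothetical paths): after both walks below, `names`
# might hold something like
#   {'src/foo.c': ('../src/foo.c', True),   # named explicitly on the command line
#    'src/bar.c': ('../src/bar.c', False)}  # matched by a pattern such as 'src/'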
2930 names = {}
3037 names = {}
2931
3038
2932 with repo.wlock():
3039 with repo.wlock():
2933 ## filling of the `names` mapping
3040 ## filling of the `names` mapping
2934 # walk dirstate to fill `names`
3041 # walk dirstate to fill `names`
2935
3042
2936 interactive = opts.get('interactive', False)
3043 interactive = opts.get('interactive', False)
2937 wctx = repo[None]
3044 wctx = repo[None]
2938 m = scmutil.match(wctx, pats, opts)
3045 m = scmutil.match(wctx, pats, opts)
2939
3046
2940 # we'll need this later
3047 # we'll need this later
2941 targetsubs = sorted(s for s in wctx.substate if m(s))
3048 targetsubs = sorted(s for s in wctx.substate if m(s))
2942
3049
2943 if not m.always():
3050 if not m.always():
2944 matcher = matchmod.badmatch(m, lambda x, y: False)
3051 matcher = matchmod.badmatch(m, lambda x, y: False)
2945 for abs in repo[None].walk(matcher):
3052 for abs in repo[None].walk(matcher):
2946 names[abs] = m.rel(abs), m.exact(abs)
3053 names[abs] = m.rel(abs), m.exact(abs)
2947
3054
2948 # walk target manifest to fill `names`
3055 # walk target manifest to fill `names`
2949
3056
2950 def badfn(path, msg):
3057 def badfn(path, msg):
2951 if path in names:
3058 if path in names:
2952 return
3059 return
2953 if path in ctx.substate:
3060 if path in ctx.substate:
2954 return
3061 return
2955 path_ = path + '/'
3062 path_ = path + '/'
2956 for f in names:
3063 for f in names:
2957 if f.startswith(path_):
3064 if f.startswith(path_):
2958 return
3065 return
2959 ui.warn("%s: %s\n" % (m.rel(path), msg))
3066 ui.warn("%s: %s\n" % (m.rel(path), msg))
2960
3067
2961 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3068 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2962 if abs not in names:
3069 if abs not in names:
2963 names[abs] = m.rel(abs), m.exact(abs)
3070 names[abs] = m.rel(abs), m.exact(abs)
2964
3071
2965 # Find status of all files in `names`.
3072 # Find status of all files in `names`.
2966 m = scmutil.matchfiles(repo, names)
3073 m = scmutil.matchfiles(repo, names)
2967
3074
2968 changes = repo.status(node1=node, match=m,
3075 changes = repo.status(node1=node, match=m,
2969 unknown=True, ignored=True, clean=True)
3076 unknown=True, ignored=True, clean=True)
2970 else:
3077 else:
2971 changes = repo.status(node1=node, match=m)
3078 changes = repo.status(node1=node, match=m)
2972 for kind in changes:
3079 for kind in changes:
2973 for abs in kind:
3080 for abs in kind:
2974 names[abs] = m.rel(abs), m.exact(abs)
3081 names[abs] = m.rel(abs), m.exact(abs)
2975
3082
2976 m = scmutil.matchfiles(repo, names)
3083 m = scmutil.matchfiles(repo, names)
2977
3084
2978 modified = set(changes.modified)
3085 modified = set(changes.modified)
2979 added = set(changes.added)
3086 added = set(changes.added)
2980 removed = set(changes.removed)
3087 removed = set(changes.removed)
2981 _deleted = set(changes.deleted)
3088 _deleted = set(changes.deleted)
2982 unknown = set(changes.unknown)
3089 unknown = set(changes.unknown)
2983 unknown.update(changes.ignored)
3090 unknown.update(changes.ignored)
2984 clean = set(changes.clean)
3091 clean = set(changes.clean)
2985 modadded = set()
3092 modadded = set()
2986
3093
2987 # We need to account for the state of the file in the dirstate,
3094 # We need to account for the state of the file in the dirstate,
2988 # even when we revert against something other than the parent. This will
3095 # even when we revert against something other than the parent. This will
2989 # slightly alter the behavior of revert (doing a backup or not, delete
3096 # slightly alter the behavior of revert (doing a backup or not, delete
2990 # or just forget, etc.).
3097 # or just forget, etc.).
2991 if parent == node:
3098 if parent == node:
2992 dsmodified = modified
3099 dsmodified = modified
2993 dsadded = added
3100 dsadded = added
2994 dsremoved = removed
3101 dsremoved = removed
2995 # store all local modifications, useful later for rename detection
3102 # store all local modifications, useful later for rename detection
2996 localchanges = dsmodified | dsadded
3103 localchanges = dsmodified | dsadded
2997 modified, added, removed = set(), set(), set()
3104 modified, added, removed = set(), set(), set()
2998 else:
3105 else:
2999 changes = repo.status(node1=parent, match=m)
3106 changes = repo.status(node1=parent, match=m)
3000 dsmodified = set(changes.modified)
3107 dsmodified = set(changes.modified)
3001 dsadded = set(changes.added)
3108 dsadded = set(changes.added)
3002 dsremoved = set(changes.removed)
3109 dsremoved = set(changes.removed)
3003 # store all local modifications, useful later for rename detection
3110 # store all local modifications, useful later for rename detection
3004 localchanges = dsmodified | dsadded
3111 localchanges = dsmodified | dsadded
3005
3112
3006 # only take removes between wc and target into account
3113 # only take removes between wc and target into account
3007 clean |= dsremoved - removed
3114 clean |= dsremoved - removed
3008 dsremoved &= removed
3115 dsremoved &= removed
3009 # distinguish dirstate removes from the others
3116 # distinguish dirstate removes from the others
3010 removed -= dsremoved
3117 removed -= dsremoved
3011
3118
3012 modadded = added & dsmodified
3119 modadded = added & dsmodified
3013 added -= modadded
3120 added -= modadded
3014
3121
3015 # tell newly modified files apart.
3122 # tell newly modified files apart.
3016 dsmodified &= modified
3123 dsmodified &= modified
3017 dsmodified |= modified & dsadded # dirstate added may need backup
3124 dsmodified |= modified & dsadded # dirstate added may need backup
3018 modified -= dsmodified
3125 modified -= dsmodified
3019
3126
3020 # We need to wait for some post-processing to update this set
3127 # We need to wait for some post-processing to update this set
3021 # before making the distinction. The dirstate will be used for
3128 # before making the distinction. The dirstate will be used for
3022 # that purpose.
3129 # that purpose.
3023 dsadded = added
3130 dsadded = added
3024
3131
3025 # in case of merge, files that are actually added can be reported as
3132 # in case of merge, files that are actually added can be reported as
3026 # modified, we need to post process the result
3133 # modified, we need to post process the result
3027 if p2 != nullid:
3134 if p2 != nullid:
3028 mergeadd = set(dsmodified)
3135 mergeadd = set(dsmodified)
3029 for path in dsmodified:
3136 for path in dsmodified:
3030 if path in mf:
3137 if path in mf:
3031 mergeadd.remove(path)
3138 mergeadd.remove(path)
3032 dsadded |= mergeadd
3139 dsadded |= mergeadd
3033 dsmodified -= mergeadd
3140 dsmodified -= mergeadd
3034
3141
3035 # if f is a rename, update `names` to also revert the source
3142 # if f is a rename, update `names` to also revert the source
3036 cwd = repo.getcwd()
3143 cwd = repo.getcwd()
3037 for f in localchanges:
3144 for f in localchanges:
3038 src = repo.dirstate.copied(f)
3145 src = repo.dirstate.copied(f)
3039 # XXX should we check for rename down to target node?
3146 # XXX should we check for rename down to target node?
3040 if src and src not in names and repo.dirstate[src] == 'r':
3147 if src and src not in names and repo.dirstate[src] == 'r':
3041 dsremoved.add(src)
3148 dsremoved.add(src)
3042 names[src] = (repo.pathto(src, cwd), True)
3149 names[src] = (repo.pathto(src, cwd), True)
3043
3150
3044 # determine the exact nature of the deleted files
3151 # determine the exact nature of the deleted files
3045 deladded = set(_deleted)
3152 deladded = set(_deleted)
3046 for path in _deleted:
3153 for path in _deleted:
3047 if path in mf:
3154 if path in mf:
3048 deladded.remove(path)
3155 deladded.remove(path)
3049 deleted = _deleted - deladded
3156 deleted = _deleted - deladded
3050
3157
3051 # distinguish between files to forget and the others
3158 # distinguish between files to forget and the others
3052 added = set()
3159 added = set()
3053 for abs in dsadded:
3160 for abs in dsadded:
3054 if repo.dirstate[abs] != 'a':
3161 if repo.dirstate[abs] != 'a':
3055 added.add(abs)
3162 added.add(abs)
3056 dsadded -= added
3163 dsadded -= added
3057
3164
3058 for abs in deladded:
3165 for abs in deladded:
3059 if repo.dirstate[abs] == 'a':
3166 if repo.dirstate[abs] == 'a':
3060 dsadded.add(abs)
3167 dsadded.add(abs)
3061 deladded -= dsadded
3168 deladded -= dsadded
3062
3169
3063 # For files marked as removed, we check if an unknown file is present at
3170 # For files marked as removed, we check if an unknown file is present at
3064 # the same path. If such a file exists it may need to be backed up.
3171 # the same path. If such a file exists it may need to be backed up.
3065 # Making the distinction at this stage keeps the backup logic
3172 # Making the distinction at this stage keeps the backup logic
3066 # simpler.
3173 # simpler.
3067 removunk = set()
3174 removunk = set()
3068 for abs in removed:
3175 for abs in removed:
3069 target = repo.wjoin(abs)
3176 target = repo.wjoin(abs)
3070 if os.path.lexists(target):
3177 if os.path.lexists(target):
3071 removunk.add(abs)
3178 removunk.add(abs)
3072 removed -= removunk
3179 removed -= removunk
3073
3180
3074 dsremovunk = set()
3181 dsremovunk = set()
3075 for abs in dsremoved:
3182 for abs in dsremoved:
3076 target = repo.wjoin(abs)
3183 target = repo.wjoin(abs)
3077 if os.path.lexists(target):
3184 if os.path.lexists(target):
3078 dsremovunk.add(abs)
3185 dsremovunk.add(abs)
3079 dsremoved -= dsremovunk
3186 dsremoved -= dsremovunk
3080
3187
3081 # action to be actually performed by revert
3188 # action to be actually performed by revert
3082 # (<list of files>, <message>) tuple
3189 # (<list of files>, <message>) tuple
3083 actions = {'revert': ([], _('reverting %s\n')),
3190 actions = {'revert': ([], _('reverting %s\n')),
3084 'add': ([], _('adding %s\n')),
3191 'add': ([], _('adding %s\n')),
3085 'remove': ([], _('removing %s\n')),
3192 'remove': ([], _('removing %s\n')),
3086 'drop': ([], _('removing %s\n')),
3193 'drop': ([], _('removing %s\n')),
3087 'forget': ([], _('forgetting %s\n')),
3194 'forget': ([], _('forgetting %s\n')),
3088 'undelete': ([], _('undeleting %s\n')),
3195 'undelete': ([], _('undeleting %s\n')),
3089 'noop': (None, _('no changes needed to %s\n')),
3196 'noop': (None, _('no changes needed to %s\n')),
3090 'unknown': (None, _('file not managed: %s\n')),
3197 'unknown': (None, _('file not managed: %s\n')),
3091 }
3198 }
3092
3199
3093 # "constant" that convey the backup strategy.
3200 # "constant" that convey the backup strategy.
3094 # All set to `discard` if `no-backup` is set do avoid checking
3201 # All set to `discard` if `no-backup` is set do avoid checking
3095 # no_backup lower in the code.
3202 # no_backup lower in the code.
3096 # These values are ordered for comparison purposes
3203 # These values are ordered for comparison purposes
3097 backupinteractive = 3 # do backup if interactively modified
3204 backupinteractive = 3 # do backup if interactively modified
3098 backup = 2 # unconditionally do backup
3205 backup = 2 # unconditionally do backup
3099 check = 1 # check if the existing file differs from target
3206 check = 1 # check if the existing file differs from target
3100 discard = 0 # never do backup
3207 discard = 0 # never do backup
3101 if opts.get('no_backup'):
3208 if opts.get('no_backup'):
3102 backupinteractive = backup = check = discard
3209 backupinteractive = backup = check = discard
3103 if interactive:
3210 if interactive:
3104 dsmodifiedbackup = backupinteractive
3211 dsmodifiedbackup = backupinteractive
3105 else:
3212 else:
3106 dsmodifiedbackup = backup
3213 dsmodifiedbackup = backup
3107 tobackup = set()
3214 tobackup = set()
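# Editorial note: backupinteractive (3) is handled separately in the loop
# below; backup (2) always satisfies "backup <= dobackup"; check (1) backs a
# file up only when it differs from the target; discard (0) never reaches the
# comparison because of the "if dobackup:" guard, so --no-backup disables
# backups entirely.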
3108
3215
3109 backupanddel = actions['remove']
3216 backupanddel = actions['remove']
3110 if not opts.get('no_backup'):
3217 if not opts.get('no_backup'):
3111 backupanddel = actions['drop']
3218 backupanddel = actions['drop']
3112
3219
3113 disptable = (
3220 disptable = (
3114 # dispatch table:
3221 # dispatch table:
3115 # file state
3222 # file state
3116 # action
3223 # action
3117 # make backup
3224 # make backup
3118
3225
3119 ## Sets that result in changes to files on disk
3226 ## Sets that result in changes to files on disk
3120 # Modified compared to target, no local change
3227 # Modified compared to target, no local change
3121 (modified, actions['revert'], discard),
3228 (modified, actions['revert'], discard),
3122 # Modified compared to target, but local file is deleted
3229 # Modified compared to target, but local file is deleted
3123 (deleted, actions['revert'], discard),
3230 (deleted, actions['revert'], discard),
3124 # Modified compared to target, local change
3231 # Modified compared to target, local change
3125 (dsmodified, actions['revert'], dsmodifiedbackup),
3232 (dsmodified, actions['revert'], dsmodifiedbackup),
3126 # Added since target
3233 # Added since target
3127 (added, actions['remove'], discard),
3234 (added, actions['remove'], discard),
3128 # Added in working directory
3235 # Added in working directory
3129 (dsadded, actions['forget'], discard),
3236 (dsadded, actions['forget'], discard),
3130 # Added since target, have local modification
3237 # Added since target, have local modification
3131 (modadded, backupanddel, backup),
3238 (modadded, backupanddel, backup),
3132 # Added since target but file is missing in working directory
3239 # Added since target but file is missing in working directory
3133 (deladded, actions['drop'], discard),
3240 (deladded, actions['drop'], discard),
3134 # Removed since target, before working copy parent
3241 # Removed since target, before working copy parent
3135 (removed, actions['add'], discard),
3242 (removed, actions['add'], discard),
3136 # Same as `removed` but an unknown file exists at the same path
3243 # Same as `removed` but an unknown file exists at the same path
3137 (removunk, actions['add'], check),
3244 (removunk, actions['add'], check),
3138 # Removed since target, marked as such in working copy parent
3245 # Removed since target, marked as such in working copy parent
3139 (dsremoved, actions['undelete'], discard),
3246 (dsremoved, actions['undelete'], discard),
3140 # Same as `dsremoved` but an unknown file exists at the same path
3247 # Same as `dsremoved` but an unknown file exists at the same path
3141 (dsremovunk, actions['undelete'], check),
3248 (dsremovunk, actions['undelete'], check),
3142 ## the following sets do not result in any file changes
3249 ## the following sets do not result in any file changes
3143 # File with no modification
3250 # File with no modification
3144 (clean, actions['noop'], discard),
3251 (clean, actions['noop'], discard),
3145 # Existing file, not tracked anywhere
3252 # Existing file, not tracked anywhere
3146 (unknown, actions['unknown'], discard),
3253 (unknown, actions['unknown'], discard),
3147 )
3254 )
3148
3255
3149 for abs, (rel, exact) in sorted(names.items()):
3256 for abs, (rel, exact) in sorted(names.items()):
3150 # target file to be touched on disk (relative to cwd)
3257 # target file to be touched on disk (relative to cwd)
3151 target = repo.wjoin(abs)
3258 target = repo.wjoin(abs)
3152 # search the entry in the dispatch table.
3259 # search the entry in the dispatch table.
3153 # if the file is in any of these sets, it was touched in the working
3260 # if the file is in any of these sets, it was touched in the working
3154 # directory parent and we are sure it needs to be reverted.
3261 # directory parent and we are sure it needs to be reverted.
3155 for table, (xlist, msg), dobackup in disptable:
3262 for table, (xlist, msg), dobackup in disptable:
3156 if abs not in table:
3263 if abs not in table:
3157 continue
3264 continue
3158 if xlist is not None:
3265 if xlist is not None:
3159 xlist.append(abs)
3266 xlist.append(abs)
3160 if dobackup:
3267 if dobackup:
3161 # If in interactive mode, don't automatically create
3268 # If in interactive mode, don't automatically create
3162 # .orig files (issue4793)
3269 # .orig files (issue4793)
3163 if dobackup == backupinteractive:
3270 if dobackup == backupinteractive:
3164 tobackup.add(abs)
3271 tobackup.add(abs)
3165 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3272 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3166 bakname = scmutil.origpath(ui, repo, rel)
3273 bakname = scmutil.origpath(ui, repo, rel)
3167 ui.note(_('saving current version of %s as %s\n') %
3274 ui.note(_('saving current version of %s as %s\n') %
3168 (rel, bakname))
3275 (rel, bakname))
3169 if not opts.get('dry_run'):
3276 if not opts.get('dry_run'):
3170 if interactive:
3277 if interactive:
3171 util.copyfile(target, bakname)
3278 util.copyfile(target, bakname)
3172 else:
3279 else:
3173 util.rename(target, bakname)
3280 util.rename(target, bakname)
3174 if ui.verbose or not exact:
3281 if ui.verbose or not exact:
3175 if not isinstance(msg, basestring):
3282 if not isinstance(msg, basestring):
3176 msg = msg(abs)
3283 msg = msg(abs)
3177 ui.status(msg % rel)
3284 ui.status(msg % rel)
3178 elif exact:
3285 elif exact:
3179 ui.warn(msg % rel)
3286 ui.warn(msg % rel)
3180 break
3287 break
3181
3288
3182 if not opts.get('dry_run'):
3289 if not opts.get('dry_run'):
3183 needdata = ('revert', 'add', 'undelete')
3290 needdata = ('revert', 'add', 'undelete')
3184 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3291 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3185 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3292 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3186
3293
3187 if targetsubs:
3294 if targetsubs:
3188 # Revert the subrepos on the revert list
3295 # Revert the subrepos on the revert list
3189 for sub in targetsubs:
3296 for sub in targetsubs:
3190 try:
3297 try:
3191 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3298 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3192 except KeyError:
3299 except KeyError:
3193 raise error.Abort("subrepository '%s' does not exist in %s!"
3300 raise error.Abort("subrepository '%s' does not exist in %s!"
3194 % (sub, short(ctx.node())))
3301 % (sub, short(ctx.node())))
3195
3302
3196 def _revertprefetch(repo, ctx, *files):
3303 def _revertprefetch(repo, ctx, *files):
3197 """Let extension changing the storage layer prefetch content"""
3304 """Let extension changing the storage layer prefetch content"""
3198 pass
3305 pass
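# Editorial sketch (assumption, not part of this change): a storage extension
# could hook the function above with extensions.wrapfunction, e.g.:
#
#   from mercurial import cmdutil, extensions
#
#   def _prefetch(orig, repo, ctx, *files):
#       warmcontentstore(repo, ctx, files)  # hypothetical extension helper
#       return orig(repo, ctx, *files)
#
#   def extsetup(ui):
#       extensions.wrapfunction(cmdutil, '_revertprefetch', _prefetch)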
3199
3306
3200 def _performrevert(repo, parents, ctx, actions, interactive=False,
3307 def _performrevert(repo, parents, ctx, actions, interactive=False,
3201 tobackup=None):
3308 tobackup=None):
3202 """function that actually perform all the actions computed for revert
3309 """function that actually perform all the actions computed for revert
3203
3310
3204 This is an independent function to let extensions plug in and react to
3311 This is an independent function to let extensions plug in and react to
3205 the imminent revert.
3312 the imminent revert.
3206
3313
3207 Make sure you have the working directory locked when calling this function.
3314 Make sure you have the working directory locked when calling this function.
3208 """
3315 """
3209 parent, p2 = parents
3316 parent, p2 = parents
3210 node = ctx.node()
3317 node = ctx.node()
3211 excluded_files = []
3318 excluded_files = []
3212 matcher_opts = {"exclude": excluded_files}
3319 matcher_opts = {"exclude": excluded_files}
3213
3320
3214 def checkout(f):
3321 def checkout(f):
3215 fc = ctx[f]
3322 fc = ctx[f]
3216 repo.wwrite(f, fc.data(), fc.flags())
3323 repo.wwrite(f, fc.data(), fc.flags())
3217
3324
3218 def doremove(f):
3325 def doremove(f):
3219 try:
3326 try:
3220 repo.wvfs.unlinkpath(f)
3327 repo.wvfs.unlinkpath(f)
3221 except OSError:
3328 except OSError:
3222 pass
3329 pass
3223 repo.dirstate.remove(f)
3330 repo.dirstate.remove(f)
3224
3331
3225 audit_path = pathutil.pathauditor(repo.root)
3332 audit_path = pathutil.pathauditor(repo.root)
3226 for f in actions['forget'][0]:
3333 for f in actions['forget'][0]:
3227 if interactive:
3334 if interactive:
3228 choice = repo.ui.promptchoice(
3335 choice = repo.ui.promptchoice(
3229 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3336 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3230 if choice == 0:
3337 if choice == 0:
3231 repo.dirstate.drop(f)
3338 repo.dirstate.drop(f)
3232 else:
3339 else:
3233 excluded_files.append(repo.wjoin(f))
3340 excluded_files.append(repo.wjoin(f))
3234 else:
3341 else:
3235 repo.dirstate.drop(f)
3342 repo.dirstate.drop(f)
3236 for f in actions['remove'][0]:
3343 for f in actions['remove'][0]:
3237 audit_path(f)
3344 audit_path(f)
3238 if interactive:
3345 if interactive:
3239 choice = repo.ui.promptchoice(
3346 choice = repo.ui.promptchoice(
3240 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3347 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3241 if choice == 0:
3348 if choice == 0:
3242 doremove(f)
3349 doremove(f)
3243 else:
3350 else:
3244 excluded_files.append(repo.wjoin(f))
3351 excluded_files.append(repo.wjoin(f))
3245 else:
3352 else:
3246 doremove(f)
3353 doremove(f)
3247 for f in actions['drop'][0]:
3354 for f in actions['drop'][0]:
3248 audit_path(f)
3355 audit_path(f)
3249 repo.dirstate.remove(f)
3356 repo.dirstate.remove(f)
3250
3357
3251 normal = None
3358 normal = None
3252 if node == parent:
3359 if node == parent:
3253 # We're reverting to our parent. If possible, we'd like status
3360 # We're reverting to our parent. If possible, we'd like status
3254 # to report the file as clean. We have to use normallookup for
3361 # to report the file as clean. We have to use normallookup for
3255 # merges to avoid losing information about merged/dirty files.
3362 # merges to avoid losing information about merged/dirty files.
3256 if p2 != nullid:
3363 if p2 != nullid:
3257 normal = repo.dirstate.normallookup
3364 normal = repo.dirstate.normallookup
3258 else:
3365 else:
3259 normal = repo.dirstate.normal
3366 normal = repo.dirstate.normal
3260
3367
3261 newlyaddedandmodifiedfiles = set()
3368 newlyaddedandmodifiedfiles = set()
3262 if interactive:
3369 if interactive:
3263 # Prompt the user for changes to revert
3370 # Prompt the user for changes to revert
3264 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3371 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3265 m = scmutil.match(ctx, torevert, matcher_opts)
3372 m = scmutil.match(ctx, torevert, matcher_opts)
3266 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3373 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3267 diffopts.nodates = True
3374 diffopts.nodates = True
3268 diffopts.git = True
3375 diffopts.git = True
3269 operation = 'discard'
3376 operation = 'discard'
3270 reversehunks = True
3377 reversehunks = True
3271 if node != parent:
3378 if node != parent:
3272 operation = 'revert'
3379 operation = 'revert'
3273 reversehunks = repo.ui.configbool('experimental',
3380 reversehunks = repo.ui.configbool('experimental',
3274 'revertalternateinteractivemode',
3381 'revertalternateinteractivemode',
3275 True)
3382 True)
3276 if reversehunks:
3383 if reversehunks:
3277 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3384 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3278 else:
3385 else:
3279 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3386 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3280 originalchunks = patch.parsepatch(diff)
3387 originalchunks = patch.parsepatch(diff)
3281
3388
3282 try:
3389 try:
3283
3390
3284 chunks, opts = recordfilter(repo.ui, originalchunks,
3391 chunks, opts = recordfilter(repo.ui, originalchunks,
3285 operation=operation)
3392 operation=operation)
3286 if reversehunks:
3393 if reversehunks:
3287 chunks = patch.reversehunks(chunks)
3394 chunks = patch.reversehunks(chunks)
3288
3395
3289 except patch.PatchError as err:
3396 except patch.PatchError as err:
3290 raise error.Abort(_('error parsing patch: %s') % err)
3397 raise error.Abort(_('error parsing patch: %s') % err)
3291
3398
3292 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3399 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3293 if tobackup is None:
3400 if tobackup is None:
3294 tobackup = set()
3401 tobackup = set()
3295 # Apply changes
3402 # Apply changes
3296 fp = stringio()
3403 fp = stringio()
3297 for c in chunks:
3404 for c in chunks:
3298 # Create a backup file only if this hunk should be backed up
3405 # Create a backup file only if this hunk should be backed up
3299 if ishunk(c) and c.header.filename() in tobackup:
3406 if ishunk(c) and c.header.filename() in tobackup:
3300 abs = c.header.filename()
3407 abs = c.header.filename()
3301 target = repo.wjoin(abs)
3408 target = repo.wjoin(abs)
3302 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3409 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3303 util.copyfile(target, bakname)
3410 util.copyfile(target, bakname)
3304 tobackup.remove(abs)
3411 tobackup.remove(abs)
3305 c.write(fp)
3412 c.write(fp)
3306 dopatch = fp.tell()
3413 dopatch = fp.tell()
3307 fp.seek(0)
3414 fp.seek(0)
3308 if dopatch:
3415 if dopatch:
3309 try:
3416 try:
3310 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3417 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3311 except patch.PatchError as err:
3418 except patch.PatchError as err:
3312 raise error.Abort(str(err))
3419 raise error.Abort(str(err))
3313 del fp
3420 del fp
3314 else:
3421 else:
3315 for f in actions['revert'][0]:
3422 for f in actions['revert'][0]:
3316 checkout(f)
3423 checkout(f)
3317 if normal:
3424 if normal:
3318 normal(f)
3425 normal(f)
3319
3426
3320 for f in actions['add'][0]:
3427 for f in actions['add'][0]:
3321 # Don't check out modified files; they are already created by the diff
3428 # Don't check out modified files; they are already created by the diff
3322 if f not in newlyaddedandmodifiedfiles:
3429 if f not in newlyaddedandmodifiedfiles:
3323 checkout(f)
3430 checkout(f)
3324 repo.dirstate.add(f)
3431 repo.dirstate.add(f)
3325
3432
3326 normal = repo.dirstate.normallookup
3433 normal = repo.dirstate.normallookup
3327 if node == parent and p2 == nullid:
3434 if node == parent and p2 == nullid:
3328 normal = repo.dirstate.normal
3435 normal = repo.dirstate.normal
3329 for f in actions['undelete'][0]:
3436 for f in actions['undelete'][0]:
3330 checkout(f)
3437 checkout(f)
3331 normal(f)
3438 normal(f)
3332
3439
3333 copied = copies.pathcopies(repo[parent], ctx)
3440 copied = copies.pathcopies(repo[parent], ctx)
3334
3441
3335 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3442 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3336 if f in copied:
3443 if f in copied:
3337 repo.dirstate.copy(copied[f], f)
3444 repo.dirstate.copy(copied[f], f)
3338
3445
3339 class command(registrar.command):
3446 class command(registrar.command):
3340 def _doregister(self, func, name, *args, **kwargs):
3447 def _doregister(self, func, name, *args, **kwargs):
3341 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3448 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3342 return super(command, self)._doregister(func, name, *args, **kwargs)
3449 return super(command, self)._doregister(func, name, *args, **kwargs)
3343
3450
3344 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3451 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3345 # commands.outgoing. "missing" is "missing" of the result of
3452 # commands.outgoing. "missing" is "missing" of the result of
3346 # "findcommonoutgoing()"
3453 # "findcommonoutgoing()"
3347 outgoinghooks = util.hooks()
3454 outgoinghooks = util.hooks()
3348
3455
3349 # a list of (ui, repo) functions called by commands.summary
3456 # a list of (ui, repo) functions called by commands.summary
3350 summaryhooks = util.hooks()
3457 summaryhooks = util.hooks()
3351
3458
3352 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3459 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3353 #
3460 #
3354 # functions should return tuple of booleans below, if 'changes' is None:
3461 # functions should return tuple of booleans below, if 'changes' is None:
3355 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3462 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3356 #
3463 #
3357 # otherwise, 'changes' is a tuple of tuples below:
3464 # otherwise, 'changes' is a tuple of tuples below:
3358 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3465 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3359 # - (desturl, destbranch, destpeer, outgoing)
3466 # - (desturl, destbranch, destpeer, outgoing)
3360 summaryremotehooks = util.hooks()
3467 summaryremotehooks = util.hooks()
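# Editorial sketch (assumption, not from this change): an extension could add
# a remote summary hook that follows the calling convention documented above:
#
#   def _summaryremote(ui, repo, opts, changes):
#       if changes is None:
#           return (False, False)   # need neither incoming nor outgoing info
#       (srcurl, srcbranch, srcpeer, incoming), _outgoing = changes
#       ui.status('summary source: %s\n' % srcurl)
#
#   def uisetup(ui):
#       cmdutil.summaryremotehooks.add('fooext', _summaryremote)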
3361
3468
3362 # A list of state files kept by multistep operations like graft.
3469 # A list of state files kept by multistep operations like graft.
3363 # Since graft cannot be aborted, it is considered 'clearable' by update.
3470 # Since graft cannot be aborted, it is considered 'clearable' by update.
3364 # note: bisect is intentionally excluded
3471 # note: bisect is intentionally excluded
3365 # (state file, clearable, allowcommit, error, hint)
3472 # (state file, clearable, allowcommit, error, hint)
3366 unfinishedstates = [
3473 unfinishedstates = [
3367 ('graftstate', True, False, _('graft in progress'),
3474 ('graftstate', True, False, _('graft in progress'),
3368 _("use 'hg graft --continue' or 'hg update' to abort")),
3475 _("use 'hg graft --continue' or 'hg update' to abort")),
3369 ('updatestate', True, False, _('last update was interrupted'),
3476 ('updatestate', True, False, _('last update was interrupted'),
3370 _("use 'hg update' to get a consistent checkout"))
3477 _("use 'hg update' to get a consistent checkout"))
3371 ]
3478 ]
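# Editorial sketch (assumption): an extension that keeps its own multistep
# state file can register it the same way, e.g. during uisetup():
#
#   from mercurial.i18n import _
#   cmdutil.unfinishedstates.append(
#       ('fooextstate', False, False, _('fooext operation in progress'),
#        _("use 'hg fooext --continue' or 'hg fooext --abort'")))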
3372
3479
3373 def checkunfinished(repo, commit=False):
3480 def checkunfinished(repo, commit=False):
3374 '''Look for an unfinished multistep operation, like graft, and abort
3481 '''Look for an unfinished multistep operation, like graft, and abort
3375 if found. It's probably good to check this right before
3482 if found. It's probably good to check this right before
3376 bailifchanged().
3483 bailifchanged().
3377 '''
3484 '''
3378 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3485 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3379 if commit and allowcommit:
3486 if commit and allowcommit:
3380 continue
3487 continue
3381 if repo.vfs.exists(f):
3488 if repo.vfs.exists(f):
3382 raise error.Abort(msg, hint=hint)
3489 raise error.Abort(msg, hint=hint)
3383
3490
3384 def clearunfinished(repo):
3491 def clearunfinished(repo):
3385 '''Check for unfinished operations (as above), and clear the ones
3492 '''Check for unfinished operations (as above), and clear the ones
3386 that are clearable.
3493 that are clearable.
3387 '''
3494 '''
3388 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3495 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3389 if not clearable and repo.vfs.exists(f):
3496 if not clearable and repo.vfs.exists(f):
3390 raise error.Abort(msg, hint=hint)
3497 raise error.Abort(msg, hint=hint)
3391 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3498 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3392 if clearable and repo.vfs.exists(f):
3499 if clearable and repo.vfs.exists(f):
3393 util.unlink(repo.vfs.join(f))
3500 util.unlink(repo.vfs.join(f))
3394
3501
3395 afterresolvedstates = [
3502 afterresolvedstates = [
3396 ('graftstate',
3503 ('graftstate',
3397 _('hg graft --continue')),
3504 _('hg graft --continue')),
3398 ]
3505 ]
3399
3506
3400 def howtocontinue(repo):
3507 def howtocontinue(repo):
3401 '''Check for an unfinished operation and return the command to finish
3508 '''Check for an unfinished operation and return the command to finish
3402 it.
3509 it.
3403
3510
3404 afterresolvedstates tuples define a .hg/{file} and the corresponding
3511 afterresolvedstates tuples define a .hg/{file} and the corresponding
3405 command needed to finish it.
3512 command needed to finish it.
3406
3513
3407 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3514 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3408 a boolean.
3515 a boolean.
3409 '''
3516 '''
3410 contmsg = _("continue: %s")
3517 contmsg = _("continue: %s")
3411 for f, msg in afterresolvedstates:
3518 for f, msg in afterresolvedstates:
3412 if repo.vfs.exists(f):
3519 if repo.vfs.exists(f):
3413 return contmsg % msg, True
3520 return contmsg % msg, True
3414 workingctx = repo[None]
3521 workingctx = repo[None]
3415 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3522 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3416 for s in workingctx.substate)
3523 for s in workingctx.substate)
3417 if dirty:
3524 if dirty:
3418 return contmsg % _("hg commit"), False
3525 return contmsg % _("hg commit"), False
3419 return None, None
3526 return None, None
3420
3527
3421 def checkafterresolved(repo):
3528 def checkafterresolved(repo):
3422 '''Inform the user about the next action after completing hg resolve
3529 '''Inform the user about the next action after completing hg resolve
3423
3530
3424 If howtocontinue() finds a matching afterresolvedstates entry, the
3531 If howtocontinue() finds a matching afterresolvedstates entry, the
3425 message is reported through repo.ui.warn.
3532 message is reported through repo.ui.warn.
3426
3533
3427 Otherwise, it is reported through repo.ui.note.
3534 Otherwise, it is reported through repo.ui.note.
3428 '''
3535 '''
3429 msg, warning = howtocontinue(repo)
3536 msg, warning = howtocontinue(repo)
3430 if msg is not None:
3537 if msg is not None:
3431 if warning:
3538 if warning:
3432 repo.ui.warn("%s\n" % msg)
3539 repo.ui.warn("%s\n" % msg)
3433 else:
3540 else:
3434 repo.ui.note("%s\n" % msg)
3541 repo.ui.note("%s\n" % msg)
3435
3542
3436 def wrongtooltocontinue(repo, task):
3543 def wrongtooltocontinue(repo, task):
3437 '''Raise an abort suggesting how to properly continue if there is an
3544 '''Raise an abort suggesting how to properly continue if there is an
3438 active task.
3545 active task.
3439
3546
3440 Uses howtocontinue() to find the active task.
3547 Uses howtocontinue() to find the active task.
3441
3548
3442 If there's no task (or the only suggestion would be the note-level
3549 If there's no task (or the only suggestion would be the note-level
3443 'hg commit' one), it does not offer a hint.
3550 'hg commit' one), it does not offer a hint.
3444 '''
3551 '''
3445 after = howtocontinue(repo)
3552 after = howtocontinue(repo)
3446 hint = None
3553 hint = None
3447 if after[1]:
3554 if after[1]:
3448 hint = after[0]
3555 hint = after[0]
3449 raise error.Abort(_('no %s in progress') % task, hint=hint)
3556 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,5521 +1,5434 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22 from . import (
22 from . import (
23 archival,
23 archival,
24 bookmarks,
24 bookmarks,
25 bundle2,
25 bundle2,
26 changegroup,
26 changegroup,
27 cmdutil,
27 cmdutil,
28 copies,
28 copies,
29 destutil,
29 destutil,
30 dirstateguard,
30 dirstateguard,
31 discovery,
31 discovery,
32 encoding,
32 encoding,
33 error,
33 error,
34 exchange,
34 exchange,
35 extensions,
35 extensions,
36 graphmod,
36 graphmod,
37 hbisect,
37 hbisect,
38 help,
38 help,
39 hg,
39 hg,
40 lock as lockmod,
40 lock as lockmod,
41 merge as mergemod,
41 merge as mergemod,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 phases,
44 phases,
45 pycompat,
45 pycompat,
46 rcutil,
46 rcutil,
47 registrar,
47 registrar,
48 revsetlang,
48 revsetlang,
49 scmutil,
49 scmutil,
50 server,
50 server,
51 sshserver,
51 sshserver,
52 streamclone,
52 streamclone,
53 tags as tagsmod,
53 tags as tagsmod,
54 templatekw,
54 templatekw,
55 ui as uimod,
55 ui as uimod,
56 util,
56 util,
57 )
57 )
58
58
59 release = lockmod.release
59 release = lockmod.release
60
60
61 table = {}
61 table = {}
62
62
63 command = registrar.command(table)
63 command = registrar.command(table)
64
64
65 # label constants
65 # label constants
66 # until 3.5, bookmarks.current was the advertised name, not
66 # until 3.5, bookmarks.current was the advertised name, not
67 # bookmarks.active, so we must use both to avoid breaking old
67 # bookmarks.active, so we must use both to avoid breaking old
68 # custom styles
68 # custom styles
69 activebookmarklabel = 'bookmarks.active bookmarks.current'
69 activebookmarklabel = 'bookmarks.active bookmarks.current'
70
70
71 # common command options
71 # common command options
72
72
73 globalopts = [
73 globalopts = [
74 ('R', 'repository', '',
74 ('R', 'repository', '',
75 _('repository root directory or name of overlay bundle file'),
75 _('repository root directory or name of overlay bundle file'),
76 _('REPO')),
76 _('REPO')),
77 ('', 'cwd', '',
77 ('', 'cwd', '',
78 _('change working directory'), _('DIR')),
78 _('change working directory'), _('DIR')),
79 ('y', 'noninteractive', None,
79 ('y', 'noninteractive', None,
80 _('do not prompt, automatically pick the first choice for all prompts')),
80 _('do not prompt, automatically pick the first choice for all prompts')),
81 ('q', 'quiet', None, _('suppress output')),
81 ('q', 'quiet', None, _('suppress output')),
82 ('v', 'verbose', None, _('enable additional output')),
82 ('v', 'verbose', None, _('enable additional output')),
83 ('', 'color', '',
83 ('', 'color', '',
84 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
84 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
85 # and should not be translated
85 # and should not be translated
86 _("when to colorize (boolean, always, auto, never, or debug)"),
86 _("when to colorize (boolean, always, auto, never, or debug)"),
87 _('TYPE')),
87 _('TYPE')),
88 ('', 'config', [],
88 ('', 'config', [],
89 _('set/override config option (use \'section.name=value\')'),
89 _('set/override config option (use \'section.name=value\')'),
90 _('CONFIG')),
90 _('CONFIG')),
91 ('', 'debug', None, _('enable debugging output')),
91 ('', 'debug', None, _('enable debugging output')),
92 ('', 'debugger', None, _('start debugger')),
92 ('', 'debugger', None, _('start debugger')),
93 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
93 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
94 _('ENCODE')),
94 _('ENCODE')),
95 ('', 'encodingmode', encoding.encodingmode,
95 ('', 'encodingmode', encoding.encodingmode,
96 _('set the charset encoding mode'), _('MODE')),
96 _('set the charset encoding mode'), _('MODE')),
97 ('', 'traceback', None, _('always print a traceback on exception')),
97 ('', 'traceback', None, _('always print a traceback on exception')),
98 ('', 'time', None, _('time how long the command takes')),
98 ('', 'time', None, _('time how long the command takes')),
99 ('', 'profile', None, _('print command execution profile')),
99 ('', 'profile', None, _('print command execution profile')),
100 ('', 'version', None, _('output version information and exit')),
100 ('', 'version', None, _('output version information and exit')),
101 ('h', 'help', None, _('display help and exit')),
101 ('h', 'help', None, _('display help and exit')),
102 ('', 'hidden', False, _('consider hidden changesets')),
102 ('', 'hidden', False, _('consider hidden changesets')),
103 ('', 'pager', 'auto',
103 ('', 'pager', 'auto',
104 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
104 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
105 ]
105 ]
106
106
107 dryrunopts = [('n', 'dry-run', None,
107 dryrunopts = cmdutil.dryrunopts
108 _('do not perform actions, just print output'))]
108 remoteopts = cmdutil.remoteopts
109
109 walkopts = cmdutil.walkopts
110 remoteopts = [
110 commitopts = cmdutil.commitopts
111 ('e', 'ssh', '',
111 commitopts2 = cmdutil.commitopts2
112 _('specify ssh command to use'), _('CMD')),
112 formatteropts = cmdutil.formatteropts
113 ('', 'remotecmd', '',
113 templateopts = cmdutil.templateopts
114 _('specify hg command to run on the remote side'), _('CMD')),
114 logopts = cmdutil.logopts
115 ('', 'insecure', None,
115 diffopts = cmdutil.diffopts
116 _('do not verify server certificate (ignoring web.cacerts config)')),
116 diffwsopts = cmdutil.diffwsopts
117 ]
117 diffopts2 = cmdutil.diffopts2
118
118 mergetoolopts = cmdutil.mergetoolopts
119 walkopts = [
119 similarityopts = cmdutil.similarityopts
120 ('I', 'include', [],
120 subrepoopts = cmdutil.subrepoopts
121 _('include names matching the given patterns'), _('PATTERN')),
121 debugrevlogopts = cmdutil.debugrevlogopts
122 ('X', 'exclude', [],
123 _('exclude names matching the given patterns'), _('PATTERN')),
124 ]
125
126 commitopts = [
127 ('m', 'message', '',
128 _('use text as commit message'), _('TEXT')),
129 ('l', 'logfile', '',
130 _('read commit message from file'), _('FILE')),
131 ]
132
133 commitopts2 = [
134 ('d', 'date', '',
135 _('record the specified date as commit date'), _('DATE')),
136 ('u', 'user', '',
137 _('record the specified user as committer'), _('USER')),
138 ]
139
140 # hidden for now
141 formatteropts = [
142 ('T', 'template', '',
143 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
144 ]
145
146 templateopts = [
147 ('', 'style', '',
148 _('display using template map file (DEPRECATED)'), _('STYLE')),
149 ('T', 'template', '',
150 _('display with template'), _('TEMPLATE')),
151 ]
152
153 logopts = [
154 ('p', 'patch', None, _('show patch')),
155 ('g', 'git', None, _('use git extended diff format')),
156 ('l', 'limit', '',
157 _('limit number of changes displayed'), _('NUM')),
158 ('M', 'no-merges', None, _('do not show merges')),
159 ('', 'stat', None, _('output diffstat-style summary of changes')),
160 ('G', 'graph', None, _("show the revision DAG")),
161 ] + templateopts
162
163 diffopts = [
164 ('a', 'text', None, _('treat all files as text')),
165 ('g', 'git', None, _('use git extended diff format')),
166 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
167 ('', 'nodates', None, _('omit dates from diff headers'))
168 ]
169
170 diffwsopts = [
171 ('w', 'ignore-all-space', None,
172 _('ignore white space when comparing lines')),
173 ('b', 'ignore-space-change', None,
174 _('ignore changes in the amount of white space')),
175 ('B', 'ignore-blank-lines', None,
176 _('ignore changes whose lines are all blank')),
177 ]
178
179 diffopts2 = [
180 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
181 ('p', 'show-function', None, _('show which function each change is in')),
182 ('', 'reverse', None, _('produce a diff that undoes the changes')),
183 ] + diffwsopts + [
184 ('U', 'unified', '',
185 _('number of lines of context to show'), _('NUM')),
186 ('', 'stat', None, _('output diffstat-style summary of changes')),
187 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
188 ]
189
190 mergetoolopts = [
191 ('t', 'tool', '', _('specify merge tool')),
192 ]
193
194 similarityopts = [
195 ('s', 'similarity', '',
196 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
197 ]
198
199 subrepoopts = [
200 ('S', 'subrepos', None,
201 _('recurse into subrepositories'))
202 ]
203
204 debugrevlogopts = [
205 ('c', 'changelog', False, _('open changelog')),
206 ('m', 'manifest', False, _('open manifest')),
207 ('', 'dir', '', _('open directory manifest')),
208 ]
209
122
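# Editorial sketch (assumption, not part of this change): third-party code can
# take these option lists from cmdutil directly, while the aliases above keep
# the old commands.* names working. A minimal extension command might read:
#
#   from mercurial import cmdutil, registrar
#   from mercurial.i18n import _
#
#   cmdtable = {}
#   command = registrar.command(cmdtable)
#
#   @command('fooext-list', cmdutil.walkopts + cmdutil.dryrunopts,
#            _('hg fooext-list [OPTION]... [FILE]...'))
#   def fooextlist(ui, repo, *pats, **opts):
#       # only demonstrates option reuse; a real command would do more
#       ui.status('dry-run: %s\n' % bool(opts.get('dry_run')))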
210 # Commands start here, listed alphabetically
123 # Commands start here, listed alphabetically
211
124
212 @command('^add',
125 @command('^add',
213 walkopts + subrepoopts + dryrunopts,
126 walkopts + subrepoopts + dryrunopts,
214 _('[OPTION]... [FILE]...'),
127 _('[OPTION]... [FILE]...'),
215 inferrepo=True)
128 inferrepo=True)
216 def add(ui, repo, *pats, **opts):
129 def add(ui, repo, *pats, **opts):
217 """add the specified files on the next commit
130 """add the specified files on the next commit
218
131
219 Schedule files to be version controlled and added to the
132 Schedule files to be version controlled and added to the
220 repository.
133 repository.
221
134
222 The files will be added to the repository at the next commit. To
135 The files will be added to the repository at the next commit. To
223 undo an add before that, see :hg:`forget`.
136 undo an add before that, see :hg:`forget`.
224
137
225 If no names are given, add all files to the repository (except
138 If no names are given, add all files to the repository (except
226 files matching ``.hgignore``).
139 files matching ``.hgignore``).
227
140
228 .. container:: verbose
141 .. container:: verbose
229
142
230 Examples:
143 Examples:
231
144
232 - New (unknown) files are added
145 - New (unknown) files are added
233 automatically by :hg:`add`::
146 automatically by :hg:`add`::
234
147
235 $ ls
148 $ ls
236 foo.c
149 foo.c
237 $ hg status
150 $ hg status
238 ? foo.c
151 ? foo.c
239 $ hg add
152 $ hg add
240 adding foo.c
153 adding foo.c
241 $ hg status
154 $ hg status
242 A foo.c
155 A foo.c
243
156
244 - Specific files to be added can be specified::
157 - Specific files to be added can be specified::
245
158
246 $ ls
159 $ ls
247 bar.c foo.c
160 bar.c foo.c
248 $ hg status
161 $ hg status
249 ? bar.c
162 ? bar.c
250 ? foo.c
163 ? foo.c
251 $ hg add bar.c
164 $ hg add bar.c
252 $ hg status
165 $ hg status
253 A bar.c
166 A bar.c
254 ? foo.c
167 ? foo.c
255
168
256 Returns 0 if all files are successfully added.
169 Returns 0 if all files are successfully added.
257 """
170 """
258
171
259 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
172 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
260 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
173 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
261 return rejected and 1 or 0
174 return rejected and 1 or 0
262
175
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)

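# Editorial sketch (not part of commands.py): how the -s/--similarity value is
# validated and scaled above -- a percentage string from the command line is
# parsed, range-checked, and handed on as a 0.0-1.0 ratio. Standalone version
# raising a plain ValueError instead of error.Abort:

def _similarity_ratio(value):
    """Convert a -s/--similarity option value to a 0.0-1.0 ratio."""
    try:
        sim = float(value or 100)      # unset/empty means "identical only"
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

assert _similarity_ratio('') == 1.0
assert _similarity_ratio('90') == 0.9
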
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    fm = ui.formatter('annotate', opts)
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        def hexfn(node):
            if node is None:
                return None
            else:
                return fm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % fm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        hexfn = fm.hexfunc
        formatrev = formathex = str

    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    ui.pager('annotate')

    if fm.isplain():
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    else:
        def makefunc(get, fmt):
            return get
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and fctx.isbinary():
            fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        if not lines:
            continue
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
            else:
                formats.append(['%s' for x in l])
            pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if not lines[-1][1].endswith('\n'):
            fm.plain('\n')

    fm.end()

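# Editorial sketch (not part of commands.py): the plain-formatter path above
# right-aligns every annotate column by padding each cell to the widest value
# in that column. A standalone version of that padding step, using len() in
# place of encoding.colwidth():

def _align_column(values, sep=' '):
    """Return right-aligned strings for one annotate column."""
    sizes = [len(v) for v in values]
    width = max(sizes)
    return [sep + ' ' * (width - w) + v for v, w in zip(values, sizes)]

assert _align_column(['7', '12', '104']) == ['   7', '  12', ' 104']
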
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        if kind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))

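# Editorial sketch (not part of commands.py): -t/--type defaults to whatever
# archival.guesskind() infers from the destination file name, falling back to
# 'files'. A simplified, hypothetical stand-in -- the extension table below is
# illustrative only and is not the exact table used by the archival module:

_ARCHIVE_EXTENSIONS = [
    ('.tar.bz2', 'tbz2'), ('.tbz2', 'tbz2'),
    ('.tar.gz', 'tgz'), ('.tgz', 'tgz'),
    ('.tar', 'tar'),
    ('.zip', 'zip'),
]

def _guess_archive_kind(dest):
    """Guess an archive kind from a destination name, or None."""
    for ext, kind in _ARCHIVE_EXTENSIONS:
        if dest.endswith(ext):
            return kind
    return None      # caller falls back to 'files'

assert _guess_archive_kind('project-1.0.zip') == 'zip'
assert _guess_archive_kind('project-1.0') is None
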
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _dobackout(ui, repo, node, rev, **opts)
    finally:
        release(lock, wlock)

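# Editorial sketch (not part of commands.py): backout() above is only a
# locking wrapper -- it takes the working-directory lock, then the store lock,
# runs _dobackout(), and releases both in reverse order even on error. A
# standalone illustration of that acquire/nest/release shape using dummy lock
# objects (the names below are made up for the example):

def _with_locks(acquire_wlock, acquire_lock, work):
    """Run work() while holding two locks, releasing them in reverse order."""
    wlock = lock = None
    try:
        wlock = acquire_wlock()
        lock = acquire_lock()
        return work()
    finally:
        for l in (lock, wlock):         # inner lock first, wlock last
            if l is not None:
                l.release()

class _dummylock(object):
    def release(self):
        self.released = True

assert _with_locks(_dummylock, _dummylock, lambda: 42) == 42
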
def _dobackout(ui, repo, node=None, rev=None, **opts):
    opts = pycompat.byteskwargs(opts)
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        dsguard = dirstateguard.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            if stats[3]:
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform,
                                    **pycompat.strkwargs(opts))
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0

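# Editorial sketch (not part of commands.py): the parent-selection rules that
# _dobackout() enforces above, restated as a small standalone function over
# plain values. NULL stands in for nullid and the error messages mirror the
# ones raised in the real code:

NULL = None

def _backout_parent(p1, p2, requested=None):
    """Pick the parent to diff against when backing out a changeset."""
    if p1 is NULL:
        raise ValueError('cannot backout a change with no parents')
    if p2 is not NULL:
        # backing out a merge requires an explicit --parent among p1/p2
        if requested is None:
            raise ValueError('cannot backout a merge changeset')
        if requested not in (p1, p2):
            raise ValueError('%s is not a parent of the backed-out node'
                             % requested)
        return requested
    if requested is not None:
        raise ValueError('cannot use --parent on non-merge changeset')
    return p1

assert _backout_parent('a', NULL) == 'a'
assert _backout_parent('a', 'b', requested='b') == 'b'
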
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise error.Abort(_('incompatible arguments'))

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
        else:
            nodes = [repo.lookup('.')]
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state['good'] and state['bad']):
            return

    def mayupdate(repo, node, show_stats=True):
        """common used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = cmdutil.show_changeset(ui, repo, {})

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)},
                                   blockedtag='bisect_check')
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        finally:
            state['current'] = [node]
            hbisect.save_state(repo, state)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)

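# Editorial sketch (not part of commands.py): two small pieces of the bisect
# logic above, restated as standalone functions -- the mapping from a test
# command's exit status to a bisect transition, and the ~log2 estimate of the
# remaining test count printed at each step:

def _bisect_transition(status):
    """Map a --command exit status to 'good', 'bad' or 'skip'."""
    if status == 125:
        return 'skip'
    if status == 0:
        return 'good'
    if status == 127 or status < 0:
        # command not found, or process killed: abort rather than mark
        raise RuntimeError('command failed or was killed')
    return 'bad'

def _remaining_tests(changesets):
    """Approximate number of further bisect steps: floor(log2(changesets))."""
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

assert _bisect_transition(1) == 'bad'
assert _remaining_tests(1) == 0 and _remaining_tests(8) == 3
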
957 @command('bookmarks|bookmark',
870 @command('bookmarks|bookmark',
958 [('f', 'force', False, _('force')),
871 [('f', 'force', False, _('force')),
959 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
872 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
960 ('d', 'delete', False, _('delete a given bookmark')),
873 ('d', 'delete', False, _('delete a given bookmark')),
961 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
874 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
962 ('i', 'inactive', False, _('mark a bookmark inactive')),
875 ('i', 'inactive', False, _('mark a bookmark inactive')),
963 ] + formatteropts,
876 ] + formatteropts,
964 _('hg bookmarks [OPTIONS]... [NAME]...'))
877 _('hg bookmarks [OPTIONS]... [NAME]...'))
965 def bookmark(ui, repo, *names, **opts):
878 def bookmark(ui, repo, *names, **opts):
966 '''create a new bookmark or list existing bookmarks
879 '''create a new bookmark or list existing bookmarks
967
880
968 Bookmarks are labels on changesets to help track lines of development.
881 Bookmarks are labels on changesets to help track lines of development.
969 Bookmarks are unversioned and can be moved, renamed and deleted.
882 Bookmarks are unversioned and can be moved, renamed and deleted.
970 Deleting or moving a bookmark has no effect on the associated changesets.
883 Deleting or moving a bookmark has no effect on the associated changesets.
971
884
972 Creating or updating to a bookmark causes it to be marked as 'active'.
885 Creating or updating to a bookmark causes it to be marked as 'active'.
973 The active bookmark is indicated with a '*'.
886 The active bookmark is indicated with a '*'.
974 When a commit is made, the active bookmark will advance to the new commit.
887 When a commit is made, the active bookmark will advance to the new commit.
975 A plain :hg:`update` will also advance an active bookmark, if possible.
888 A plain :hg:`update` will also advance an active bookmark, if possible.
976 Updating away from a bookmark will cause it to be deactivated.
889 Updating away from a bookmark will cause it to be deactivated.
977
890
978 Bookmarks can be pushed and pulled between repositories (see
891 Bookmarks can be pushed and pulled between repositories (see
979 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
892 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
980 diverged, a new 'divergent bookmark' of the form 'name@path' will
893 diverged, a new 'divergent bookmark' of the form 'name@path' will
981 be created. Using :hg:`merge` will resolve the divergence.
894 be created. Using :hg:`merge` will resolve the divergence.
982
895
983 A bookmark named '@' has the special property that :hg:`clone` will
896 A bookmark named '@' has the special property that :hg:`clone` will
984 check it out by default if it exists.
897 check it out by default if it exists.
985
898
986 .. container:: verbose
899 .. container:: verbose
987
900
988 Examples:
901 Examples:
989
902
990 - create an active bookmark for a new line of development::
903 - create an active bookmark for a new line of development::
991
904
992 hg book new-feature
905 hg book new-feature
993
906
994 - create an inactive bookmark as a place marker::
907 - create an inactive bookmark as a place marker::
995
908
996 hg book -i reviewed
909 hg book -i reviewed
997
910
998 - create an inactive bookmark on another changeset::
911 - create an inactive bookmark on another changeset::
999
912
1000 hg book -r .^ tested
913 hg book -r .^ tested
1001
914
1002 - rename bookmark turkey to dinner::
915 - rename bookmark turkey to dinner::
1003
916
1004 hg book -m turkey dinner
917 hg book -m turkey dinner
1005
918
1006 - move the '@' bookmark from another branch::
919 - move the '@' bookmark from another branch::
1007
920
1008 hg book -f @
921 hg book -f @
1009 '''
922 '''
1010 opts = pycompat.byteskwargs(opts)
923 opts = pycompat.byteskwargs(opts)
1011 force = opts.get('force')
924 force = opts.get('force')
1012 rev = opts.get('rev')
925 rev = opts.get('rev')
1013 delete = opts.get('delete')
926 delete = opts.get('delete')
1014 rename = opts.get('rename')
927 rename = opts.get('rename')
1015 inactive = opts.get('inactive')
928 inactive = opts.get('inactive')
1016
929
1017 def checkformat(mark):
930 def checkformat(mark):
1018 mark = mark.strip()
931 mark = mark.strip()
1019 if not mark:
932 if not mark:
1020 raise error.Abort(_("bookmark names cannot consist entirely of "
933 raise error.Abort(_("bookmark names cannot consist entirely of "
1021 "whitespace"))
934 "whitespace"))
1022 scmutil.checknewlabel(repo, mark, 'bookmark')
935 scmutil.checknewlabel(repo, mark, 'bookmark')
1023 return mark
936 return mark
1024
937
1025 def checkconflict(repo, mark, cur, force=False, target=None):
938 def checkconflict(repo, mark, cur, force=False, target=None):
1026 if mark in marks and not force:
939 if mark in marks and not force:
1027 if target:
940 if target:
1028 if marks[mark] == target and target == cur:
941 if marks[mark] == target and target == cur:
1029 # re-activating a bookmark
942 # re-activating a bookmark
1030 return
943 return
1031 anc = repo.changelog.ancestors([repo[target].rev()])
944 anc = repo.changelog.ancestors([repo[target].rev()])
1032 bmctx = repo[marks[mark]]
945 bmctx = repo[marks[mark]]
1033 divs = [repo[b].node() for b in marks
946 divs = [repo[b].node() for b in marks
1034 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
947 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1035
948
1036 # allow resolving a single divergent bookmark even if moving
949 # allow resolving a single divergent bookmark even if moving
1037 # the bookmark across branches when a revision is specified
950 # the bookmark across branches when a revision is specified
1038 # that contains a divergent bookmark
951 # that contains a divergent bookmark
1039 if bmctx.rev() not in anc and target in divs:
952 if bmctx.rev() not in anc and target in divs:
1040 bookmarks.deletedivergent(repo, [target], mark)
953 bookmarks.deletedivergent(repo, [target], mark)
1041 return
954 return
1042
955
1043 deletefrom = [b for b in divs
956 deletefrom = [b for b in divs
1044 if repo[b].rev() in anc or b == target]
957 if repo[b].rev() in anc or b == target]
1045 bookmarks.deletedivergent(repo, deletefrom, mark)
958 bookmarks.deletedivergent(repo, deletefrom, mark)
1046 if bookmarks.validdest(repo, bmctx, repo[target]):
959 if bookmarks.validdest(repo, bmctx, repo[target]):
1047 ui.status(_("moving bookmark '%s' forward from %s\n") %
960 ui.status(_("moving bookmark '%s' forward from %s\n") %
1048 (mark, short(bmctx.node())))
961 (mark, short(bmctx.node())))
1049 return
962 return
1050 raise error.Abort(_("bookmark '%s' already exists "
963 raise error.Abort(_("bookmark '%s' already exists "
1051 "(use -f to force)") % mark)
964 "(use -f to force)") % mark)
1052 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
965 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1053 and not force):
966 and not force):
1054 raise error.Abort(
967 raise error.Abort(
1055 _("a bookmark cannot have the name of an existing branch"))
968 _("a bookmark cannot have the name of an existing branch"))
1056
969
1057 if delete and rename:
970 if delete and rename:
1058 raise error.Abort(_("--delete and --rename are incompatible"))
971 raise error.Abort(_("--delete and --rename are incompatible"))
1059 if delete and rev:
972 if delete and rev:
1060 raise error.Abort(_("--rev is incompatible with --delete"))
973 raise error.Abort(_("--rev is incompatible with --delete"))
1061 if rename and rev:
974 if rename and rev:
1062 raise error.Abort(_("--rev is incompatible with --rename"))
975 raise error.Abort(_("--rev is incompatible with --rename"))
1063 if not names and (delete or rev):
976 if not names and (delete or rev):
1064 raise error.Abort(_("bookmark name required"))
977 raise error.Abort(_("bookmark name required"))
1065
978
1066 if delete or rename or names or inactive:
979 if delete or rename or names or inactive:
1067 wlock = lock = tr = None
980 wlock = lock = tr = None
1068 try:
981 try:
1069 wlock = repo.wlock()
982 wlock = repo.wlock()
1070 lock = repo.lock()
983 lock = repo.lock()
1071 cur = repo.changectx('.').node()
984 cur = repo.changectx('.').node()
1072 marks = repo._bookmarks
985 marks = repo._bookmarks
1073 if delete:
986 if delete:
1074 tr = repo.transaction('bookmark')
987 tr = repo.transaction('bookmark')
1075 for mark in names:
988 for mark in names:
1076 if mark not in marks:
989 if mark not in marks:
1077 raise error.Abort(_("bookmark '%s' does not exist") %
990 raise error.Abort(_("bookmark '%s' does not exist") %
1078 mark)
991 mark)
1079 if mark == repo._activebookmark:
992 if mark == repo._activebookmark:
1080 bookmarks.deactivate(repo)
993 bookmarks.deactivate(repo)
1081 del marks[mark]
994 del marks[mark]
1082
995
1083 elif rename:
996 elif rename:
1084 tr = repo.transaction('bookmark')
997 tr = repo.transaction('bookmark')
1085 if not names:
998 if not names:
1086 raise error.Abort(_("new bookmark name required"))
999 raise error.Abort(_("new bookmark name required"))
1087 elif len(names) > 1:
1000 elif len(names) > 1:
1088 raise error.Abort(_("only one new bookmark name allowed"))
1001 raise error.Abort(_("only one new bookmark name allowed"))
1089 mark = checkformat(names[0])
1002 mark = checkformat(names[0])
1090 if rename not in marks:
1003 if rename not in marks:
1091 raise error.Abort(_("bookmark '%s' does not exist")
1004 raise error.Abort(_("bookmark '%s' does not exist")
1092 % rename)
1005 % rename)
1093 checkconflict(repo, mark, cur, force)
1006 checkconflict(repo, mark, cur, force)
1094 marks[mark] = marks[rename]
1007 marks[mark] = marks[rename]
1095 if repo._activebookmark == rename and not inactive:
1008 if repo._activebookmark == rename and not inactive:
1096 bookmarks.activate(repo, mark)
1009 bookmarks.activate(repo, mark)
1097 del marks[rename]
1010 del marks[rename]
1098 elif names:
1011 elif names:
1099 tr = repo.transaction('bookmark')
1012 tr = repo.transaction('bookmark')
1100 newact = None
1013 newact = None
1101 for mark in names:
1014 for mark in names:
1102 mark = checkformat(mark)
1015 mark = checkformat(mark)
1103 if newact is None:
1016 if newact is None:
1104 newact = mark
1017 newact = mark
1105 if inactive and mark == repo._activebookmark:
1018 if inactive and mark == repo._activebookmark:
1106 bookmarks.deactivate(repo)
1019 bookmarks.deactivate(repo)
1107 return
1020 return
1108 tgt = cur
1021 tgt = cur
1109 if rev:
1022 if rev:
1110 tgt = scmutil.revsingle(repo, rev).node()
1023 tgt = scmutil.revsingle(repo, rev).node()
1111 checkconflict(repo, mark, cur, force, tgt)
1024 checkconflict(repo, mark, cur, force, tgt)
1112 marks[mark] = tgt
1025 marks[mark] = tgt
1113 if not inactive and cur == marks[newact] and not rev:
1026 if not inactive and cur == marks[newact] and not rev:
1114 bookmarks.activate(repo, newact)
1027 bookmarks.activate(repo, newact)
1115 elif cur != tgt and newact == repo._activebookmark:
1028 elif cur != tgt and newact == repo._activebookmark:
1116 bookmarks.deactivate(repo)
1029 bookmarks.deactivate(repo)
1117 elif inactive:
1030 elif inactive:
1118 if len(marks) == 0:
1031 if len(marks) == 0:
1119 ui.status(_("no bookmarks set\n"))
1032 ui.status(_("no bookmarks set\n"))
1120 elif not repo._activebookmark:
1033 elif not repo._activebookmark:
1121 ui.status(_("no active bookmark\n"))
1034 ui.status(_("no active bookmark\n"))
1122 else:
1035 else:
1123 bookmarks.deactivate(repo)
1036 bookmarks.deactivate(repo)
1124 if tr is not None:
1037 if tr is not None:
1125 marks.recordchange(tr)
1038 marks.recordchange(tr)
1126 tr.close()
1039 tr.close()
1127 finally:
1040 finally:
1128 lockmod.release(tr, lock, wlock)
1041 lockmod.release(tr, lock, wlock)
1129 else: # show bookmarks
1042 else: # show bookmarks
1130 fm = ui.formatter('bookmarks', opts)
1043 fm = ui.formatter('bookmarks', opts)
1131 hexfn = fm.hexfunc
1044 hexfn = fm.hexfunc
1132 marks = repo._bookmarks
1045 marks = repo._bookmarks
1133 if len(marks) == 0 and fm.isplain():
1046 if len(marks) == 0 and fm.isplain():
1134 ui.status(_("no bookmarks set\n"))
1047 ui.status(_("no bookmarks set\n"))
1135 for bmark, n in sorted(marks.iteritems()):
1048 for bmark, n in sorted(marks.iteritems()):
1136 active = repo._activebookmark
1049 active = repo._activebookmark
1137 if bmark == active:
1050 if bmark == active:
1138 prefix, label = '*', activebookmarklabel
1051 prefix, label = '*', activebookmarklabel
1139 else:
1052 else:
1140 prefix, label = ' ', ''
1053 prefix, label = ' ', ''
1141
1054
1142 fm.startitem()
1055 fm.startitem()
1143 if not ui.quiet:
1056 if not ui.quiet:
1144 fm.plain(' %s ' % prefix, label=label)
1057 fm.plain(' %s ' % prefix, label=label)
1145 fm.write('bookmark', '%s', bmark, label=label)
1058 fm.write('bookmark', '%s', bmark, label=label)
1146 pad = " " * (25 - encoding.colwidth(bmark))
1059 pad = " " * (25 - encoding.colwidth(bmark))
1147 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1060 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1148 repo.changelog.rev(n), hexfn(n), label=label)
1061 repo.changelog.rev(n), hexfn(n), label=label)
1149 fm.data(active=(bmark == active))
1062 fm.data(active=(bmark == active))
1150 fm.plain('\n')
1063 fm.plain('\n')
1151 fm.end()
1064 fm.end()
1152
1065
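The listing branch above is a compact tour of the formatter API: ui.formatter() opens a topic, startitem()/write()/condwrite()/data() emit one record per bookmark, and end() flushes. Below is a minimal sketch of the same pattern, written as a hypothetical extension; the file name, command name and helper are invented for illustration and are not part of this change.

# example_bookmarks.py - hypothetical extension sketching the formatter
# calls used by the bookmarks listing above.
from mercurial import cmdutil

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('simplebookmarks', [], 'hg simplebookmarks')
def simplebookmarks(ui, repo):
    '''print each bookmark with its node through ui.formatter (sketch)'''
    fm = ui.formatter('simplebookmarks', {})
    for name, node in sorted(repo._bookmarks.iteritems()):
        fm.startitem()
        fm.write('bookmark', '%s ', name)
        fm.write('node', '%s\n', fm.hexfunc(node))
    fm.end()
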
1153 @command('branch',
1066 @command('branch',
1154 [('f', 'force', None,
1067 [('f', 'force', None,
1155 _('set branch name even if it shadows an existing branch')),
1068 _('set branch name even if it shadows an existing branch')),
1156 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1069 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1157 _('[-fC] [NAME]'))
1070 _('[-fC] [NAME]'))
1158 def branch(ui, repo, label=None, **opts):
1071 def branch(ui, repo, label=None, **opts):
1159 """set or show the current branch name
1072 """set or show the current branch name
1160
1073
1161 .. note::
1074 .. note::
1162
1075
1163 Branch names are permanent and global. Use :hg:`bookmark` to create a
1076 Branch names are permanent and global. Use :hg:`bookmark` to create a
1164 light-weight bookmark instead. See :hg:`help glossary` for more
1077 light-weight bookmark instead. See :hg:`help glossary` for more
1165 information about named branches and bookmarks.
1078 information about named branches and bookmarks.
1166
1079
1167 With no argument, show the current branch name. With one argument,
1080 With no argument, show the current branch name. With one argument,
1168 set the working directory branch name (the branch will not exist
1081 set the working directory branch name (the branch will not exist
1169 in the repository until the next commit). Standard practice
1082 in the repository until the next commit). Standard practice
1170 recommends that primary development take place on the 'default'
1083 recommends that primary development take place on the 'default'
1171 branch.
1084 branch.
1172
1085
1173 Unless -f/--force is specified, branch will not let you set a
1086 Unless -f/--force is specified, branch will not let you set a
1174 branch name that already exists.
1087 branch name that already exists.
1175
1088
1176 Use -C/--clean to reset the working directory branch to that of
1089 Use -C/--clean to reset the working directory branch to that of
1177 the parent of the working directory, negating a previous branch
1090 the parent of the working directory, negating a previous branch
1178 change.
1091 change.
1179
1092
1180 Use the command :hg:`update` to switch to an existing branch. Use
1093 Use the command :hg:`update` to switch to an existing branch. Use
1181 :hg:`commit --close-branch` to mark this branch head as closed.
1094 :hg:`commit --close-branch` to mark this branch head as closed.
1182 When all heads of a branch are closed, the branch will be
1095 When all heads of a branch are closed, the branch will be
1183 considered closed.
1096 considered closed.
1184
1097
1185 Returns 0 on success.
1098 Returns 0 on success.
1186 """
1099 """
1187 opts = pycompat.byteskwargs(opts)
1100 opts = pycompat.byteskwargs(opts)
1188 if label:
1101 if label:
1189 label = label.strip()
1102 label = label.strip()
1190
1103
1191 if not opts.get('clean') and not label:
1104 if not opts.get('clean') and not label:
1192 ui.write("%s\n" % repo.dirstate.branch())
1105 ui.write("%s\n" % repo.dirstate.branch())
1193 return
1106 return
1194
1107
1195 with repo.wlock():
1108 with repo.wlock():
1196 if opts.get('clean'):
1109 if opts.get('clean'):
1197 label = repo[None].p1().branch()
1110 label = repo[None].p1().branch()
1198 repo.dirstate.setbranch(label)
1111 repo.dirstate.setbranch(label)
1199 ui.status(_('reset working directory to branch %s\n') % label)
1112 ui.status(_('reset working directory to branch %s\n') % label)
1200 elif label:
1113 elif label:
1201 if not opts.get('force') and label in repo.branchmap():
1114 if not opts.get('force') and label in repo.branchmap():
1202 if label not in [p.branch() for p in repo[None].parents()]:
1115 if label not in [p.branch() for p in repo[None].parents()]:
1203 raise error.Abort(_('a branch of the same name already'
1116 raise error.Abort(_('a branch of the same name already'
1204 ' exists'),
1117 ' exists'),
1205 # i18n: "it" refers to an existing branch
1118 # i18n: "it" refers to an existing branch
1206 hint=_("use 'hg update' to switch to it"))
1119 hint=_("use 'hg update' to switch to it"))
1207 scmutil.checknewlabel(repo, label, 'branch')
1120 scmutil.checknewlabel(repo, label, 'branch')
1208 repo.dirstate.setbranch(label)
1121 repo.dirstate.setbranch(label)
1209 ui.status(_('marked working directory as branch %s\n') % label)
1122 ui.status(_('marked working directory as branch %s\n') % label)
1210
1123
1211 # find any open named branches aside from default
1124 # find any open named branches aside from default
1212 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1125 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1213 if n != "default" and not c]
1126 if n != "default" and not c]
1214 if not others:
1127 if not others:
1215 ui.status(_('(branches are permanent and global, '
1128 ui.status(_('(branches are permanent and global, '
1216 'did you want a bookmark?)\n'))
1129 'did you want a bookmark?)\n'))
1217
1130
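A reduced sketch of the set path of :hg:`branch` above: take the working-copy lock, validate the label, and record it in the dirstate. Error handling and the --force/--clean branches are omitted; 'setbranchname' is an invented helper name.

from mercurial import scmutil

def setbranchname(ui, repo, label):
    '''mark the working directory as being on branch label (sketch only)'''
    with repo.wlock():
        scmutil.checknewlabel(repo, label, 'branch')
        repo.dirstate.setbranch(label)
        ui.status('marked working directory as branch %s\n' % label)
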
1218 @command('branches',
1131 @command('branches',
1219 [('a', 'active', False,
1132 [('a', 'active', False,
1220 _('show only branches that have unmerged heads (DEPRECATED)')),
1133 _('show only branches that have unmerged heads (DEPRECATED)')),
1221 ('c', 'closed', False, _('show normal and closed branches')),
1134 ('c', 'closed', False, _('show normal and closed branches')),
1222 ] + formatteropts,
1135 ] + formatteropts,
1223 _('[-c]'))
1136 _('[-c]'))
1224 def branches(ui, repo, active=False, closed=False, **opts):
1137 def branches(ui, repo, active=False, closed=False, **opts):
1225 """list repository named branches
1138 """list repository named branches
1226
1139
1227 List the repository's named branches, indicating which ones are
1140 List the repository's named branches, indicating which ones are
1228 inactive. If -c/--closed is specified, also list branches which have
1141 inactive. If -c/--closed is specified, also list branches which have
1229 been marked closed (see :hg:`commit --close-branch`).
1142 been marked closed (see :hg:`commit --close-branch`).
1230
1143
1231 Use the command :hg:`update` to switch to an existing branch.
1144 Use the command :hg:`update` to switch to an existing branch.
1232
1145
1233 Returns 0.
1146 Returns 0.
1234 """
1147 """
1235
1148
1236 opts = pycompat.byteskwargs(opts)
1149 opts = pycompat.byteskwargs(opts)
1237 ui.pager('branches')
1150 ui.pager('branches')
1238 fm = ui.formatter('branches', opts)
1151 fm = ui.formatter('branches', opts)
1239 hexfunc = fm.hexfunc
1152 hexfunc = fm.hexfunc
1240
1153
1241 allheads = set(repo.heads())
1154 allheads = set(repo.heads())
1242 branches = []
1155 branches = []
1243 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1156 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1244 isactive = not isclosed and bool(set(heads) & allheads)
1157 isactive = not isclosed and bool(set(heads) & allheads)
1245 branches.append((tag, repo[tip], isactive, not isclosed))
1158 branches.append((tag, repo[tip], isactive, not isclosed))
1246 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1159 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1247 reverse=True)
1160 reverse=True)
1248
1161
1249 for tag, ctx, isactive, isopen in branches:
1162 for tag, ctx, isactive, isopen in branches:
1250 if active and not isactive:
1163 if active and not isactive:
1251 continue
1164 continue
1252 if isactive:
1165 if isactive:
1253 label = 'branches.active'
1166 label = 'branches.active'
1254 notice = ''
1167 notice = ''
1255 elif not isopen:
1168 elif not isopen:
1256 if not closed:
1169 if not closed:
1257 continue
1170 continue
1258 label = 'branches.closed'
1171 label = 'branches.closed'
1259 notice = _(' (closed)')
1172 notice = _(' (closed)')
1260 else:
1173 else:
1261 label = 'branches.inactive'
1174 label = 'branches.inactive'
1262 notice = _(' (inactive)')
1175 notice = _(' (inactive)')
1263 current = (tag == repo.dirstate.branch())
1176 current = (tag == repo.dirstate.branch())
1264 if current:
1177 if current:
1265 label = 'branches.current'
1178 label = 'branches.current'
1266
1179
1267 fm.startitem()
1180 fm.startitem()
1268 fm.write('branch', '%s', tag, label=label)
1181 fm.write('branch', '%s', tag, label=label)
1269 rev = ctx.rev()
1182 rev = ctx.rev()
1270 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1183 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1271 fmt = ' ' * padsize + ' %d:%s'
1184 fmt = ' ' * padsize + ' %d:%s'
1272 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1185 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1273 label='log.changeset changeset.%s' % ctx.phasestr())
1186 label='log.changeset changeset.%s' % ctx.phasestr())
1274 fm.context(ctx=ctx)
1187 fm.context(ctx=ctx)
1275 fm.data(active=isactive, closed=not isopen, current=current)
1188 fm.data(active=isactive, closed=not isopen, current=current)
1276 if not ui.quiet:
1189 if not ui.quiet:
1277 fm.plain(notice)
1190 fm.plain(notice)
1278 fm.plain('\n')
1191 fm.plain('\n')
1279 fm.end()
1192 fm.end()
1280
1193
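For reference, the active/inactive/closed classification computed at the top of :hg:`branches` above can be distilled into a few lines. This is an illustrative helper ('classifybranches' is an invented name), not code from this change.

def classifybranches(repo):
    '''map branch name -> 'active' | 'inactive' | 'closed' (sketch)'''
    allheads = set(repo.heads())
    state = {}
    for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
        if isclosed:
            state[tag] = 'closed'
        elif set(heads) & allheads:
            state[tag] = 'active'
        else:
            state[tag] = 'inactive'
    return state
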
1281 @command('bundle',
1194 @command('bundle',
1282 [('f', 'force', None, _('run even when the destination is unrelated')),
1195 [('f', 'force', None, _('run even when the destination is unrelated')),
1283 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1196 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1284 _('REV')),
1197 _('REV')),
1285 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1198 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1286 _('BRANCH')),
1199 _('BRANCH')),
1287 ('', 'base', [],
1200 ('', 'base', [],
1288 _('a base changeset assumed to be available at the destination'),
1201 _('a base changeset assumed to be available at the destination'),
1289 _('REV')),
1202 _('REV')),
1290 ('a', 'all', None, _('bundle all changesets in the repository')),
1203 ('a', 'all', None, _('bundle all changesets in the repository')),
1291 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1204 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1292 ] + remoteopts,
1205 ] + remoteopts,
1293 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1206 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1294 def bundle(ui, repo, fname, dest=None, **opts):
1207 def bundle(ui, repo, fname, dest=None, **opts):
1295 """create a bundle file
1208 """create a bundle file
1296
1209
1297 Generate a bundle file containing data to be added to a repository.
1210 Generate a bundle file containing data to be added to a repository.
1298
1211
1299 To create a bundle containing all changesets, use -a/--all
1212 To create a bundle containing all changesets, use -a/--all
1300 (or --base null). With --base, hg assumes the destination already
1213 (or --base null). With --base, hg assumes the destination already
1301 has the nodes you specify. Without --base or --all, hg compares
1214 has the nodes you specify. Without --base or --all, hg compares
1302 against the destination repository given on the command line, or
1215 against the destination repository given on the command line, or
1303 default-push/default if no destination is specified.
1216 default-push/default if no destination is specified.
1304
1217
1305 You can change bundle format with the -t/--type option. See
1218 You can change bundle format with the -t/--type option. See
1306 :hg:`help bundlespec` for documentation on this format. By default,
1219 :hg:`help bundlespec` for documentation on this format. By default,
1307 the most appropriate format is used and compression defaults to
1220 the most appropriate format is used and compression defaults to
1308 bzip2.
1221 bzip2.
1309
1222
1310 The bundle file can then be transferred using conventional means
1223 The bundle file can then be transferred using conventional means
1311 and applied to another repository with the unbundle or pull
1224 and applied to another repository with the unbundle or pull
1312 command. This is useful when direct push and pull are not
1225 command. This is useful when direct push and pull are not
1313 available or when exporting an entire repository is undesirable.
1226 available or when exporting an entire repository is undesirable.
1314
1227
1315 Applying bundles preserves all changeset contents including
1228 Applying bundles preserves all changeset contents including
1316 permissions, copy/rename information, and revision history.
1229 permissions, copy/rename information, and revision history.
1317
1230
1318 Returns 0 on success, 1 if no changes found.
1231 Returns 0 on success, 1 if no changes found.
1319 """
1232 """
1320 opts = pycompat.byteskwargs(opts)
1233 opts = pycompat.byteskwargs(opts)
1321 revs = None
1234 revs = None
1322 if 'rev' in opts:
1235 if 'rev' in opts:
1323 revstrings = opts['rev']
1236 revstrings = opts['rev']
1324 revs = scmutil.revrange(repo, revstrings)
1237 revs = scmutil.revrange(repo, revstrings)
1325 if revstrings and not revs:
1238 if revstrings and not revs:
1326 raise error.Abort(_('no commits to bundle'))
1239 raise error.Abort(_('no commits to bundle'))
1327
1240
1328 bundletype = opts.get('type', 'bzip2').lower()
1241 bundletype = opts.get('type', 'bzip2').lower()
1329 try:
1242 try:
1330 bcompression, cgversion, params = exchange.parsebundlespec(
1243 bcompression, cgversion, params = exchange.parsebundlespec(
1331 repo, bundletype, strict=False)
1244 repo, bundletype, strict=False)
1332 except error.UnsupportedBundleSpecification as e:
1245 except error.UnsupportedBundleSpecification as e:
1333 raise error.Abort(str(e),
1246 raise error.Abort(str(e),
1334 hint=_("see 'hg help bundlespec' for supported "
1247 hint=_("see 'hg help bundlespec' for supported "
1335 "values for --type"))
1248 "values for --type"))
1336
1249
1337 # Packed bundles are a pseudo bundle format for now.
1250 # Packed bundles are a pseudo bundle format for now.
1338 if cgversion == 's1':
1251 if cgversion == 's1':
1339 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1252 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1340 hint=_("use 'hg debugcreatestreamclonebundle'"))
1253 hint=_("use 'hg debugcreatestreamclonebundle'"))
1341
1254
1342 if opts.get('all'):
1255 if opts.get('all'):
1343 if dest:
1256 if dest:
1344 raise error.Abort(_("--all is incompatible with specifying "
1257 raise error.Abort(_("--all is incompatible with specifying "
1345 "a destination"))
1258 "a destination"))
1346 if opts.get('base'):
1259 if opts.get('base'):
1347 ui.warn(_("ignoring --base because --all was specified\n"))
1260 ui.warn(_("ignoring --base because --all was specified\n"))
1348 base = ['null']
1261 base = ['null']
1349 else:
1262 else:
1350 base = scmutil.revrange(repo, opts.get('base'))
1263 base = scmutil.revrange(repo, opts.get('base'))
1351 if cgversion not in changegroup.supportedoutgoingversions(repo):
1264 if cgversion not in changegroup.supportedoutgoingversions(repo):
1352 raise error.Abort(_("repository does not support bundle version %s") %
1265 raise error.Abort(_("repository does not support bundle version %s") %
1353 cgversion)
1266 cgversion)
1354
1267
1355 if base:
1268 if base:
1356 if dest:
1269 if dest:
1357 raise error.Abort(_("--base is incompatible with specifying "
1270 raise error.Abort(_("--base is incompatible with specifying "
1358 "a destination"))
1271 "a destination"))
1359 common = [repo.lookup(rev) for rev in base]
1272 common = [repo.lookup(rev) for rev in base]
1360 heads = revs and map(repo.lookup, revs) or None
1273 heads = revs and map(repo.lookup, revs) or None
1361 outgoing = discovery.outgoing(repo, common, heads)
1274 outgoing = discovery.outgoing(repo, common, heads)
1362 else:
1275 else:
1363 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1276 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1364 dest, branches = hg.parseurl(dest, opts.get('branch'))
1277 dest, branches = hg.parseurl(dest, opts.get('branch'))
1365 other = hg.peer(repo, opts, dest)
1278 other = hg.peer(repo, opts, dest)
1366 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1279 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1367 heads = revs and map(repo.lookup, revs) or revs
1280 heads = revs and map(repo.lookup, revs) or revs
1368 outgoing = discovery.findcommonoutgoing(repo, other,
1281 outgoing = discovery.findcommonoutgoing(repo, other,
1369 onlyheads=heads,
1282 onlyheads=heads,
1370 force=opts.get('force'),
1283 force=opts.get('force'),
1371 portable=True)
1284 portable=True)
1372
1285
1373 if not outgoing.missing:
1286 if not outgoing.missing:
1374 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1287 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1375 return 1
1288 return 1
1376
1289
1377 if cgversion == '01': #bundle1
1290 if cgversion == '01': #bundle1
1378 if bcompression is None:
1291 if bcompression is None:
1379 bcompression = 'UN'
1292 bcompression = 'UN'
1380 bversion = 'HG10' + bcompression
1293 bversion = 'HG10' + bcompression
1381 bcompression = None
1294 bcompression = None
1382 elif cgversion in ('02', '03'):
1295 elif cgversion in ('02', '03'):
1383 bversion = 'HG20'
1296 bversion = 'HG20'
1384 else:
1297 else:
1385 raise error.ProgrammingError(
1298 raise error.ProgrammingError(
1386 'bundle: unexpected changegroup version %s' % cgversion)
1299 'bundle: unexpected changegroup version %s' % cgversion)
1387
1300
1388 # TODO compression options should be derived from bundlespec parsing.
1301 # TODO compression options should be derived from bundlespec parsing.
1389 # This is a temporary hack to allow adjusting bundle compression
1302 # This is a temporary hack to allow adjusting bundle compression
1390 # level without a) formalizing the bundlespec changes to declare it
1303 # level without a) formalizing the bundlespec changes to declare it
1391 # b) introducing a command flag.
1304 # b) introducing a command flag.
1392 compopts = {}
1305 compopts = {}
1393 complevel = ui.configint('experimental', 'bundlecomplevel')
1306 complevel = ui.configint('experimental', 'bundlecomplevel')
1394 if complevel is not None:
1307 if complevel is not None:
1395 compopts['level'] = complevel
1308 compopts['level'] = complevel
1396
1309
1397
1310
1398 contentopts = {'cg.version': cgversion}
1311 contentopts = {'cg.version': cgversion}
1399 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1312 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1400 contentopts, compression=bcompression,
1313 contentopts, compression=bcompression,
1401 compopts=compopts)
1314 compopts=compopts)
1402
1315
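The --type handling above funnels everything through exchange.parsebundlespec(), which splits a spec such as 'gzip-v2' into a compression engine, a changegroup version and extra parameters. A hedged sketch follows; 'describebundlespec' and the sample spec are illustrative only.

from mercurial import exchange

def describebundlespec(ui, repo, spec='gzip-v2'):
    '''print the three components parsebundlespec() extracts (sketch)'''
    compression, cgversion, params = exchange.parsebundlespec(
        repo, spec, strict=False)
    ui.write('compression=%s changegroup=%s params=%r\n'
             % (compression, cgversion, params))
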
1403 @command('cat',
1316 @command('cat',
1404 [('o', 'output', '',
1317 [('o', 'output', '',
1405 _('print output to file with formatted name'), _('FORMAT')),
1318 _('print output to file with formatted name'), _('FORMAT')),
1406 ('r', 'rev', '', _('print the given revision'), _('REV')),
1319 ('r', 'rev', '', _('print the given revision'), _('REV')),
1407 ('', 'decode', None, _('apply any matching decode filter')),
1320 ('', 'decode', None, _('apply any matching decode filter')),
1408 ] + walkopts,
1321 ] + walkopts,
1409 _('[OPTION]... FILE...'),
1322 _('[OPTION]... FILE...'),
1410 inferrepo=True)
1323 inferrepo=True)
1411 def cat(ui, repo, file1, *pats, **opts):
1324 def cat(ui, repo, file1, *pats, **opts):
1412 """output the current or given revision of files
1325 """output the current or given revision of files
1413
1326
1414 Print the specified files as they were at the given revision. If
1327 Print the specified files as they were at the given revision. If
1415 no revision is given, the parent of the working directory is used.
1328 no revision is given, the parent of the working directory is used.
1416
1329
1417 Output may be to a file, in which case the name of the file is
1330 Output may be to a file, in which case the name of the file is
1418 given using a format string. The formatting rules are as follows:
1331 given using a format string. The formatting rules are as follows:
1419
1332
1420 :``%%``: literal "%" character
1333 :``%%``: literal "%" character
1421 :``%s``: basename of file being printed
1334 :``%s``: basename of file being printed
1422 :``%d``: dirname of file being printed, or '.' if in repository root
1335 :``%d``: dirname of file being printed, or '.' if in repository root
1423 :``%p``: root-relative path name of file being printed
1336 :``%p``: root-relative path name of file being printed
1424 :``%H``: changeset hash (40 hexadecimal digits)
1337 :``%H``: changeset hash (40 hexadecimal digits)
1425 :``%R``: changeset revision number
1338 :``%R``: changeset revision number
1426 :``%h``: short-form changeset hash (12 hexadecimal digits)
1339 :``%h``: short-form changeset hash (12 hexadecimal digits)
1427 :``%r``: zero-padded changeset revision number
1340 :``%r``: zero-padded changeset revision number
1428 :``%b``: basename of the exporting repository
1341 :``%b``: basename of the exporting repository
1429
1342
1430 Returns 0 on success.
1343 Returns 0 on success.
1431 """
1344 """
1432 ctx = scmutil.revsingle(repo, opts.get('rev'))
1345 ctx = scmutil.revsingle(repo, opts.get('rev'))
1433 m = scmutil.match(ctx, (file1,) + pats, opts)
1346 m = scmutil.match(ctx, (file1,) + pats, opts)
1434
1347
1435 ui.pager('cat')
1348 ui.pager('cat')
1436 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1349 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1437
1350
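The placeholder table in the docstring above is easiest to read next to a concrete expansion. The snippet below is an illustration only, not Mercurial's implementation: it ignores %r zero-padding and %b, and the sample path, revision and node are invented.

import os

def expandsample(fmt, path='src/main.c', rev=42,
                 node='8580ff50825a' + '0' * 28):
    '''naive expansion of a few 'hg cat -o' style placeholders (sketch)'''
    subs = {
        's': os.path.basename(path),        # %s -> 'main.c'
        'd': os.path.dirname(path) or '.',  # %d -> 'src'
        'p': path,                          # %p -> 'src/main.c'
        'H': node,                          # %H -> full 40-digit hash
        'h': node[:12],                     # %h -> short hash
        'R': str(rev),                      # %R -> '42'
    }
    out = fmt.replace('%%', '\0')           # protect literal %%
    for key, value in subs.items():
        out = out.replace('%' + key, value)
    return out.replace('\0', '%')

# expandsample('%d/%s.r%R') == 'src/main.c.r42'
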
1438 @command('^clone',
1351 @command('^clone',
1439 [('U', 'noupdate', None, _('the clone will include an empty working '
1352 [('U', 'noupdate', None, _('the clone will include an empty working '
1440 'directory (only a repository)')),
1353 'directory (only a repository)')),
1441 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1354 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1442 _('REV')),
1355 _('REV')),
1443 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1356 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1444 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1357 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1445 ('', 'pull', None, _('use pull protocol to copy metadata')),
1358 ('', 'pull', None, _('use pull protocol to copy metadata')),
1446 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1359 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1447 ] + remoteopts,
1360 ] + remoteopts,
1448 _('[OPTION]... SOURCE [DEST]'),
1361 _('[OPTION]... SOURCE [DEST]'),
1449 norepo=True)
1362 norepo=True)
1450 def clone(ui, source, dest=None, **opts):
1363 def clone(ui, source, dest=None, **opts):
1451 """make a copy of an existing repository
1364 """make a copy of an existing repository
1452
1365
1453 Create a copy of an existing repository in a new directory.
1366 Create a copy of an existing repository in a new directory.
1454
1367
1455 If no destination directory name is specified, it defaults to the
1368 If no destination directory name is specified, it defaults to the
1456 basename of the source.
1369 basename of the source.
1457
1370
1458 The location of the source is added to the new repository's
1371 The location of the source is added to the new repository's
1459 ``.hg/hgrc`` file, as the default to be used for future pulls.
1372 ``.hg/hgrc`` file, as the default to be used for future pulls.
1460
1373
1461 Only local paths and ``ssh://`` URLs are supported as
1374 Only local paths and ``ssh://`` URLs are supported as
1462 destinations. For ``ssh://`` destinations, no working directory or
1375 destinations. For ``ssh://`` destinations, no working directory or
1463 ``.hg/hgrc`` will be created on the remote side.
1376 ``.hg/hgrc`` will be created on the remote side.
1464
1377
1465 If the source repository has a bookmark called '@' set, that
1378 If the source repository has a bookmark called '@' set, that
1466 revision will be checked out in the new repository by default.
1379 revision will be checked out in the new repository by default.
1467
1380
1468 To check out a particular version, use -u/--update, or
1381 To check out a particular version, use -u/--update, or
1469 -U/--noupdate to create a clone with no working directory.
1382 -U/--noupdate to create a clone with no working directory.
1470
1383
1471 To pull only a subset of changesets, specify one or more revisions
1384 To pull only a subset of changesets, specify one or more revisions
1472 identifiers with -r/--rev or branches with -b/--branch. The
1385 identifiers with -r/--rev or branches with -b/--branch. The
1473 resulting clone will contain only the specified changesets and
1386 resulting clone will contain only the specified changesets and
1474 their ancestors. These options (or 'clone src#rev dest') imply
1387 their ancestors. These options (or 'clone src#rev dest') imply
1475 --pull, even for local source repositories.
1388 --pull, even for local source repositories.
1476
1389
1477 .. note::
1390 .. note::
1478
1391
1479 Specifying a tag will include the tagged changeset but not the
1392 Specifying a tag will include the tagged changeset but not the
1480 changeset containing the tag.
1393 changeset containing the tag.
1481
1394
1482 .. container:: verbose
1395 .. container:: verbose
1483
1396
1484 For efficiency, hardlinks are used for cloning whenever the
1397 For efficiency, hardlinks are used for cloning whenever the
1485 source and destination are on the same filesystem (note this
1398 source and destination are on the same filesystem (note this
1486 applies only to the repository data, not to the working
1399 applies only to the repository data, not to the working
1487 directory). Some filesystems, such as AFS, implement hardlinking
1400 directory). Some filesystems, such as AFS, implement hardlinking
1488 incorrectly, but do not report errors. In these cases, use the
1401 incorrectly, but do not report errors. In these cases, use the
1489 --pull option to avoid hardlinking.
1402 --pull option to avoid hardlinking.
1490
1403
1491 In some cases, you can clone repositories and the working
1404 In some cases, you can clone repositories and the working
1492 directory using full hardlinks with ::
1405 directory using full hardlinks with ::
1493
1406
1494 $ cp -al REPO REPOCLONE
1407 $ cp -al REPO REPOCLONE
1495
1408
1496 This is the fastest way to clone, but it is not always safe. The
1409 This is the fastest way to clone, but it is not always safe. The
1497 operation is not atomic (making sure REPO is not modified during
1410 operation is not atomic (making sure REPO is not modified during
1498 the operation is up to you) and you have to make sure your
1411 the operation is up to you) and you have to make sure your
1499 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1412 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1500 so). Also, this is not compatible with certain extensions that
1413 so). Also, this is not compatible with certain extensions that
1501 place their metadata under the .hg directory, such as mq.
1414 place their metadata under the .hg directory, such as mq.
1502
1415
1503 Mercurial will update the working directory to the first applicable
1416 Mercurial will update the working directory to the first applicable
1504 revision from this list:
1417 revision from this list:
1505
1418
1506 a) null if -U or the source repository has no changesets
1419 a) null if -U or the source repository has no changesets
1507 b) if -u . and the source repository is local, the first parent of
1420 b) if -u . and the source repository is local, the first parent of
1508 the source repository's working directory
1421 the source repository's working directory
1509 c) the changeset specified with -u (if a branch name, this means the
1422 c) the changeset specified with -u (if a branch name, this means the
1510 latest head of that branch)
1423 latest head of that branch)
1511 d) the changeset specified with -r
1424 d) the changeset specified with -r
1512 e) the tipmost head specified with -b
1425 e) the tipmost head specified with -b
1513 f) the tipmost head specified with the url#branch source syntax
1426 f) the tipmost head specified with the url#branch source syntax
1514 g) the revision marked with the '@' bookmark, if present
1427 g) the revision marked with the '@' bookmark, if present
1515 h) the tipmost head of the default branch
1428 h) the tipmost head of the default branch
1516 i) tip
1429 i) tip
1517
1430
1518 When cloning from servers that support it, Mercurial may fetch
1431 When cloning from servers that support it, Mercurial may fetch
1519 pre-generated data from a server-advertised URL. When this is done,
1432 pre-generated data from a server-advertised URL. When this is done,
1520 hooks operating on incoming changesets and changegroups may fire twice,
1433 hooks operating on incoming changesets and changegroups may fire twice,
1521 once for the bundle fetched from the URL and another for any additional
1434 once for the bundle fetched from the URL and another for any additional
1522 data not fetched from this URL. In addition, if an error occurs, the
1435 data not fetched from this URL. In addition, if an error occurs, the
1523 repository may be rolled back to a partial clone. This behavior may
1436 repository may be rolled back to a partial clone. This behavior may
1524 change in future releases. See :hg:`help -e clonebundles` for more.
1437 change in future releases. See :hg:`help -e clonebundles` for more.
1525
1438
1526 Examples:
1439 Examples:
1527
1440
1528 - clone a remote repository to a new directory named hg/::
1441 - clone a remote repository to a new directory named hg/::
1529
1442
1530 hg clone https://www.mercurial-scm.org/repo/hg/
1443 hg clone https://www.mercurial-scm.org/repo/hg/
1531
1444
1532 - create a lightweight local clone::
1445 - create a lightweight local clone::
1533
1446
1534 hg clone project/ project-feature/
1447 hg clone project/ project-feature/
1535
1448
1536 - clone from an absolute path on an ssh server (note double-slash)::
1449 - clone from an absolute path on an ssh server (note double-slash)::
1537
1450
1538 hg clone ssh://user@server//home/projects/alpha/
1451 hg clone ssh://user@server//home/projects/alpha/
1539
1452
1540 - do a high-speed clone over a LAN while checking out a
1453 - do a high-speed clone over a LAN while checking out a
1541 specified version::
1454 specified version::
1542
1455
1543 hg clone --uncompressed http://server/repo -u 1.5
1456 hg clone --uncompressed http://server/repo -u 1.5
1544
1457
1545 - create a repository without changesets after a particular revision::
1458 - create a repository without changesets after a particular revision::
1546
1459
1547 hg clone -r 04e544 experimental/ good/
1460 hg clone -r 04e544 experimental/ good/
1548
1461
1549 - clone (and track) a particular named branch::
1462 - clone (and track) a particular named branch::
1550
1463
1551 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1464 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1552
1465
1553 See :hg:`help urls` for details on specifying URLs.
1466 See :hg:`help urls` for details on specifying URLs.
1554
1467
1555 Returns 0 on success.
1468 Returns 0 on success.
1556 """
1469 """
1557 opts = pycompat.byteskwargs(opts)
1470 opts = pycompat.byteskwargs(opts)
1558 if opts.get('noupdate') and opts.get('updaterev'):
1471 if opts.get('noupdate') and opts.get('updaterev'):
1559 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1472 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1560
1473
1561 r = hg.clone(ui, opts, source, dest,
1474 r = hg.clone(ui, opts, source, dest,
1562 pull=opts.get('pull'),
1475 pull=opts.get('pull'),
1563 stream=opts.get('uncompressed'),
1476 stream=opts.get('uncompressed'),
1564 rev=opts.get('rev'),
1477 rev=opts.get('rev'),
1565 update=opts.get('updaterev') or not opts.get('noupdate'),
1478 update=opts.get('updaterev') or not opts.get('noupdate'),
1566 branch=opts.get('branch'),
1479 branch=opts.get('branch'),
1567 shareopts=opts.get('shareopts'))
1480 shareopts=opts.get('shareopts'))
1568
1481
1569 return r is None
1482 return r is None
1570
1483
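Everything the command does after option validation is delegated to hg.clone(), so a programmatic clone is mostly a matter of passing the same keyword arguments. A minimal sketch, assuming a ui object is supplied and using only options shown above; 'simpleclone' is an invented name.

from mercurial import hg

def simpleclone(ui, source, dest):
    '''clone source into dest, forcing the pull protocol (sketch)'''
    r = hg.clone(ui, {}, source, dest, pull=True, update=True)
    # the command above treats a None return as failure
    return r is not None
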
1571 @command('^commit|ci',
1484 @command('^commit|ci',
1572 [('A', 'addremove', None,
1485 [('A', 'addremove', None,
1573 _('mark new/missing files as added/removed before committing')),
1486 _('mark new/missing files as added/removed before committing')),
1574 ('', 'close-branch', None,
1487 ('', 'close-branch', None,
1575 _('mark a branch head as closed')),
1488 _('mark a branch head as closed')),
1576 ('', 'amend', None, _('amend the parent of the working directory')),
1489 ('', 'amend', None, _('amend the parent of the working directory')),
1577 ('s', 'secret', None, _('use the secret phase for committing')),
1490 ('s', 'secret', None, _('use the secret phase for committing')),
1578 ('e', 'edit', None, _('invoke editor on commit messages')),
1491 ('e', 'edit', None, _('invoke editor on commit messages')),
1579 ('i', 'interactive', None, _('use interactive mode')),
1492 ('i', 'interactive', None, _('use interactive mode')),
1580 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1493 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1581 _('[OPTION]... [FILE]...'),
1494 _('[OPTION]... [FILE]...'),
1582 inferrepo=True)
1495 inferrepo=True)
1583 def commit(ui, repo, *pats, **opts):
1496 def commit(ui, repo, *pats, **opts):
1584 """commit the specified files or all outstanding changes
1497 """commit the specified files or all outstanding changes
1585
1498
1586 Commit changes to the given files into the repository. Unlike a
1499 Commit changes to the given files into the repository. Unlike a
1587 centralized SCM, this is a local operation. See
1500 centralized SCM, this is a local operation. See
1588 :hg:`push` for a way to actively distribute your changes.
1501 :hg:`push` for a way to actively distribute your changes.
1589
1502
1590 If a list of files is omitted, all changes reported by :hg:`status`
1503 If a list of files is omitted, all changes reported by :hg:`status`
1591 will be committed.
1504 will be committed.
1592
1505
1593 If you are committing the result of a merge, do not provide any
1506 If you are committing the result of a merge, do not provide any
1594 filenames or -I/-X filters.
1507 filenames or -I/-X filters.
1595
1508
1596 If no commit message is specified, Mercurial starts your
1509 If no commit message is specified, Mercurial starts your
1597 configured editor where you can enter a message. In case your
1510 configured editor where you can enter a message. In case your
1598 commit fails, you will find a backup of your message in
1511 commit fails, you will find a backup of your message in
1599 ``.hg/last-message.txt``.
1512 ``.hg/last-message.txt``.
1600
1513
1601 The --close-branch flag can be used to mark the current branch
1514 The --close-branch flag can be used to mark the current branch
1602 head closed. When all heads of a branch are closed, the branch
1515 head closed. When all heads of a branch are closed, the branch
1603 will be considered closed and no longer listed.
1516 will be considered closed and no longer listed.
1604
1517
1605 The --amend flag can be used to amend the parent of the
1518 The --amend flag can be used to amend the parent of the
1606 working directory with a new commit that contains the changes
1519 working directory with a new commit that contains the changes
1607 in the parent in addition to those currently reported by :hg:`status`,
1520 in the parent in addition to those currently reported by :hg:`status`,
1608 if there are any. The old commit is stored in a backup bundle in
1521 if there are any. The old commit is stored in a backup bundle in
1609 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1522 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1610 on how to restore it).
1523 on how to restore it).
1611
1524
1612 Message, user and date are taken from the amended commit unless
1525 Message, user and date are taken from the amended commit unless
1613 specified. When a message isn't specified on the command line,
1526 specified. When a message isn't specified on the command line,
1614 the editor will open with the message of the amended commit.
1527 the editor will open with the message of the amended commit.
1615
1528
1616 It is not possible to amend public changesets (see :hg:`help phases`)
1529 It is not possible to amend public changesets (see :hg:`help phases`)
1617 or changesets that have children.
1530 or changesets that have children.
1618
1531
1619 See :hg:`help dates` for a list of formats valid for -d/--date.
1532 See :hg:`help dates` for a list of formats valid for -d/--date.
1620
1533
1621 Returns 0 on success, 1 if nothing changed.
1534 Returns 0 on success, 1 if nothing changed.
1622
1535
1623 .. container:: verbose
1536 .. container:: verbose
1624
1537
1625 Examples:
1538 Examples:
1626
1539
1627 - commit all files ending in .py::
1540 - commit all files ending in .py::
1628
1541
1629 hg commit --include "set:**.py"
1542 hg commit --include "set:**.py"
1630
1543
1631 - commit all non-binary files::
1544 - commit all non-binary files::
1632
1545
1633 hg commit --exclude "set:binary()"
1546 hg commit --exclude "set:binary()"
1634
1547
1635 - amend the current commit and set the date to now::
1548 - amend the current commit and set the date to now::
1636
1549
1637 hg commit --amend --date now
1550 hg commit --amend --date now
1638 """
1551 """
1639 wlock = lock = None
1552 wlock = lock = None
1640 try:
1553 try:
1641 wlock = repo.wlock()
1554 wlock = repo.wlock()
1642 lock = repo.lock()
1555 lock = repo.lock()
1643 return _docommit(ui, repo, *pats, **opts)
1556 return _docommit(ui, repo, *pats, **opts)
1644 finally:
1557 finally:
1645 release(lock, wlock)
1558 release(lock, wlock)
1646
1559
1647 def _docommit(ui, repo, *pats, **opts):
1560 def _docommit(ui, repo, *pats, **opts):
1648 if opts.get(r'interactive'):
1561 if opts.get(r'interactive'):
1649 opts.pop(r'interactive')
1562 opts.pop(r'interactive')
1650 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1563 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1651 cmdutil.recordfilter, *pats,
1564 cmdutil.recordfilter, *pats,
1652 **opts)
1565 **opts)
1653 # ret can be 0 (no changes to record) or the value returned by
1566 # ret can be 0 (no changes to record) or the value returned by
1654 # commit(), 1 if nothing changed or None on success.
1567 # commit(), 1 if nothing changed or None on success.
1655 return 1 if ret == 0 else ret
1568 return 1 if ret == 0 else ret
1656
1569
1657 opts = pycompat.byteskwargs(opts)
1570 opts = pycompat.byteskwargs(opts)
1658 if opts.get('subrepos'):
1571 if opts.get('subrepos'):
1659 if opts.get('amend'):
1572 if opts.get('amend'):
1660 raise error.Abort(_('cannot amend with --subrepos'))
1573 raise error.Abort(_('cannot amend with --subrepos'))
1661 # Let --subrepos on the command line override config setting.
1574 # Let --subrepos on the command line override config setting.
1662 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1575 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1663
1576
1664 cmdutil.checkunfinished(repo, commit=True)
1577 cmdutil.checkunfinished(repo, commit=True)
1665
1578
1666 branch = repo[None].branch()
1579 branch = repo[None].branch()
1667 bheads = repo.branchheads(branch)
1580 bheads = repo.branchheads(branch)
1668
1581
1669 extra = {}
1582 extra = {}
1670 if opts.get('close_branch'):
1583 if opts.get('close_branch'):
1671 extra['close'] = 1
1584 extra['close'] = 1
1672
1585
1673 if not bheads:
1586 if not bheads:
1674 raise error.Abort(_('can only close branch heads'))
1587 raise error.Abort(_('can only close branch heads'))
1675 elif opts.get('amend'):
1588 elif opts.get('amend'):
1676 if repo[None].parents()[0].p1().branch() != branch and \
1589 if repo[None].parents()[0].p1().branch() != branch and \
1677 repo[None].parents()[0].p2().branch() != branch:
1590 repo[None].parents()[0].p2().branch() != branch:
1678 raise error.Abort(_('can only close branch heads'))
1591 raise error.Abort(_('can only close branch heads'))
1679
1592
1680 if opts.get('amend'):
1593 if opts.get('amend'):
1681 if ui.configbool('ui', 'commitsubrepos'):
1594 if ui.configbool('ui', 'commitsubrepos'):
1682 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1595 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1683
1596
1684 old = repo['.']
1597 old = repo['.']
1685 if not old.mutable():
1598 if not old.mutable():
1686 raise error.Abort(_('cannot amend public changesets'))
1599 raise error.Abort(_('cannot amend public changesets'))
1687 if len(repo[None].parents()) > 1:
1600 if len(repo[None].parents()) > 1:
1688 raise error.Abort(_('cannot amend while merging'))
1601 raise error.Abort(_('cannot amend while merging'))
1689 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1602 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1690 if not allowunstable and old.children():
1603 if not allowunstable and old.children():
1691 raise error.Abort(_('cannot amend changeset with children'))
1604 raise error.Abort(_('cannot amend changeset with children'))
1692
1605
1693 # Currently histedit gets confused if an amend happens while histedit
1606 # Currently histedit gets confused if an amend happens while histedit
1694 # is in progress. Since we have a checkunfinished command, we are
1607 # is in progress. Since we have a checkunfinished command, we are
1695 # temporarily honoring it.
1608 # temporarily honoring it.
1696 #
1609 #
1697 # Note: eventually this guard will be removed. Please do not expect
1610 # Note: eventually this guard will be removed. Please do not expect
1698 # this behavior to remain.
1611 # this behavior to remain.
1699 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1612 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1700 cmdutil.checkunfinished(repo)
1613 cmdutil.checkunfinished(repo)
1701
1614
1702 # commitfunc is used only for temporary amend commit by cmdutil.amend
1615 # commitfunc is used only for temporary amend commit by cmdutil.amend
1703 def commitfunc(ui, repo, message, match, opts):
1616 def commitfunc(ui, repo, message, match, opts):
1704 return repo.commit(message,
1617 return repo.commit(message,
1705 opts.get('user') or old.user(),
1618 opts.get('user') or old.user(),
1706 opts.get('date') or old.date(),
1619 opts.get('date') or old.date(),
1707 match,
1620 match,
1708 extra=extra)
1621 extra=extra)
1709
1622
1710 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1623 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1711 if node == old.node():
1624 if node == old.node():
1712 ui.status(_("nothing changed\n"))
1625 ui.status(_("nothing changed\n"))
1713 return 1
1626 return 1
1714 else:
1627 else:
1715 def commitfunc(ui, repo, message, match, opts):
1628 def commitfunc(ui, repo, message, match, opts):
1716 overrides = {}
1629 overrides = {}
1717 if opts.get('secret'):
1630 if opts.get('secret'):
1718 overrides[('phases', 'new-commit')] = 'secret'
1631 overrides[('phases', 'new-commit')] = 'secret'
1719
1632
1720 baseui = repo.baseui
1633 baseui = repo.baseui
1721 with baseui.configoverride(overrides, 'commit'):
1634 with baseui.configoverride(overrides, 'commit'):
1722 with ui.configoverride(overrides, 'commit'):
1635 with ui.configoverride(overrides, 'commit'):
1723 editform = cmdutil.mergeeditform(repo[None],
1636 editform = cmdutil.mergeeditform(repo[None],
1724 'commit.normal')
1637 'commit.normal')
1725 editor = cmdutil.getcommiteditor(
1638 editor = cmdutil.getcommiteditor(
1726 editform=editform, **pycompat.strkwargs(opts))
1639 editform=editform, **pycompat.strkwargs(opts))
1727 return repo.commit(message,
1640 return repo.commit(message,
1728 opts.get('user'),
1641 opts.get('user'),
1729 opts.get('date'),
1642 opts.get('date'),
1730 match,
1643 match,
1731 editor=editor,
1644 editor=editor,
1732 extra=extra)
1645 extra=extra)
1733
1646
1734 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1647 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1735
1648
1736 if not node:
1649 if not node:
1737 stat = cmdutil.postcommitstatus(repo, pats, opts)
1650 stat = cmdutil.postcommitstatus(repo, pats, opts)
1738 if stat[3]:
1651 if stat[3]:
1739 ui.status(_("nothing changed (%d missing files, see "
1652 ui.status(_("nothing changed (%d missing files, see "
1740 "'hg status')\n") % len(stat[3]))
1653 "'hg status')\n") % len(stat[3]))
1741 else:
1654 else:
1742 ui.status(_("nothing changed\n"))
1655 ui.status(_("nothing changed\n"))
1743 return 1
1656 return 1
1744
1657
1745 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1658 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1746
1659
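_docommit() above shows the pattern for scoping a configuration change to a single operation: build an overrides dict keyed by (section, name) and enter ui.configoverride() on both the repository's baseui and the request ui. A reduced sketch of the -s/--secret case; 'commitsecret' is an invented helper, and message handling, matchers and editors are omitted.

def commitsecret(ui, repo, message):
    '''commit the working directory in the secret phase (sketch)'''
    overrides = {('phases', 'new-commit'): 'secret'}
    with repo.baseui.configoverride(overrides, 'commit'):
        with ui.configoverride(overrides, 'commit'):
            return repo.commit(message)
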
1747 @command('config|showconfig|debugconfig',
1660 @command('config|showconfig|debugconfig',
1748 [('u', 'untrusted', None, _('show untrusted configuration options')),
1661 [('u', 'untrusted', None, _('show untrusted configuration options')),
1749 ('e', 'edit', None, _('edit user config')),
1662 ('e', 'edit', None, _('edit user config')),
1750 ('l', 'local', None, _('edit repository config')),
1663 ('l', 'local', None, _('edit repository config')),
1751 ('g', 'global', None, _('edit global config'))] + formatteropts,
1664 ('g', 'global', None, _('edit global config'))] + formatteropts,
1752 _('[-u] [NAME]...'),
1665 _('[-u] [NAME]...'),
1753 optionalrepo=True)
1666 optionalrepo=True)
1754 def config(ui, repo, *values, **opts):
1667 def config(ui, repo, *values, **opts):
1755 """show combined config settings from all hgrc files
1668 """show combined config settings from all hgrc files
1756
1669
1757 With no arguments, print names and values of all config items.
1670 With no arguments, print names and values of all config items.
1758
1671
1759 With one argument of the form section.name, print just the value
1672 With one argument of the form section.name, print just the value
1760 of that config item.
1673 of that config item.
1761
1674
1762 With multiple arguments, print names and values of all config
1675 With multiple arguments, print names and values of all config
1763 items with matching section names.
1676 items with matching section names.
1764
1677
1765 With --edit, start an editor on the user-level config file. With
1678 With --edit, start an editor on the user-level config file. With
1766 --global, edit the system-wide config file. With --local, edit the
1679 --global, edit the system-wide config file. With --local, edit the
1767 repository-level config file.
1680 repository-level config file.
1768
1681
1769 With --debug, the source (filename and line number) is printed
1682 With --debug, the source (filename and line number) is printed
1770 for each config item.
1683 for each config item.
1771
1684
1772 See :hg:`help config` for more information about config files.
1685 See :hg:`help config` for more information about config files.
1773
1686
1774 Returns 0 on success, 1 if NAME does not exist.
1687 Returns 0 on success, 1 if NAME does not exist.
1775
1688
1776 """
1689 """
1777
1690
1778 opts = pycompat.byteskwargs(opts)
1691 opts = pycompat.byteskwargs(opts)
1779 if opts.get('edit') or opts.get('local') or opts.get('global'):
1692 if opts.get('edit') or opts.get('local') or opts.get('global'):
1780 if opts.get('local') and opts.get('global'):
1693 if opts.get('local') and opts.get('global'):
1781 raise error.Abort(_("can't use --local and --global together"))
1694 raise error.Abort(_("can't use --local and --global together"))
1782
1695
1783 if opts.get('local'):
1696 if opts.get('local'):
1784 if not repo:
1697 if not repo:
1785 raise error.Abort(_("can't use --local outside a repository"))
1698 raise error.Abort(_("can't use --local outside a repository"))
1786 paths = [repo.vfs.join('hgrc')]
1699 paths = [repo.vfs.join('hgrc')]
1787 elif opts.get('global'):
1700 elif opts.get('global'):
1788 paths = rcutil.systemrcpath()
1701 paths = rcutil.systemrcpath()
1789 else:
1702 else:
1790 paths = rcutil.userrcpath()
1703 paths = rcutil.userrcpath()
1791
1704
1792 for f in paths:
1705 for f in paths:
1793 if os.path.exists(f):
1706 if os.path.exists(f):
1794 break
1707 break
1795 else:
1708 else:
1796 if opts.get('global'):
1709 if opts.get('global'):
1797 samplehgrc = uimod.samplehgrcs['global']
1710 samplehgrc = uimod.samplehgrcs['global']
1798 elif opts.get('local'):
1711 elif opts.get('local'):
1799 samplehgrc = uimod.samplehgrcs['local']
1712 samplehgrc = uimod.samplehgrcs['local']
1800 else:
1713 else:
1801 samplehgrc = uimod.samplehgrcs['user']
1714 samplehgrc = uimod.samplehgrcs['user']
1802
1715
1803 f = paths[0]
1716 f = paths[0]
1804 fp = open(f, "w")
1717 fp = open(f, "w")
1805 fp.write(samplehgrc)
1718 fp.write(samplehgrc)
1806 fp.close()
1719 fp.close()
1807
1720
1808 editor = ui.geteditor()
1721 editor = ui.geteditor()
1809 ui.system("%s \"%s\"" % (editor, f),
1722 ui.system("%s \"%s\"" % (editor, f),
1810 onerr=error.Abort, errprefix=_("edit failed"),
1723 onerr=error.Abort, errprefix=_("edit failed"),
1811 blockedtag='config_edit')
1724 blockedtag='config_edit')
1812 return
1725 return
1813 ui.pager('config')
1726 ui.pager('config')
1814 fm = ui.formatter('config', opts)
1727 fm = ui.formatter('config', opts)
1815 for t, f in rcutil.rccomponents():
1728 for t, f in rcutil.rccomponents():
1816 if t == 'path':
1729 if t == 'path':
1817 ui.debug('read config from: %s\n' % f)
1730 ui.debug('read config from: %s\n' % f)
1818 elif t == 'items':
1731 elif t == 'items':
1819 for section, name, value, source in f:
1732 for section, name, value, source in f:
1820 ui.debug('set config by: %s\n' % source)
1733 ui.debug('set config by: %s\n' % source)
1821 else:
1734 else:
1822 raise error.ProgrammingError('unknown rctype: %s' % t)
1735 raise error.ProgrammingError('unknown rctype: %s' % t)
1823 untrusted = bool(opts.get('untrusted'))
1736 untrusted = bool(opts.get('untrusted'))
1824 if values:
1737 if values:
1825 sections = [v for v in values if '.' not in v]
1738 sections = [v for v in values if '.' not in v]
1826 items = [v for v in values if '.' in v]
1739 items = [v for v in values if '.' in v]
1827 if len(items) > 1 or items and sections:
1740 if len(items) > 1 or items and sections:
1828 raise error.Abort(_('only one config item permitted'))
1741 raise error.Abort(_('only one config item permitted'))
1829 matched = False
1742 matched = False
1830 for section, name, value in ui.walkconfig(untrusted=untrusted):
1743 for section, name, value in ui.walkconfig(untrusted=untrusted):
1831 source = ui.configsource(section, name, untrusted)
1744 source = ui.configsource(section, name, untrusted)
1832 value = pycompat.bytestr(value)
1745 value = pycompat.bytestr(value)
1833 if fm.isplain():
1746 if fm.isplain():
1834 source = source or 'none'
1747 source = source or 'none'
1835 value = value.replace('\n', '\\n')
1748 value = value.replace('\n', '\\n')
1836 entryname = section + '.' + name
1749 entryname = section + '.' + name
1837 if values:
1750 if values:
1838 for v in values:
1751 for v in values:
1839 if v == section:
1752 if v == section:
1840 fm.startitem()
1753 fm.startitem()
1841 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1754 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1842 fm.write('name value', '%s=%s\n', entryname, value)
1755 fm.write('name value', '%s=%s\n', entryname, value)
1843 matched = True
1756 matched = True
1844 elif v == entryname:
1757 elif v == entryname:
1845 fm.startitem()
1758 fm.startitem()
1846 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1759 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1847 fm.write('value', '%s\n', value)
1760 fm.write('value', '%s\n', value)
1848 fm.data(name=entryname)
1761 fm.data(name=entryname)
1849 matched = True
1762 matched = True
1850 else:
1763 else:
1851 fm.startitem()
1764 fm.startitem()
1852 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1765 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1853 fm.write('name value', '%s=%s\n', entryname, value)
1766 fm.write('name value', '%s=%s\n', entryname, value)
1854 matched = True
1767 matched = True
1855 fm.end()
1768 fm.end()
1856 if matched:
1769 if matched:
1857 return 0
1770 return 0
1858 return 1
1771 return 1
1859
1772
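The read side of 'hg config SECTION.NAME' boils down to walking every loaded item and matching on the dotted name, as the loop above does. A compact sketch of just that lookup; 'showitem' is an invented helper name.

def showitem(ui, dotted, untrusted=False):
    '''print one config item and where it came from; return 0/1 (sketch)'''
    section, name = dotted.split('.', 1)
    for s, n, value in ui.walkconfig(untrusted=untrusted):
        if (s, n) == (section, name):
            source = ui.configsource(s, n, untrusted)
            ui.write('%s.%s=%s (from %s)\n' % (s, n, value, source or 'none'))
            return 0
    return 1
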
1860 @command('copy|cp',
1773 @command('copy|cp',
1861 [('A', 'after', None, _('record a copy that has already occurred')),
1774 [('A', 'after', None, _('record a copy that has already occurred')),
1862 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1775 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1863 ] + walkopts + dryrunopts,
1776 ] + walkopts + dryrunopts,
1864 _('[OPTION]... [SOURCE]... DEST'))
1777 _('[OPTION]... [SOURCE]... DEST'))
1865 def copy(ui, repo, *pats, **opts):
1778 def copy(ui, repo, *pats, **opts):
1866 """mark files as copied for the next commit
1779 """mark files as copied for the next commit
1867
1780
1868 Mark dest as having copies of source files. If dest is a
1781 Mark dest as having copies of source files. If dest is a
1869 directory, copies are put in that directory. If dest is a file,
1782 directory, copies are put in that directory. If dest is a file,
1870 the source must be a single file.
1783 the source must be a single file.
1871
1784
1872 By default, this command copies the contents of files as they
1785 By default, this command copies the contents of files as they
1873 exist in the working directory. If invoked with -A/--after, the
1786 exist in the working directory. If invoked with -A/--after, the
1874 operation is recorded, but no copying is performed.
1787 operation is recorded, but no copying is performed.
1875
1788
1876 This command takes effect with the next commit. To undo a copy
1789 This command takes effect with the next commit. To undo a copy
1877 before that, see :hg:`revert`.
1790 before that, see :hg:`revert`.
1878
1791
1879 Returns 0 on success, 1 if errors are encountered.
1792 Returns 0 on success, 1 if errors are encountered.
1880 """
1793 """
1881 opts = pycompat.byteskwargs(opts)
1794 opts = pycompat.byteskwargs(opts)
1882 with repo.wlock(False):
1795 with repo.wlock(False):
1883 return cmdutil.copy(ui, repo, pats, opts)
1796 return cmdutil.copy(ui, repo, pats, opts)
1884
1797
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    ui.pager('diff')
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))

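# Editorial sketch (not part of the original module): how the --rev/--change
# arguments documented above select the two sides of the comparison. The real
# resolution is done by scmutil.revsingle/revpair in diff(); the helper name
# and the string labels here are hypothetical, for illustration only.
def _diff_operands_sketch(revs, change):
    """Describe the (left, right) operands picked for the given input."""
    if revs and change:
        raise ValueError('cannot specify --rev and --change at the same time')
    if change:
        # -c REV compares REV against its first parent
        return ('p1(%s)' % change, change)
    if not revs:
        # no revision: working directory against its first parent
        return ('p1(wdir)', 'wdir')
    if len(revs) == 1:
        # a single -r REV: REV against the working directory
        return (revs[0], 'wdir')
    # two revisions: compare them directly
    return (revs[0], revs[-1])
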
@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    ui.pager('export')
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))

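# Editorial sketch (not part of the original module): a toy stand-in for the
# -o/--output format keys documented above. The real expansion happens inside
# cmdutil.export(); this version covers only a few keys, and the function name
# and its parameters (seqno, total, shorthash, firstline) are hypothetical.
def _export_filename_sketch(fmt, seqno, total, shorthash, firstline):
    import re
    keys = {
        '%': '%',                                 # literal percent sign
        'n': '%0*d' % (len(str(total)), seqno),   # zero-padded sequence number
        'N': str(total),                          # number of patches generated
        'h': shorthash,                           # short-form changeset hash
        'm': re.sub(r'\W', '_', firstline),       # sanitized first summary line
    }
    out = []
    i = 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt) and fmt[i + 1] in keys:
            out.append(keys[fmt[i + 1]])
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)
# e.g. _export_filename_sketch('%n-%m.patch', 2, 10, 'abc123def456',
#                              'fix diff bug') -> '02-fix_diff_bug.patch'
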
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ui.pager('files')
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))

@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return rejected and 1 or 0

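# Editorial note (not part of the original module): 'rejected and 1 or 0'
# above is the old 'cond and a or b' conditional idiom. It returns 1 when
# cmdutil.forget() reported rejected files and 0 otherwise, which is what the
# "Returns 0 on success" line in the docstring promises.
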
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion.
    '''
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)

def _dograft(ui, repo, *revs, **opts):
    opts = pycompat.byteskwargs(opts)
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    revs = list(revs)
    revs.extend(opts.get('rev'))

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft',
                                     **pycompat.strkwargs(opts))

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise error.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        # The only changesets we can be sure doesn't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % util.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % util.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        repo.vfs.unlinkpath('graftstate', ignoremissing=True)

    return 0

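# Editorial sketch (not part of the original module): the 'graftstate' file
# read and written in _dograft() above is simply the list of changesets still
# to be grafted, one full hex node id per line, which is what makes
# 'hg graft --continue' possible after a conflict. A minimal standalone reader
# under that assumption (the helper name and 'path' argument are hypothetical):
def _read_graftstate_sketch(path):
    """Return the pending node ids recorded for 'hg graft --continue'."""
    with open(path, 'rb') as fp:
        return [line.strip() for line in fp if line.strip()]
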
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + formatteropts + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search revision history for a pattern in specified files

    Search revision history for a regular expression in the specified
    files or the entire project.

    By default, grep prints the most recent revision number for each
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that becomes
    a non-match, or "+" for a non-match that becomes a match), use the
    --all flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified (and -f/--follow isn't set), all files in
    the repository are searched, including those that don't exist in the
    current branch or have been deleted in a prior changeset.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def findpos(self):
            """Iterate all (start, end) indices of matches"""
            yield self.colstart, self.colend
            p = self.colend
            while p < len(self.line):
                m = regexp.search(self.line, p)
                if not m:
                    break
                yield m.span()
                p = m.end()

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fm, fn, ctx, pstates, states):
        rev = ctx.rev()
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = str
        if ui.quiet:
            datefmt = '%Y-%m-%d'
        else:
            datefmt = '%a %b %d %H:%M:%S %Y %1%2'
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.data(node=fm.hexfunc(ctx.node()))
            cols = [
                ('filename', fn, True),
                ('rev', rev, True),
                ('linenumber', l.linenum, opts.get('line_number')),
            ]
            if opts.get('all'):
                cols.append(('change', change, True))
            cols.extend([
                ('user', formatuser(ctx.user()), opts.get('user')),
                ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
            ])
            lastcol = next(name for name, data, cond in reversed(cols) if cond)
            for name, data, cond in cols:
                field = fieldnamemap.get(name, name)
                fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
                if cond and name != lastcol:
                    fm.plain(sep, label='grep.sep')
            if not opts.get('files_with_matches'):
                fm.plain(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    fm.plain(_(" Binary file matches"))
                else:
                    displaymatches(fm.nested('texts'), l)
            fm.plain(eol)
            found = True
            if opts.get('files_with_matches'):
                break
        return found

    def displaymatches(fm, l):
        p = 0
        for s, e in l.findpos():
            if p < s:
                fm.startitem()
                fm.write('text', '%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write('text', '%s', l.line[s:e], label='grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write('text', '%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    ui.pager('grep')
    fm = ui.formatter('grep', opts)
    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fm, fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]
    fm.end()

    return not found

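# Editorial sketch (not part of the original module): the '+'/'-' markers
# printed by 'hg grep --all' come from difflinestates() above, which diffs the
# matching lines of a file against its parent using difflib.SequenceMatcher.
# A standalone version over plain lists of strings (the helper name and its
# parameters are hypothetical):
def _match_status_sketch(parent_lines, child_lines):
    import difflib
    sm = difflib.SequenceMatcher(None, parent_lines, child_lines)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in parent_lines[alo:ahi]:
                yield ('-', line)   # a match went away in the child
        if tag in ('insert', 'replace'):
            for line in child_lines[blo:bhi]:
                yield ('+', line)   # a non-match became a match in the child
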
@command('heads',
    [('r', 'rev', '',
      _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    ui.pager('heads')
    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

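# Editorial note (not part of the original module): as documented above,
# 'hg heads' lists open branch heads, 'hg heads --topo' ignores named-branch
# mechanics and lists changesets with no children at all, and 'hg heads .'
# restricts the output to the branch of the working directory's parent.
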
2743 @command('help',
2656 @command('help',
2744 [('e', 'extension', None, _('show only help for extensions')),
2657 [('e', 'extension', None, _('show only help for extensions')),
2745 ('c', 'command', None, _('show only help for commands')),
2658 ('c', 'command', None, _('show only help for commands')),
2746 ('k', 'keyword', None, _('show topics matching keyword')),
2659 ('k', 'keyword', None, _('show topics matching keyword')),
2747 ('s', 'system', [], _('show help for specific platform(s)')),
2660 ('s', 'system', [], _('show help for specific platform(s)')),
2748 ],
2661 ],
2749 _('[-ecks] [TOPIC]'),
2662 _('[-ecks] [TOPIC]'),
2750 norepo=True)
2663 norepo=True)
2751 def help_(ui, name=None, **opts):
2664 def help_(ui, name=None, **opts):
2752 """show help for a given topic or a help overview
2665 """show help for a given topic or a help overview
2753
2666
2754 With no arguments, print a list of commands with short help messages.
2667 With no arguments, print a list of commands with short help messages.
2755
2668
2756 Given a topic, extension, or command name, print help for that
2669 Given a topic, extension, or command name, print help for that
2757 topic.
2670 topic.
2758
2671
2759 Returns 0 if successful.
2672 Returns 0 if successful.
2760 """
2673 """
2761
2674
2762 keep = opts.get(r'system') or []
2675 keep = opts.get(r'system') or []
2763 if len(keep) == 0:
2676 if len(keep) == 0:
2764 if pycompat.sysplatform.startswith('win'):
2677 if pycompat.sysplatform.startswith('win'):
2765 keep.append('windows')
2678 keep.append('windows')
2766 elif pycompat.sysplatform == 'OpenVMS':
2679 elif pycompat.sysplatform == 'OpenVMS':
2767 keep.append('vms')
2680 keep.append('vms')
2768 elif pycompat.sysplatform == 'plan9':
2681 elif pycompat.sysplatform == 'plan9':
2769 keep.append('plan9')
2682 keep.append('plan9')
2770 else:
2683 else:
2771 keep.append('unix')
2684 keep.append('unix')
2772 keep.append(pycompat.sysplatform.lower())
2685 keep.append(pycompat.sysplatform.lower())
2773 if ui.verbose:
2686 if ui.verbose:
2774 keep.append('verbose')
2687 keep.append('verbose')
2775
2688
2776 formatted = help.formattedhelp(ui, name, keep=keep, **opts)
2689 formatted = help.formattedhelp(ui, name, keep=keep, **opts)
2777 ui.pager('help')
2690 ui.pager('help')
2778 ui.write(formatted)
2691 ui.write(formatted)
2779
2692
2780
2693
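# Editor's note: a minimal standalone sketch of the platform-keyword
# selection performed in help_() above. Using the stdlib sys.platform in
# place of pycompat.sysplatform is an assumption made purely for
# illustration; it mirrors, not replaces, the logic shown above.
import sys

def platformkeywords(verbose=False):
    keep = []
    if sys.platform.startswith('win'):
        keep.append('windows')
    elif sys.platform == 'OpenVMS':
        keep.append('vms')
    elif sys.platform == 'plan9':
        keep.append('plan9')
    else:
        keep.append('unix')
        keep.append(sys.platform.lower())
    if verbose:
        keep.append('verbose')
    return keep

print(platformkeywords())  # e.g. ['unix', 'linux'] on a Linux host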
2781 @command('identify|id',
2694 @command('identify|id',
2782 [('r', 'rev', '',
2695 [('r', 'rev', '',
2783 _('identify the specified revision'), _('REV')),
2696 _('identify the specified revision'), _('REV')),
2784 ('n', 'num', None, _('show local revision number')),
2697 ('n', 'num', None, _('show local revision number')),
2785 ('i', 'id', None, _('show global revision id')),
2698 ('i', 'id', None, _('show global revision id')),
2786 ('b', 'branch', None, _('show branch')),
2699 ('b', 'branch', None, _('show branch')),
2787 ('t', 'tags', None, _('show tags')),
2700 ('t', 'tags', None, _('show tags')),
2788 ('B', 'bookmarks', None, _('show bookmarks')),
2701 ('B', 'bookmarks', None, _('show bookmarks')),
2789 ] + remoteopts,
2702 ] + remoteopts,
2790 _('[-nibtB] [-r REV] [SOURCE]'),
2703 _('[-nibtB] [-r REV] [SOURCE]'),
2791 optionalrepo=True)
2704 optionalrepo=True)
2792 def identify(ui, repo, source=None, rev=None,
2705 def identify(ui, repo, source=None, rev=None,
2793 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2706 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2794 """identify the working directory or specified revision
2707 """identify the working directory or specified revision
2795
2708
2796 Print a summary identifying the repository state at REV using one or
2709 Print a summary identifying the repository state at REV using one or
2797 two parent hash identifiers, followed by a "+" if the working
2710 two parent hash identifiers, followed by a "+" if the working
2798 directory has uncommitted changes, the branch name (if not default),
2711 directory has uncommitted changes, the branch name (if not default),
2799 a list of tags, and a list of bookmarks.
2712 a list of tags, and a list of bookmarks.
2800
2713
2801 When REV is not given, print a summary of the current state of the
2714 When REV is not given, print a summary of the current state of the
2802 repository.
2715 repository.
2803
2716
2804 Specifying a path to a repository root or Mercurial bundle will
2717 Specifying a path to a repository root or Mercurial bundle will
2805 cause lookup to operate on that repository/bundle.
2718 cause lookup to operate on that repository/bundle.
2806
2719
2807 .. container:: verbose
2720 .. container:: verbose
2808
2721
2809 Examples:
2722 Examples:
2810
2723
2811 - generate a build identifier for the working directory::
2724 - generate a build identifier for the working directory::
2812
2725
2813 hg id --id > build-id.dat
2726 hg id --id > build-id.dat
2814
2727
2815 - find the revision corresponding to a tag::
2728 - find the revision corresponding to a tag::
2816
2729
2817 hg id -n -r 1.3
2730 hg id -n -r 1.3
2818
2731
2819 - check the most recent revision of a remote repository::
2732 - check the most recent revision of a remote repository::
2820
2733
2821 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2734 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2822
2735
2823 See :hg:`log` for generating more information about specific revisions,
2736 See :hg:`log` for generating more information about specific revisions,
2824 including full hash identifiers.
2737 including full hash identifiers.
2825
2738
2826 Returns 0 if successful.
2739 Returns 0 if successful.
2827 """
2740 """
2828
2741
2829 opts = pycompat.byteskwargs(opts)
2742 opts = pycompat.byteskwargs(opts)
2830 if not repo and not source:
2743 if not repo and not source:
2831 raise error.Abort(_("there is no Mercurial repository here "
2744 raise error.Abort(_("there is no Mercurial repository here "
2832 "(.hg not found)"))
2745 "(.hg not found)"))
2833
2746
2834 if ui.debugflag:
2747 if ui.debugflag:
2835 hexfunc = hex
2748 hexfunc = hex
2836 else:
2749 else:
2837 hexfunc = short
2750 hexfunc = short
2838 default = not (num or id or branch or tags or bookmarks)
2751 default = not (num or id or branch or tags or bookmarks)
2839 output = []
2752 output = []
2840 revs = []
2753 revs = []
2841
2754
2842 if source:
2755 if source:
2843 source, branches = hg.parseurl(ui.expandpath(source))
2756 source, branches = hg.parseurl(ui.expandpath(source))
2844 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2757 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2845 repo = peer.local()
2758 repo = peer.local()
2846 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2759 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2847
2760
2848 if not repo:
2761 if not repo:
2849 if num or branch or tags:
2762 if num or branch or tags:
2850 raise error.Abort(
2763 raise error.Abort(
2851 _("can't query remote revision number, branch, or tags"))
2764 _("can't query remote revision number, branch, or tags"))
2852 if not rev and revs:
2765 if not rev and revs:
2853 rev = revs[0]
2766 rev = revs[0]
2854 if not rev:
2767 if not rev:
2855 rev = "tip"
2768 rev = "tip"
2856
2769
2857 remoterev = peer.lookup(rev)
2770 remoterev = peer.lookup(rev)
2858 if default or id:
2771 if default or id:
2859 output = [hexfunc(remoterev)]
2772 output = [hexfunc(remoterev)]
2860
2773
2861 def getbms():
2774 def getbms():
2862 bms = []
2775 bms = []
2863
2776
2864 if 'bookmarks' in peer.listkeys('namespaces'):
2777 if 'bookmarks' in peer.listkeys('namespaces'):
2865 hexremoterev = hex(remoterev)
2778 hexremoterev = hex(remoterev)
2866 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2779 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2867 if bmr == hexremoterev]
2780 if bmr == hexremoterev]
2868
2781
2869 return sorted(bms)
2782 return sorted(bms)
2870
2783
2871 if bookmarks:
2784 if bookmarks:
2872 output.extend(getbms())
2785 output.extend(getbms())
2873 elif default and not ui.quiet:
2786 elif default and not ui.quiet:
2874 # multiple bookmarks for a single parent separated by '/'
2787 # multiple bookmarks for a single parent separated by '/'
2875 bm = '/'.join(getbms())
2788 bm = '/'.join(getbms())
2876 if bm:
2789 if bm:
2877 output.append(bm)
2790 output.append(bm)
2878 else:
2791 else:
2879 ctx = scmutil.revsingle(repo, rev, None)
2792 ctx = scmutil.revsingle(repo, rev, None)
2880
2793
2881 if ctx.rev() is None:
2794 if ctx.rev() is None:
2882 ctx = repo[None]
2795 ctx = repo[None]
2883 parents = ctx.parents()
2796 parents = ctx.parents()
2884 taglist = []
2797 taglist = []
2885 for p in parents:
2798 for p in parents:
2886 taglist.extend(p.tags())
2799 taglist.extend(p.tags())
2887
2800
2888 changed = ""
2801 changed = ""
2889 if default or id or num:
2802 if default or id or num:
2890 if (any(repo.status())
2803 if (any(repo.status())
2891 or any(ctx.sub(s).dirty() for s in ctx.substate)):
2804 or any(ctx.sub(s).dirty() for s in ctx.substate)):
2892 changed = '+'
2805 changed = '+'
2893 if default or id:
2806 if default or id:
2894 output = ["%s%s" %
2807 output = ["%s%s" %
2895 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2808 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2896 if num:
2809 if num:
2897 output.append("%s%s" %
2810 output.append("%s%s" %
2898 ('+'.join([str(p.rev()) for p in parents]), changed))
2811 ('+'.join([str(p.rev()) for p in parents]), changed))
2899 else:
2812 else:
2900 if default or id:
2813 if default or id:
2901 output = [hexfunc(ctx.node())]
2814 output = [hexfunc(ctx.node())]
2902 if num:
2815 if num:
2903 output.append(str(ctx.rev()))
2816 output.append(str(ctx.rev()))
2904 taglist = ctx.tags()
2817 taglist = ctx.tags()
2905
2818
2906 if default and not ui.quiet:
2819 if default and not ui.quiet:
2907 b = ctx.branch()
2820 b = ctx.branch()
2908 if b != 'default':
2821 if b != 'default':
2909 output.append("(%s)" % b)
2822 output.append("(%s)" % b)
2910
2823
2911 # multiple tags for a single parent separated by '/'
2824 # multiple tags for a single parent separated by '/'
2912 t = '/'.join(taglist)
2825 t = '/'.join(taglist)
2913 if t:
2826 if t:
2914 output.append(t)
2827 output.append(t)
2915
2828
2916 # multiple bookmarks for a single parent separated by '/'
2829 # multiple bookmarks for a single parent separated by '/'
2917 bm = '/'.join(ctx.bookmarks())
2830 bm = '/'.join(ctx.bookmarks())
2918 if bm:
2831 if bm:
2919 output.append(bm)
2832 output.append(bm)
2920 else:
2833 else:
2921 if branch:
2834 if branch:
2922 output.append(ctx.branch())
2835 output.append(ctx.branch())
2923
2836
2924 if tags:
2837 if tags:
2925 output.extend(taglist)
2838 output.extend(taglist)
2926
2839
2927 if bookmarks:
2840 if bookmarks:
2928 output.extend(ctx.bookmarks())
2841 output.extend(ctx.bookmarks())
2929
2842
2930 ui.write("%s\n" % ' '.join(output))
2843 ui.write("%s\n" % ' '.join(output))
2931
2844
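# Editor's note: a hedged sketch of consuming `hg id` output from a build
# script, as suggested by the docstring above. It assumes an `hg`
# executable on PATH and a repository in the current directory.
import subprocess

out = subprocess.check_output(['hg', 'id', '--id']).decode('ascii').strip()
dirty = out.endswith('+')   # trailing '+' marks uncommitted changes
node = out.rstrip('+')      # short hash, or hash+hash for a merge state
print('parent %s, working directory %s'
      % (node, 'modified' if dirty else 'clean'))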
2932 @command('import|patch',
2845 @command('import|patch',
2933 [('p', 'strip', 1,
2846 [('p', 'strip', 1,
2934 _('directory strip option for patch. This has the same '
2847 _('directory strip option for patch. This has the same '
2935 'meaning as the corresponding patch option'), _('NUM')),
2848 'meaning as the corresponding patch option'), _('NUM')),
2936 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2849 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2937 ('e', 'edit', False, _('invoke editor on commit messages')),
2850 ('e', 'edit', False, _('invoke editor on commit messages')),
2938 ('f', 'force', None,
2851 ('f', 'force', None,
2939 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2852 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2940 ('', 'no-commit', None,
2853 ('', 'no-commit', None,
2941 _("don't commit, just update the working directory")),
2854 _("don't commit, just update the working directory")),
2942 ('', 'bypass', None,
2855 ('', 'bypass', None,
2943 _("apply patch without touching the working directory")),
2856 _("apply patch without touching the working directory")),
2944 ('', 'partial', None,
2857 ('', 'partial', None,
2945 _('commit even if some hunks fail')),
2858 _('commit even if some hunks fail')),
2946 ('', 'exact', None,
2859 ('', 'exact', None,
2947 _('abort if patch would apply lossily')),
2860 _('abort if patch would apply lossily')),
2948 ('', 'prefix', '',
2861 ('', 'prefix', '',
2949 _('apply patch to subdirectory'), _('DIR')),
2862 _('apply patch to subdirectory'), _('DIR')),
2950 ('', 'import-branch', None,
2863 ('', 'import-branch', None,
2951 _('use any branch information in patch (implied by --exact)'))] +
2864 _('use any branch information in patch (implied by --exact)'))] +
2952 commitopts + commitopts2 + similarityopts,
2865 commitopts + commitopts2 + similarityopts,
2953 _('[OPTION]... PATCH...'))
2866 _('[OPTION]... PATCH...'))
2954 def import_(ui, repo, patch1=None, *patches, **opts):
2867 def import_(ui, repo, patch1=None, *patches, **opts):
2955 """import an ordered set of patches
2868 """import an ordered set of patches
2956
2869
2957 Import a list of patches and commit them individually (unless
2870 Import a list of patches and commit them individually (unless
2958 --no-commit is specified).
2871 --no-commit is specified).
2959
2872
2960 To read a patch from standard input (stdin), use "-" as the patch
2873 To read a patch from standard input (stdin), use "-" as the patch
2961 name. If a URL is specified, the patch will be downloaded from
2874 name. If a URL is specified, the patch will be downloaded from
2962 there.
2875 there.
2963
2876
2964 Import first applies changes to the working directory (unless
2877 Import first applies changes to the working directory (unless
2965 --bypass is specified) and will abort if there are outstanding
2878 --bypass is specified) and will abort if there are outstanding
2966 changes.
2879 changes.
2967
2880
2968 Use --bypass to apply and commit patches directly to the
2881 Use --bypass to apply and commit patches directly to the
2969 repository, without affecting the working directory. Without
2882 repository, without affecting the working directory. Without
2970 --exact, patches will be applied on top of the working directory
2883 --exact, patches will be applied on top of the working directory
2971 parent revision.
2884 parent revision.
2972
2885
2973 You can import a patch straight from a mail message. Even patches
2886 You can import a patch straight from a mail message. Even patches
2974 as attachments work (to use the body part, it must have type
2887 as attachments work (to use the body part, it must have type
2975 text/plain or text/x-patch). The From and Subject headers of the
2888 text/plain or text/x-patch). The From and Subject headers of the
2976 email message are used as the default committer and commit message.
2889 email message are used as the default committer and commit message.
2977 All text/plain body parts before the first diff are added to the
2890 All text/plain body parts before the first diff are added to the
2978 commit message.
2891 commit message.
2979
2892
2980 If the imported patch was generated by :hg:`export`, user and
2893 If the imported patch was generated by :hg:`export`, user and
2981 description from patch override values from message headers and
2894 description from patch override values from message headers and
2982 body. Values given on command line with -m/--message and -u/--user
2895 body. Values given on command line with -m/--message and -u/--user
2983 override these.
2896 override these.
2984
2897
2985 If --exact is specified, import will set the working directory to
2898 If --exact is specified, import will set the working directory to
2986 the parent of each patch before applying it, and will abort if the
2899 the parent of each patch before applying it, and will abort if the
2987 resulting changeset has a different ID than the one recorded in
2900 resulting changeset has a different ID than the one recorded in
2988 the patch. This will guard against various ways that portable
2901 the patch. This will guard against various ways that portable
2989 patch formats and mail systems might fail to transfer Mercurial
2902 patch formats and mail systems might fail to transfer Mercurial
2990 data or metadata. See :hg:`bundle` for lossless transmission.
2903 data or metadata. See :hg:`bundle` for lossless transmission.
2991
2904
2992 Use --partial to ensure a changeset will be created from the patch
2905 Use --partial to ensure a changeset will be created from the patch
2993 even if some hunks fail to apply. Hunks that fail to apply will be
2906 even if some hunks fail to apply. Hunks that fail to apply will be
2994 written to a <target-file>.rej file. Conflicts can then be resolved
2907 written to a <target-file>.rej file. Conflicts can then be resolved
2995 by hand before :hg:`commit --amend` is run to update the created
2908 by hand before :hg:`commit --amend` is run to update the created
2996 changeset. This flag exists to let people import patches that
2909 changeset. This flag exists to let people import patches that
2997 partially apply without losing the associated metadata (author,
2910 partially apply without losing the associated metadata (author,
2998 date, description, ...).
2911 date, description, ...).
2999
2912
3000 .. note::
2913 .. note::
3001
2914
3002 When no hunks apply cleanly, :hg:`import --partial` will create
2915 When no hunks apply cleanly, :hg:`import --partial` will create
3003 an empty changeset, importing only the patch metadata.
2916 an empty changeset, importing only the patch metadata.
3004
2917
3005 With -s/--similarity, hg will attempt to discover renames and
2918 With -s/--similarity, hg will attempt to discover renames and
3006 copies in the patch in the same way as :hg:`addremove`.
2919 copies in the patch in the same way as :hg:`addremove`.
3007
2920
3008 It is possible to use external patch programs to apply the patch
2921 It is possible to use external patch programs to apply the patch
3009 by setting the ``ui.patch`` configuration option. For the default
2922 by setting the ``ui.patch`` configuration option. For the default
3010 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2923 internal tool, the fuzz can also be configured via ``patch.fuzz``.
3011 See :hg:`help config` for more information about configuration
2924 See :hg:`help config` for more information about configuration
3012 files and how to use these options.
2925 files and how to use these options.
3013
2926
3014 See :hg:`help dates` for a list of formats valid for -d/--date.
2927 See :hg:`help dates` for a list of formats valid for -d/--date.
3015
2928
3016 .. container:: verbose
2929 .. container:: verbose
3017
2930
3018 Examples:
2931 Examples:
3019
2932
3020 - import a traditional patch from a website and detect renames::
2933 - import a traditional patch from a website and detect renames::
3021
2934
3022 hg import -s 80 http://example.com/bugfix.patch
2935 hg import -s 80 http://example.com/bugfix.patch
3023
2936
3024 - import a changeset from an hgweb server::
2937 - import a changeset from an hgweb server::
3025
2938
3026 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2939 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
3027
2940
3028 - import all the patches in a Unix-style mbox::
2941 - import all the patches in a Unix-style mbox::
3029
2942
3030 hg import incoming-patches.mbox
2943 hg import incoming-patches.mbox
3031
2944
3032 - import patches from stdin::
2945 - import patches from stdin::
3033
2946
3034 hg import -
2947 hg import -
3035
2948
3036 - attempt to exactly restore an exported changeset (not always
2949 - attempt to exactly restore an exported changeset (not always
3037 possible)::
2950 possible)::
3038
2951
3039 hg import --exact proposed-fix.patch
2952 hg import --exact proposed-fix.patch
3040
2953
3041 - use an external tool to apply a patch which is too fuzzy for
2954 - use an external tool to apply a patch which is too fuzzy for
3042 the default internal tool::
2955 the default internal tool::
3043
2956
3044 hg import --config ui.patch="patch --merge" fuzzy.patch
2957 hg import --config ui.patch="patch --merge" fuzzy.patch
3045
2958
3046 - change the default fuzz from 2 to a less strict 7::
2959 - change the default fuzz from 2 to a less strict 7::
3047
2960
3048 hg import --config patch.fuzz=7 fuzz.patch
2961 hg import --config patch.fuzz=7 fuzz.patch
3049
2962
3050 Returns 0 on success, 1 on partial success (see --partial).
2963 Returns 0 on success, 1 on partial success (see --partial).
3051 """
2964 """
3052
2965
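# Editor's note: a hedged sketch of the --partial workflow described in
# the docstring above, driving the hg command line via subprocess. The
# patch filename is borrowed from the docstring's --exact example; the
# return codes follow the "Returns 0 on success, 1 on partial success"
# contract documented there.
import subprocess

ret = subprocess.call(['hg', 'import', '--partial', 'proposed-fix.patch'])
if ret == 1:
    # partial success: rejected hunks were written to <target-file>.rej
    print('fix the .rej files by hand, then run: hg commit --amend')
elif ret != 0:
    print('import failed with status %d' % ret)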
3053 opts = pycompat.byteskwargs(opts)
2966 opts = pycompat.byteskwargs(opts)
3054 if not patch1:
2967 if not patch1:
3055 raise error.Abort(_('need at least one patch to import'))
2968 raise error.Abort(_('need at least one patch to import'))
3056
2969
3057 patches = (patch1,) + patches
2970 patches = (patch1,) + patches
3058
2971
3059 date = opts.get('date')
2972 date = opts.get('date')
3060 if date:
2973 if date:
3061 opts['date'] = util.parsedate(date)
2974 opts['date'] = util.parsedate(date)
3062
2975
3063 exact = opts.get('exact')
2976 exact = opts.get('exact')
3064 update = not opts.get('bypass')
2977 update = not opts.get('bypass')
3065 if not update and opts.get('no_commit'):
2978 if not update and opts.get('no_commit'):
3066 raise error.Abort(_('cannot use --no-commit with --bypass'))
2979 raise error.Abort(_('cannot use --no-commit with --bypass'))
3067 try:
2980 try:
3068 sim = float(opts.get('similarity') or 0)
2981 sim = float(opts.get('similarity') or 0)
3069 except ValueError:
2982 except ValueError:
3070 raise error.Abort(_('similarity must be a number'))
2983 raise error.Abort(_('similarity must be a number'))
3071 if sim < 0 or sim > 100:
2984 if sim < 0 or sim > 100:
3072 raise error.Abort(_('similarity must be between 0 and 100'))
2985 raise error.Abort(_('similarity must be between 0 and 100'))
3073 if sim and not update:
2986 if sim and not update:
3074 raise error.Abort(_('cannot use --similarity with --bypass'))
2987 raise error.Abort(_('cannot use --similarity with --bypass'))
3075 if exact:
2988 if exact:
3076 if opts.get('edit'):
2989 if opts.get('edit'):
3077 raise error.Abort(_('cannot use --exact with --edit'))
2990 raise error.Abort(_('cannot use --exact with --edit'))
3078 if opts.get('prefix'):
2991 if opts.get('prefix'):
3079 raise error.Abort(_('cannot use --exact with --prefix'))
2992 raise error.Abort(_('cannot use --exact with --prefix'))
3080
2993
3081 base = opts["base"]
2994 base = opts["base"]
3082 wlock = dsguard = lock = tr = None
2995 wlock = dsguard = lock = tr = None
3083 msgs = []
2996 msgs = []
3084 ret = 0
2997 ret = 0
3085
2998
3086
2999
3087 try:
3000 try:
3088 wlock = repo.wlock()
3001 wlock = repo.wlock()
3089
3002
3090 if update:
3003 if update:
3091 cmdutil.checkunfinished(repo)
3004 cmdutil.checkunfinished(repo)
3092 if (exact or not opts.get('force')):
3005 if (exact or not opts.get('force')):
3093 cmdutil.bailifchanged(repo)
3006 cmdutil.bailifchanged(repo)
3094
3007
3095 if not opts.get('no_commit'):
3008 if not opts.get('no_commit'):
3096 lock = repo.lock()
3009 lock = repo.lock()
3097 tr = repo.transaction('import')
3010 tr = repo.transaction('import')
3098 else:
3011 else:
3099 dsguard = dirstateguard.dirstateguard(repo, 'import')
3012 dsguard = dirstateguard.dirstateguard(repo, 'import')
3100 parents = repo[None].parents()
3013 parents = repo[None].parents()
3101 for patchurl in patches:
3014 for patchurl in patches:
3102 if patchurl == '-':
3015 if patchurl == '-':
3103 ui.status(_('applying patch from stdin\n'))
3016 ui.status(_('applying patch from stdin\n'))
3104 patchfile = ui.fin
3017 patchfile = ui.fin
3105 patchurl = 'stdin' # for error message
3018 patchurl = 'stdin' # for error message
3106 else:
3019 else:
3107 patchurl = os.path.join(base, patchurl)
3020 patchurl = os.path.join(base, patchurl)
3108 ui.status(_('applying %s\n') % patchurl)
3021 ui.status(_('applying %s\n') % patchurl)
3109 patchfile = hg.openpath(ui, patchurl)
3022 patchfile = hg.openpath(ui, patchurl)
3110
3023
3111 haspatch = False
3024 haspatch = False
3112 for hunk in patch.split(patchfile):
3025 for hunk in patch.split(patchfile):
3113 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3026 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3114 parents, opts,
3027 parents, opts,
3115 msgs, hg.clean)
3028 msgs, hg.clean)
3116 if msg:
3029 if msg:
3117 haspatch = True
3030 haspatch = True
3118 ui.note(msg + '\n')
3031 ui.note(msg + '\n')
3119 if update or exact:
3032 if update or exact:
3120 parents = repo[None].parents()
3033 parents = repo[None].parents()
3121 else:
3034 else:
3122 parents = [repo[node]]
3035 parents = [repo[node]]
3123 if rej:
3036 if rej:
3124 ui.write_err(_("patch applied partially\n"))
3037 ui.write_err(_("patch applied partially\n"))
3125 ui.write_err(_("(fix the .rej files and run "
3038 ui.write_err(_("(fix the .rej files and run "
3126 "`hg commit --amend`)\n"))
3039 "`hg commit --amend`)\n"))
3127 ret = 1
3040 ret = 1
3128 break
3041 break
3129
3042
3130 if not haspatch:
3043 if not haspatch:
3131 raise error.Abort(_('%s: no diffs found') % patchurl)
3044 raise error.Abort(_('%s: no diffs found') % patchurl)
3132
3045
3133 if tr:
3046 if tr:
3134 tr.close()
3047 tr.close()
3135 if msgs:
3048 if msgs:
3136 repo.savecommitmessage('\n* * *\n'.join(msgs))
3049 repo.savecommitmessage('\n* * *\n'.join(msgs))
3137 if dsguard:
3050 if dsguard:
3138 dsguard.close()
3051 dsguard.close()
3139 return ret
3052 return ret
3140 finally:
3053 finally:
3141 if tr:
3054 if tr:
3142 tr.release()
3055 tr.release()
3143 release(lock, dsguard, wlock)
3056 release(lock, dsguard, wlock)
3144
3057
3145 @command('incoming|in',
3058 @command('incoming|in',
3146 [('f', 'force', None,
3059 [('f', 'force', None,
3147 _('run even if remote repository is unrelated')),
3060 _('run even if remote repository is unrelated')),
3148 ('n', 'newest-first', None, _('show newest record first')),
3061 ('n', 'newest-first', None, _('show newest record first')),
3149 ('', 'bundle', '',
3062 ('', 'bundle', '',
3150 _('file to store the bundles into'), _('FILE')),
3063 _('file to store the bundles into'), _('FILE')),
3151 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3064 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3152 ('B', 'bookmarks', False, _("compare bookmarks")),
3065 ('B', 'bookmarks', False, _("compare bookmarks")),
3153 ('b', 'branch', [],
3066 ('b', 'branch', [],
3154 _('a specific branch you would like to pull'), _('BRANCH')),
3067 _('a specific branch you would like to pull'), _('BRANCH')),
3155 ] + logopts + remoteopts + subrepoopts,
3068 ] + logopts + remoteopts + subrepoopts,
3156 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3069 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3157 def incoming(ui, repo, source="default", **opts):
3070 def incoming(ui, repo, source="default", **opts):
3158 """show new changesets found in source
3071 """show new changesets found in source
3159
3072
3160 Show new changesets found in the specified path/URL or the default
3073 Show new changesets found in the specified path/URL or the default
3161 pull location. These are the changesets that would have been pulled
3074 pull location. These are the changesets that would have been pulled
3162 if a pull was requested at the time you issued this command.
3075 if a pull was requested at the time you issued this command.
3163
3076
3164 See pull for valid source format details.
3077 See pull for valid source format details.
3165
3078
3166 .. container:: verbose
3079 .. container:: verbose
3167
3080
3168 With -B/--bookmarks, the result of bookmark comparison between
3081 With -B/--bookmarks, the result of bookmark comparison between
3169 local and remote repositories is displayed. With -v/--verbose,
3082 local and remote repositories is displayed. With -v/--verbose,
3170 status is also displayed for each bookmark like below::
3083 status is also displayed for each bookmark like below::
3171
3084
3172 BM1 01234567890a added
3085 BM1 01234567890a added
3173 BM2 1234567890ab advanced
3086 BM2 1234567890ab advanced
3174 BM3 234567890abc diverged
3087 BM3 234567890abc diverged
3175 BM4 34567890abcd changed
3088 BM4 34567890abcd changed
3176
3089
3177 The action taken locally when pulling depends on the
3090 The action taken locally when pulling depends on the
3178 status of each bookmark:
3091 status of each bookmark:
3179
3092
3180 :``added``: pull will create it
3093 :``added``: pull will create it
3181 :``advanced``: pull will update it
3094 :``advanced``: pull will update it
3182 :``diverged``: pull will create a divergent bookmark
3095 :``diverged``: pull will create a divergent bookmark
3183 :``changed``: result depends on remote changesets
3096 :``changed``: result depends on remote changesets
3184
3097
3185 From the point of view of pulling behavior, bookmarks
3098 From the point of view of pulling behavior, bookmarks
3186 existing only in the remote repository are treated as ``added``,
3099 existing only in the remote repository are treated as ``added``,
3187 even if they have in fact been locally deleted.
3100 even if they have in fact been locally deleted.
3188
3101
3189 .. container:: verbose
3102 .. container:: verbose
3190
3103
3191 For a remote repository, using --bundle avoids downloading the
3104 For a remote repository, using --bundle avoids downloading the
3192 changesets twice if the incoming run is followed by a pull.
3105 changesets twice if the incoming run is followed by a pull.
3193
3106
3194 Examples:
3107 Examples:
3195
3108
3196 - show incoming changes with patches and full description::
3109 - show incoming changes with patches and full description::
3197
3110
3198 hg incoming -vp
3111 hg incoming -vp
3199
3112
3200 - show incoming changes excluding merges, store a bundle::
3113 - show incoming changes excluding merges, store a bundle::
3201
3114
3202 hg in -vpM --bundle incoming.hg
3115 hg in -vpM --bundle incoming.hg
3203 hg pull incoming.hg
3116 hg pull incoming.hg
3204
3117
3205 - briefly list changes inside a bundle::
3118 - briefly list changes inside a bundle::
3206
3119
3207 hg in changes.hg -T "{desc|firstline}\\n"
3120 hg in changes.hg -T "{desc|firstline}\\n"
3208
3121
3209 Returns 0 if there are incoming changes, 1 otherwise.
3122 Returns 0 if there are incoming changes, 1 otherwise.
3210 """
3123 """
3211 opts = pycompat.byteskwargs(opts)
3124 opts = pycompat.byteskwargs(opts)
3212 if opts.get('graph'):
3125 if opts.get('graph'):
3213 cmdutil.checkunsupportedgraphflags([], opts)
3126 cmdutil.checkunsupportedgraphflags([], opts)
3214 def display(other, chlist, displayer):
3127 def display(other, chlist, displayer):
3215 revdag = cmdutil.graphrevs(other, chlist, opts)
3128 revdag = cmdutil.graphrevs(other, chlist, opts)
3216 cmdutil.displaygraph(ui, repo, revdag, displayer,
3129 cmdutil.displaygraph(ui, repo, revdag, displayer,
3217 graphmod.asciiedges)
3130 graphmod.asciiedges)
3218
3131
3219 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3132 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3220 return 0
3133 return 0
3221
3134
3222 if opts.get('bundle') and opts.get('subrepos'):
3135 if opts.get('bundle') and opts.get('subrepos'):
3223 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3136 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3224
3137
3225 if opts.get('bookmarks'):
3138 if opts.get('bookmarks'):
3226 source, branches = hg.parseurl(ui.expandpath(source),
3139 source, branches = hg.parseurl(ui.expandpath(source),
3227 opts.get('branch'))
3140 opts.get('branch'))
3228 other = hg.peer(repo, opts, source)
3141 other = hg.peer(repo, opts, source)
3229 if 'bookmarks' not in other.listkeys('namespaces'):
3142 if 'bookmarks' not in other.listkeys('namespaces'):
3230 ui.warn(_("remote doesn't support bookmarks\n"))
3143 ui.warn(_("remote doesn't support bookmarks\n"))
3231 return 0
3144 return 0
3232 ui.pager('incoming')
3145 ui.pager('incoming')
3233 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3146 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3234 return bookmarks.incoming(ui, repo, other)
3147 return bookmarks.incoming(ui, repo, other)
3235
3148
3236 repo._subtoppath = ui.expandpath(source)
3149 repo._subtoppath = ui.expandpath(source)
3237 try:
3150 try:
3238 return hg.incoming(ui, repo, source, opts)
3151 return hg.incoming(ui, repo, source, opts)
3239 finally:
3152 finally:
3240 del repo._subtoppath
3153 del repo._subtoppath
3241
3154
3242
3155
3243 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3156 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3244 norepo=True)
3157 norepo=True)
3245 def init(ui, dest=".", **opts):
3158 def init(ui, dest=".", **opts):
3246 """create a new repository in the given directory
3159 """create a new repository in the given directory
3247
3160
3248 Initialize a new repository in the given directory. If the given
3161 Initialize a new repository in the given directory. If the given
3249 directory does not exist, it will be created.
3162 directory does not exist, it will be created.
3250
3163
3251 If no directory is given, the current directory is used.
3164 If no directory is given, the current directory is used.
3252
3165
3253 It is possible to specify an ``ssh://`` URL as the destination.
3166 It is possible to specify an ``ssh://`` URL as the destination.
3254 See :hg:`help urls` for more information.
3167 See :hg:`help urls` for more information.
3255
3168
3256 Returns 0 on success.
3169 Returns 0 on success.
3257 """
3170 """
3258 opts = pycompat.byteskwargs(opts)
3171 opts = pycompat.byteskwargs(opts)
3259 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3172 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3260
3173
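# Editor's note: a short usage sketch for `hg init`. The directory name
# and the ssh:// URL are placeholder assumptions, not values taken from
# this file.
import subprocess

subprocess.check_call(['hg', 'init', 'myproject'])  # create a local repository
# An ssh:// destination is also accepted, as the docstring notes, e.g.:
# subprocess.check_call(['hg', 'init', 'ssh://user@example.com/path/to/repo'])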
3261 @command('locate',
3174 @command('locate',
3262 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3175 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3263 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3176 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3264 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3177 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3265 ] + walkopts,
3178 ] + walkopts,
3266 _('[OPTION]... [PATTERN]...'))
3179 _('[OPTION]... [PATTERN]...'))
3267 def locate(ui, repo, *pats, **opts):
3180 def locate(ui, repo, *pats, **opts):
3268 """locate files matching specific patterns (DEPRECATED)
3181 """locate files matching specific patterns (DEPRECATED)
3269
3182
3270 Print files under Mercurial control in the working directory whose
3183 Print files under Mercurial control in the working directory whose
3271 names match the given patterns.
3184 names match the given patterns.
3272
3185
3273 By default, this command searches all directories in the working
3186 By default, this command searches all directories in the working
3274 directory. To search just the current directory and its
3187 directory. To search just the current directory and its
3275 subdirectories, use "--include .".
3188 subdirectories, use "--include .".
3276
3189
3277 If no patterns are given to match, this command prints the names
3190 If no patterns are given to match, this command prints the names
3278 of all files under Mercurial control in the working directory.
3191 of all files under Mercurial control in the working directory.
3279
3192
3280 If you want to feed the output of this command into the "xargs"
3193 If you want to feed the output of this command into the "xargs"
3281 command, use the -0 option to both this command and "xargs". This
3194 command, use the -0 option to both this command and "xargs". This
3282 will avoid the problem of "xargs" treating single filenames that
3195 will avoid the problem of "xargs" treating single filenames that
3283 contain whitespace as multiple filenames.
3196 contain whitespace as multiple filenames.
3284
3197
3285 See :hg:`help files` for a more versatile command.
3198 See :hg:`help files` for a more versatile command.
3286
3199
3287 Returns 0 if a match is found, 1 otherwise.
3200 Returns 0 if a match is found, 1 otherwise.
3288 """
3201 """
3289 opts = pycompat.byteskwargs(opts)
3202 opts = pycompat.byteskwargs(opts)
3290 if opts.get('print0'):
3203 if opts.get('print0'):
3291 end = '\0'
3204 end = '\0'
3292 else:
3205 else:
3293 end = '\n'
3206 end = '\n'
3294 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3207 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3295
3208
3296 ret = 1
3209 ret = 1
3297 ctx = repo[rev]
3210 ctx = repo[rev]
3298 m = scmutil.match(ctx, pats, opts, default='relglob',
3211 m = scmutil.match(ctx, pats, opts, default='relglob',
3299 badfn=lambda x, y: False)
3212 badfn=lambda x, y: False)
3300
3213
3301 ui.pager('locate')
3214 ui.pager('locate')
3302 for abs in ctx.matches(m):
3215 for abs in ctx.matches(m):
3303 if opts.get('fullpath'):
3216 if opts.get('fullpath'):
3304 ui.write(repo.wjoin(abs), end)
3217 ui.write(repo.wjoin(abs), end)
3305 else:
3218 else:
3306 ui.write(((pats and m.rel(abs)) or abs), end)
3219 ui.write(((pats and m.rel(abs)) or abs), end)
3307 ret = 0
3220 ret = 0
3308
3221
3309 return ret
3222 return ret
3310
3223
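# Editor's note: a hedged sketch of consuming NUL-terminated `hg locate -0`
# output directly from Python instead of piping it to xargs, per the
# docstring above. Assumes an `hg` executable on PATH.
import subprocess

out = subprocess.check_output(['hg', 'locate', '-0'])
files = [f for f in out.split(b'\0') if f]  # drop the trailing empty chunk
for name in files:
    print(name.decode('utf-8', 'replace'))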
3311 @command('^log|history',
3224 @command('^log|history',
3312 [('f', 'follow', None,
3225 [('f', 'follow', None,
3313 _('follow changeset history, or file history across copies and renames')),
3226 _('follow changeset history, or file history across copies and renames')),
3314 ('', 'follow-first', None,
3227 ('', 'follow-first', None,
3315 _('only follow the first parent of merge changesets (DEPRECATED)')),
3228 _('only follow the first parent of merge changesets (DEPRECATED)')),
3316 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3229 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3317 ('C', 'copies', None, _('show copied files')),
3230 ('C', 'copies', None, _('show copied files')),
3318 ('k', 'keyword', [],
3231 ('k', 'keyword', [],
3319 _('do case-insensitive search for a given text'), _('TEXT')),
3232 _('do case-insensitive search for a given text'), _('TEXT')),
3320 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3233 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3321 ('', 'removed', None, _('include revisions where files were removed')),
3234 ('', 'removed', None, _('include revisions where files were removed')),
3322 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3235 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3323 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3236 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3324 ('', 'only-branch', [],
3237 ('', 'only-branch', [],
3325 _('show only changesets within the given named branch (DEPRECATED)'),
3238 _('show only changesets within the given named branch (DEPRECATED)'),
3326 _('BRANCH')),
3239 _('BRANCH')),
3327 ('b', 'branch', [],
3240 ('b', 'branch', [],
3328 _('show changesets within the given named branch'), _('BRANCH')),
3241 _('show changesets within the given named branch'), _('BRANCH')),
3329 ('P', 'prune', [],
3242 ('P', 'prune', [],
3330 _('do not display revision or any of its ancestors'), _('REV')),
3243 _('do not display revision or any of its ancestors'), _('REV')),
3331 ] + logopts + walkopts,
3244 ] + logopts + walkopts,
3332 _('[OPTION]... [FILE]'),
3245 _('[OPTION]... [FILE]'),
3333 inferrepo=True)
3246 inferrepo=True)
3334 def log(ui, repo, *pats, **opts):
3247 def log(ui, repo, *pats, **opts):
3335 """show revision history of entire repository or files
3248 """show revision history of entire repository or files
3336
3249
3337 Print the revision history of the specified files or the entire
3250 Print the revision history of the specified files or the entire
3338 project.
3251 project.
3339
3252
3340 If no revision range is specified, the default is ``tip:0`` unless
3253 If no revision range is specified, the default is ``tip:0`` unless
3341 --follow is set, in which case the working directory parent is
3254 --follow is set, in which case the working directory parent is
3342 used as the starting revision.
3255 used as the starting revision.
3343
3256
3344 File history is shown without following rename or copy history of
3257 File history is shown without following rename or copy history of
3345 files. Use -f/--follow with a filename to follow history across
3258 files. Use -f/--follow with a filename to follow history across
3346 renames and copies. --follow without a filename will only show
3259 renames and copies. --follow without a filename will only show
3347 ancestors or descendants of the starting revision.
3260 ancestors or descendants of the starting revision.
3348
3261
3349 By default this command prints revision number and changeset id,
3262 By default this command prints revision number and changeset id,
3350 tags, non-trivial parents, user, date and time, and a summary for
3263 tags, non-trivial parents, user, date and time, and a summary for
3351 each commit. When the -v/--verbose switch is used, the list of
3264 each commit. When the -v/--verbose switch is used, the list of
3352 changed files and full commit message are shown.
3265 changed files and full commit message are shown.
3353
3266
3354 With --graph the revisions are shown as an ASCII art DAG with the most
3267 With --graph the revisions are shown as an ASCII art DAG with the most
3355 recent changeset at the top.
3268 recent changeset at the top.
3356 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
3269 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
3357 and '+' represents a fork where the changeset from the lines below is a
3270 and '+' represents a fork where the changeset from the lines below is a
3358 parent of the 'o' merge on the same line.
3271 parent of the 'o' merge on the same line.
3359 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3272 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3360 of a '|' indicates one or more revisions in a path are omitted.
3273 of a '|' indicates one or more revisions in a path are omitted.
3361
3274
3362 .. note::
3275 .. note::
3363
3276
3364 :hg:`log --patch` may generate unexpected diff output for merge
3277 :hg:`log --patch` may generate unexpected diff output for merge
3365 changesets, as it will only compare the merge changeset against
3278 changesets, as it will only compare the merge changeset against
3366 its first parent. Also, only files different from BOTH parents
3279 its first parent. Also, only files different from BOTH parents
3367 will appear in the files: field.
3280 will appear in the files: field.
3368
3281
3369 .. note::
3282 .. note::
3370
3283
3371 For performance reasons, :hg:`log FILE` may omit duplicate changes
3284 For performance reasons, :hg:`log FILE` may omit duplicate changes
3372 made on branches and will not show removals or mode changes. To
3285 made on branches and will not show removals or mode changes. To
3373 see all such changes, use the --removed switch.
3286 see all such changes, use the --removed switch.
3374
3287
3375 .. container:: verbose
3288 .. container:: verbose
3376
3289
3377 Some examples:
3290 Some examples:
3378
3291
3379 - changesets with full descriptions and file lists::
3292 - changesets with full descriptions and file lists::
3380
3293
3381 hg log -v
3294 hg log -v
3382
3295
3383 - changesets ancestral to the working directory::
3296 - changesets ancestral to the working directory::
3384
3297
3385 hg log -f
3298 hg log -f
3386
3299
3387 - last 10 commits on the current branch::
3300 - last 10 commits on the current branch::
3388
3301
3389 hg log -l 10 -b .
3302 hg log -l 10 -b .
3390
3303
3391 - changesets showing all modifications of a file, including removals::
3304 - changesets showing all modifications of a file, including removals::
3392
3305
3393 hg log --removed file.c
3306 hg log --removed file.c
3394
3307
3395 - all changesets that touch a directory, with diffs, excluding merges::
3308 - all changesets that touch a directory, with diffs, excluding merges::
3396
3309
3397 hg log -Mp lib/
3310 hg log -Mp lib/
3398
3311
3399 - all revision numbers that match a keyword::
3312 - all revision numbers that match a keyword::
3400
3313
3401 hg log -k bug --template "{rev}\\n"
3314 hg log -k bug --template "{rev}\\n"
3402
3315
3403 - the full hash identifier of the working directory parent::
3316 - the full hash identifier of the working directory parent::
3404
3317
3405 hg log -r . --template "{node}\\n"
3318 hg log -r . --template "{node}\\n"
3406
3319
3407 - list available log templates::
3320 - list available log templates::
3408
3321
3409 hg log -T list
3322 hg log -T list
3410
3323
3411 - check if a given changeset is included in a tagged release::
3324 - check if a given changeset is included in a tagged release::
3412
3325
3413 hg log -r "a21ccf and ancestor(1.9)"
3326 hg log -r "a21ccf and ancestor(1.9)"
3414
3327
3415 - find all changesets by some user in a date range::
3328 - find all changesets by some user in a date range::
3416
3329
3417 hg log -k alice -d "may 2008 to jul 2008"
3330 hg log -k alice -d "may 2008 to jul 2008"
3418
3331
3419 - summary of all changesets after the last tag::
3332 - summary of all changesets after the last tag::
3420
3333
3421 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3334 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3422
3335
3423 See :hg:`help dates` for a list of formats valid for -d/--date.
3336 See :hg:`help dates` for a list of formats valid for -d/--date.
3424
3337
3425 See :hg:`help revisions` for more about specifying and ordering
3338 See :hg:`help revisions` for more about specifying and ordering
3426 revisions.
3339 revisions.
3427
3340
3428 See :hg:`help templates` for more about pre-packaged styles and
3341 See :hg:`help templates` for more about pre-packaged styles and
3429 specifying custom templates.
3342 specifying custom templates.
3430
3343
3431 Returns 0 on success.
3344 Returns 0 on success.
3432
3345
3433 """
3346 """
3434 opts = pycompat.byteskwargs(opts)
3347 opts = pycompat.byteskwargs(opts)
3435 if opts.get('follow') and opts.get('rev'):
3348 if opts.get('follow') and opts.get('rev'):
3436 opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
3349 opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
3437 del opts['follow']
3350 del opts['follow']
3438
3351
3439 if opts.get('graph'):
3352 if opts.get('graph'):
3440 return cmdutil.graphlog(ui, repo, pats, opts)
3353 return cmdutil.graphlog(ui, repo, pats, opts)
3441
3354
3442 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
3355 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
3443 limit = cmdutil.loglimit(opts)
3356 limit = cmdutil.loglimit(opts)
3444 count = 0
3357 count = 0
3445
3358
3446 getrenamed = None
3359 getrenamed = None
3447 if opts.get('copies'):
3360 if opts.get('copies'):
3448 endrev = None
3361 endrev = None
3449 if opts.get('rev'):
3362 if opts.get('rev'):
3450 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3363 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3451 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3364 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3452
3365
3453 ui.pager('log')
3366 ui.pager('log')
3454 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
3367 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
3455 for rev in revs:
3368 for rev in revs:
3456 if count == limit:
3369 if count == limit:
3457 break
3370 break
3458 ctx = repo[rev]
3371 ctx = repo[rev]
3459 copies = None
3372 copies = None
3460 if getrenamed is not None and rev:
3373 if getrenamed is not None and rev:
3461 copies = []
3374 copies = []
3462 for fn in ctx.files():
3375 for fn in ctx.files():
3463 rename = getrenamed(fn, rev)
3376 rename = getrenamed(fn, rev)
3464 if rename:
3377 if rename:
3465 copies.append((fn, rename[0]))
3378 copies.append((fn, rename[0]))
3466 if filematcher:
3379 if filematcher:
3467 revmatchfn = filematcher(ctx.rev())
3380 revmatchfn = filematcher(ctx.rev())
3468 else:
3381 else:
3469 revmatchfn = None
3382 revmatchfn = None
3470 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
3383 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
3471 if displayer.flush(ctx):
3384 if displayer.flush(ctx):
3472 count += 1
3385 count += 1
3473
3386
3474 displayer.close()
3387 displayer.close()
3475
3388
3476 @command('manifest',
3389 @command('manifest',
3477 [('r', 'rev', '', _('revision to display'), _('REV')),
3390 [('r', 'rev', '', _('revision to display'), _('REV')),
3478 ('', 'all', False, _("list files from all revisions"))]
3391 ('', 'all', False, _("list files from all revisions"))]
3479 + formatteropts,
3392 + formatteropts,
3480 _('[-r REV]'))
3393 _('[-r REV]'))
3481 def manifest(ui, repo, node=None, rev=None, **opts):
3394 def manifest(ui, repo, node=None, rev=None, **opts):
3482 """output the current or given revision of the project manifest
3395 """output the current or given revision of the project manifest
3483
3396
3484 Print a list of version controlled files for the given revision.
3397 Print a list of version controlled files for the given revision.
3485 If no revision is given, the first parent of the working directory
3398 If no revision is given, the first parent of the working directory
3486 is used, or the null revision if no revision is checked out.
3399 is used, or the null revision if no revision is checked out.
3487
3400
3488 With -v, print file permissions, symlink and executable bits.
3401 With -v, print file permissions, symlink and executable bits.
3489 With --debug, print file revision hashes.
3402 With --debug, print file revision hashes.
3490
3403
3491 If option --all is specified, the list of all files from all revisions
3404 If option --all is specified, the list of all files from all revisions
3492 is printed. This includes deleted and renamed files.
3405 is printed. This includes deleted and renamed files.
3493
3406
3494 Returns 0 on success.
3407 Returns 0 on success.
3495 """
3408 """
3496 opts = pycompat.byteskwargs(opts)
3409 opts = pycompat.byteskwargs(opts)
3497 fm = ui.formatter('manifest', opts)
3410 fm = ui.formatter('manifest', opts)
3498
3411
3499 if opts.get('all'):
3412 if opts.get('all'):
3500 if rev or node:
3413 if rev or node:
3501 raise error.Abort(_("can't specify a revision with --all"))
3414 raise error.Abort(_("can't specify a revision with --all"))
3502
3415
3503 res = []
3416 res = []
3504 prefix = "data/"
3417 prefix = "data/"
3505 suffix = ".i"
3418 suffix = ".i"
3506 plen = len(prefix)
3419 plen = len(prefix)
3507 slen = len(suffix)
3420 slen = len(suffix)
3508 with repo.lock():
3421 with repo.lock():
3509 for fn, b, size in repo.store.datafiles():
3422 for fn, b, size in repo.store.datafiles():
3510 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3423 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3511 res.append(fn[plen:-slen])
3424 res.append(fn[plen:-slen])
3512 ui.pager('manifest')
3425 ui.pager('manifest')
3513 for f in res:
3426 for f in res:
3514 fm.startitem()
3427 fm.startitem()
3515 fm.write("path", '%s\n', f)
3428 fm.write("path", '%s\n', f)
3516 fm.end()
3429 fm.end()
3517 return
3430 return
3518
3431
3519 if rev and node:
3432 if rev and node:
3520 raise error.Abort(_("please specify just one revision"))
3433 raise error.Abort(_("please specify just one revision"))
3521
3434
3522 if not node:
3435 if not node:
3523 node = rev
3436 node = rev
3524
3437
3525 char = {'l': '@', 'x': '*', '': ''}
3438 char = {'l': '@', 'x': '*', '': ''}
3526 mode = {'l': '644', 'x': '755', '': '644'}
3439 mode = {'l': '644', 'x': '755', '': '644'}
3527 ctx = scmutil.revsingle(repo, node)
3440 ctx = scmutil.revsingle(repo, node)
3528 mf = ctx.manifest()
3441 mf = ctx.manifest()
3529 ui.pager('manifest')
3442 ui.pager('manifest')
3530 for f in ctx:
3443 for f in ctx:
3531 fm.startitem()
3444 fm.startitem()
3532 fl = ctx[f].flags()
3445 fl = ctx[f].flags()
3533 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3446 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3534 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3447 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3535 fm.write('path', '%s\n', f)
3448 fm.write('path', '%s\n', f)
3536 fm.end()
3449 fm.end()
3537
3450
3538 @command('^merge',
3451 @command('^merge',
3539 [('f', 'force', None,
3452 [('f', 'force', None,
3540 _('force a merge including outstanding changes (DEPRECATED)')),
3453 _('force a merge including outstanding changes (DEPRECATED)')),
3541 ('r', 'rev', '', _('revision to merge'), _('REV')),
3454 ('r', 'rev', '', _('revision to merge'), _('REV')),
3542 ('P', 'preview', None,
3455 ('P', 'preview', None,
3543 _('review revisions to merge (no merge is performed)'))
3456 _('review revisions to merge (no merge is performed)'))
3544 ] + mergetoolopts,
3457 ] + mergetoolopts,
3545 _('[-P] [[-r] REV]'))
3458 _('[-P] [[-r] REV]'))
3546 def merge(ui, repo, node=None, **opts):
3459 def merge(ui, repo, node=None, **opts):
3547 """merge another revision into working directory
3460 """merge another revision into working directory
3548
3461
3549 The current working directory is updated with all changes made in
3462 The current working directory is updated with all changes made in
3550 the requested revision since the last common predecessor revision.
3463 the requested revision since the last common predecessor revision.
3551
3464
3552 Files that changed between either parent are marked as changed for
3465 Files that changed between either parent are marked as changed for
3553 the next commit, and a commit must be performed before any further
3466 the next commit, and a commit must be performed before any further
3554 updates to the repository are allowed. The next commit will have
3467 updates to the repository are allowed. The next commit will have
3555 two parents.
3468 two parents.
3556
3469
3557 ``--tool`` can be used to specify the merge tool used for file
3470 ``--tool`` can be used to specify the merge tool used for file
3558 merges. It overrides the HGMERGE environment variable and your
3471 merges. It overrides the HGMERGE environment variable and your
3559 configuration files. See :hg:`help merge-tools` for options.
3472 configuration files. See :hg:`help merge-tools` for options.
3560
3473
3561 If no revision is specified, the working directory's parent is a
3474 If no revision is specified, the working directory's parent is a
3562 head revision, and the current branch contains exactly one other
3475 head revision, and the current branch contains exactly one other
3563 head, that other head is merged by default. Otherwise, an
3476 head, that other head is merged by default. Otherwise, an
3564 explicit revision with which to merge must be provided.
3477 explicit revision with which to merge must be provided.
3565
3478
3566 See :hg:`help resolve` for information on handling file conflicts.
3479 See :hg:`help resolve` for information on handling file conflicts.
3567
3480
3568 To undo an uncommitted merge, use :hg:`update --clean .` which
3481 To undo an uncommitted merge, use :hg:`update --clean .` which
3569 will check out a clean copy of the original merge parent, losing
3482 will check out a clean copy of the original merge parent, losing
3570 all changes.
3483 all changes.
3571
3484
3572 Returns 0 on success, 1 if there are unresolved files.
3485 Returns 0 on success, 1 if there are unresolved files.
3573 """
3486 """
3574
3487
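# Editor's note: a hedged sketch of the --preview workflow described
# above: review what would be merged, then merge. The revision name
# "stable" is a placeholder assumption; exit status 1 signals unresolved
# files per the docstring.
import subprocess

subprocess.call(['hg', 'merge', '--preview', '-r', 'stable'])
answer = raw_input('merge these changesets? [y/n] ')  # input() on Python 3
if answer.strip().lower() == 'y':
    if subprocess.call(['hg', 'merge', '-r', 'stable']) == 1:
        print('unresolved files remain; see `hg help resolve`')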
3575 opts = pycompat.byteskwargs(opts)
3488 opts = pycompat.byteskwargs(opts)
3576 if opts.get('rev') and node:
3489 if opts.get('rev') and node:
3577 raise error.Abort(_("please specify just one revision"))
3490 raise error.Abort(_("please specify just one revision"))
3578 if not node:
3491 if not node:
3579 node = opts.get('rev')
3492 node = opts.get('rev')
3580
3493
3581 if node:
3494 if node:
3582 node = scmutil.revsingle(repo, node).node()
3495 node = scmutil.revsingle(repo, node).node()
3583
3496
3584 if not node:
3497 if not node:
3585 node = repo[destutil.destmerge(repo)].node()
3498 node = repo[destutil.destmerge(repo)].node()
3586
3499
3587 if opts.get('preview'):
3500 if opts.get('preview'):
3588 # find nodes that are ancestors of p2 but not of p1
3501 # find nodes that are ancestors of p2 but not of p1
3589 p1 = repo.lookup('.')
3502 p1 = repo.lookup('.')
3590 p2 = repo.lookup(node)
3503 p2 = repo.lookup(node)
3591 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3504 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3592
3505
3593 displayer = cmdutil.show_changeset(ui, repo, opts)
3506 displayer = cmdutil.show_changeset(ui, repo, opts)
3594 for node in nodes:
3507 for node in nodes:
3595 displayer.show(repo[node])
3508 displayer.show(repo[node])
3596 displayer.close()
3509 displayer.close()
3597 return 0
3510 return 0
3598
3511
3599 try:
3512 try:
3600 # ui.forcemerge is an internal variable, do not document
3513 # ui.forcemerge is an internal variable, do not document
3601 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3514 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3602 force = opts.get('force')
3515 force = opts.get('force')
3603 labels = ['working copy', 'merge rev']
3516 labels = ['working copy', 'merge rev']
3604 return hg.merge(repo, node, force=force, mergeforce=force,
3517 return hg.merge(repo, node, force=force, mergeforce=force,
3605 labels=labels)
3518 labels=labels)
3606 finally:
3519 finally:
3607 ui.setconfig('ui', 'forcemerge', '', 'merge')
3520 ui.setconfig('ui', 'forcemerge', '', 'merge')
3608
3521
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if they were in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        ui.pager('outgoing')
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        ui.pager('outgoing')
        return bookmarks.outgoing(ui, repo, other)

    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath

@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()

@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('paths')
    if search:
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    if fm.isplain():
        hidepassword = util.hidepassword
    else:
        hidepassword = str
    if ui.quiet:
        namefmt = '%s\n'
    else:
        namefmt = '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        return 0

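# Editor's note (illustrative, not part of the upstream change): the paths
# docstring above describes how ``default`` and ``default-push`` act as
# fallbacks.  A minimal ``.hg/hgrc`` along these lines (URLs are made up)
# would make ``hg pull`` use the first entry and ``hg push`` the second:
#
#     [paths]
#     default = https://example.com/repo
#     default-push = ssh://user@example.com/repo
#
# With only ``default`` set, both pull and push fall back to it.
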
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret

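# Editor's note (illustrative, not part of the upstream change): phase()
# above only advances phases (towards ``public``) via advanceboundary();
# with --force it additionally retracts the boundary, which is what allows
# moving a changeset back towards ``draft`` or ``secret``.  Hypothetical
# command-line examples of that behavior:
#
#     $ hg phase -r 1.0                  # show the current phase
#     $ hg phase --public -r 1.0         # draft -> public, no --force needed
#     $ hg phase --force --draft -r tip  # public -> draft requires --force
#
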
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    This takes the following arguments:

    :modheads: change of heads by pull/unbundle
    :optupdate: whether updating the working directory is needed or not
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating
    """
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            msg = _("not updating: %s") % str(inst)
            hint = inst.hint
            raise error.UpdateAbort(msg, hint=hint)
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying a bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
        msg = _('update destination required by configuration')
        hint = _('use hg pull followed by hg update DEST')
        raise error.Abort(msg, hint=hint)

    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmarks used here is not the one used to actually
            # update the bookmark names. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

            # order below depends on implementation of
            # hg.addbranchrevs(). opts['bookmark'] is ignored,
            # because 'checkout' is determined without it.
            if opts.get('rev'):
                brev = opts['rev'][0]
            elif opts.get('branch'):
                brev = opts['branch'][0]
            else:
                brev = branches[0]
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret

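# Editor's note (illustrative, not part of the upstream change): pull()
# above resolves -B/--bookmark and -r/--rev arguments to remote nodes before
# handing them to exchange.pull(), then delegates any working-copy update to
# postincoming().  Typical invocations of the command (path and bookmark
# names are hypothetical):
#
#     $ hg pull                      # pull from the 'default' path
#     $ hg pull -u                   # pull, then update the working copy
#     $ hg pull -B feature upstream  # pull the 'feature' bookmark
#
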
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result

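# Editor's note (illustrative sketch, not part of the upstream change): the
# return value of push() above folds the changegroup result and the bookmark
# result into a single exit code.  The helper below is a hypothetical
# restatement of that mapping, assuming the cgresult/bkresult semantics used
# above; it is never called.
def _pushexitcode_sketch(cgresult, bkresult):
    result = not cgresult  # falsy (0) if changesets were pushed, 1 otherwise
    if bkresult is not None:
        if bkresult == 2 or (not result and bkresult):
            result = 2     # bookmark push failed
    return result
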
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    subrepos = opts.get('subrepos')
    return cmdutil.remove(ui, repo, m, "", after, force, subrepos)

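# Editor's note (illustrative, not part of the upstream change): the table in
# the remove docstring above can be read as concrete invocations (file names
# are hypothetical):
#
#     $ hg remove foo.c          # delete foo.c and schedule its removal
#     $ hg remove --after foo.c  # record removal of an already-deleted file
#     $ hg remove -Af foo.c      # untrack foo.c but keep it on disk
#
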
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)

@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.
      You can use ``set:unresolved()`` or ``set:resolved()`` to filter
      the list. See :hg:`help filesets` for details.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    opts = pycompat.byteskwargs(opts)
    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    if show:
        ui.pager('resolve')
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
                              'd': 'driverresolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        if ms.mergedriver and ms.mdstate() == 'u':
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == "d":
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0] for o in flaglist
                                     if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                        flags,
                        ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
4386 if not proceed:
4474 return 1
4387 return 1
4475
4388
4476 # Nudge users into finishing an unfinished operation
4389 # Nudge users into finishing an unfinished operation
4477 unresolvedf = list(ms.unresolved())
4390 unresolvedf = list(ms.unresolved())
4478 driverresolvedf = list(ms.driverresolved())
4391 driverresolvedf = list(ms.driverresolved())
4479 if not unresolvedf and not driverresolvedf:
4392 if not unresolvedf and not driverresolvedf:
4480 ui.status(_('(no more unresolved files)\n'))
4393 ui.status(_('(no more unresolved files)\n'))
4481 cmdutil.checkafterresolved(repo)
4394 cmdutil.checkafterresolved(repo)
4482 elif not unresolvedf:
4395 elif not unresolvedf:
4483 ui.status(_('(no more unresolved files -- '
4396 ui.status(_('(no more unresolved files -- '
4484 'run "hg resolve --all" to conclude)\n'))
4397 'run "hg resolve --all" to conclude)\n'))
4485
4398
4486 return ret
4399 return ret
4487
4400
4488 @command('revert',
4401 @command('revert',
4489 [('a', 'all', None, _('revert all changes when no arguments given')),
4402 [('a', 'all', None, _('revert all changes when no arguments given')),
4490 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4403 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4491 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4404 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4492 ('C', 'no-backup', None, _('do not save backup copies of files')),
4405 ('C', 'no-backup', None, _('do not save backup copies of files')),
4493 ('i', 'interactive', None,
4406 ('i', 'interactive', None,
4494 _('interactively select the changes (EXPERIMENTAL)')),
4407 _('interactively select the changes (EXPERIMENTAL)')),
4495 ] + walkopts + dryrunopts,
4408 ] + walkopts + dryrunopts,
4496 _('[OPTION]... [-r REV] [NAME]...'))
4409 _('[OPTION]... [-r REV] [NAME]...'))
4497 def revert(ui, repo, *pats, **opts):
4410 def revert(ui, repo, *pats, **opts):
4498 """restore files to their checkout state
4411 """restore files to their checkout state
4499
4412
4500 .. note::
4413 .. note::
4501
4414
4502 To check out earlier revisions, you should use :hg:`update REV`.
4415 To check out earlier revisions, you should use :hg:`update REV`.
4503 To cancel an uncommitted merge (and lose your changes),
4416 To cancel an uncommitted merge (and lose your changes),
4504 use :hg:`update --clean .`.
4417 use :hg:`update --clean .`.
4505
4418
4506 With no revision specified, revert the specified files or directories
4419 With no revision specified, revert the specified files or directories
4507 to the contents they had in the parent of the working directory.
4420 to the contents they had in the parent of the working directory.
4508 This restores the contents of files to an unmodified
4421 This restores the contents of files to an unmodified
4509 state and unschedules adds, removes, copies, and renames. If the
4422 state and unschedules adds, removes, copies, and renames. If the
4510 working directory has two parents, you must explicitly specify a
4423 working directory has two parents, you must explicitly specify a
4511 revision.
4424 revision.
4512
4425
4513 Using the -r/--rev or -d/--date options, revert the given files or
4426 Using the -r/--rev or -d/--date options, revert the given files or
4514 directories to their states as of a specific revision. Because
4427 directories to their states as of a specific revision. Because
4515 revert does not change the working directory parents, this will
4428 revert does not change the working directory parents, this will
4516 cause these files to appear modified. This can be helpful to "back
4429 cause these files to appear modified. This can be helpful to "back
4517 out" some or all of an earlier change. See :hg:`backout` for a
4430 out" some or all of an earlier change. See :hg:`backout` for a
4518 related method.
4431 related method.
4519
4432
4520 Modified files are saved with a .orig suffix before reverting.
4433 Modified files are saved with a .orig suffix before reverting.
4521 To disable these backups, use --no-backup. It is possible to store
4434 To disable these backups, use --no-backup. It is possible to store
4522 the backup files in a custom directory relative to the root of the
4435 the backup files in a custom directory relative to the root of the
4523 repository by setting the ``ui.origbackuppath`` configuration
4436 repository by setting the ``ui.origbackuppath`` configuration
4524 option.
4437 option.
4525
4438
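    For example, a configuration such as the following keeps all backup
    files under one dedicated directory (the path shown is only
    illustrative)::

      [ui]
      origbackuppath = .hg/origbackups
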
    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)

@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

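      For example, rollback can be disabled for a repository, or
      site-wide, with a configuration such as::

        [ui]
        rollback = false
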
      This command is not intended for use on public repositories. Once
      changes are visible for pull by other users, rolling a transaction
      back locally is ineffective (someone else may already have pulled
      the changes). Furthermore, a race is possible with readers of the
      repository; for example an in-progress pull from the repository
      may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool('ui', 'rollback', True):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    return repo.rollback(dryrun=opts.get(r'dry_run'),
                         force=opts.get(r'force'))

@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write(repo.root + "\n")

@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
      _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
    + subrepoopts,
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it. Set the ``web.allow_push`` option to ``*``
    to allow everybody to push to the server. You should use a real
    web server if you need to authenticate users.

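    For example, an ad-hoc server on a trusted network could accept
    pushes from everybody with a configuration such as the following
    (when not serving over HTTPS, ``web.push_ssl`` may also need to be
    set to false)::

      [web]
      allow_push = *
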
    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)

@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

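      - list the status as JSON, e.g. for consumption by scripts (the
        exact set of template keywords may vary between Mercurial
        versions)::

          hg status -Tjson
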
    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if pats or ui.configbool('commands', 'status.relative'):
        cwd = repo.getcwd()
    else:
        cwd = ''

    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(repo[node2], pats, opts)
    stat = repo.status(node1, node2, m,
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)

    if (opts.get('all') or opts.get('copies')
        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2], m)

    ui.pager('status')
    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()

@command('^summary|sum',
    [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, phase and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

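    For example, to include the exchange status against the default
    paths in the summary::

      hg summary --remote
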
    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('summary')
    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    ms = None
    try:
        ms = mergemod.mergestate.read(repo)
    except error.UnsupportedMergeRecords as e:
        s = ' '.join(e.recordtypes)
        ui.warn(
            _('warning: merge state has unsupported record types: %s\n') % s)
        unresolved = 0
    else:
        unresolved = [f for f in ms if ms[f] == 'u']

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), p),
                 label=cmdutil._changesetlabels(p))
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        if p.obsolete():
            ui.write(_(' (obsolete)'))
        if p.troubled():
            ui.write(' ('
                     + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
                                 for trouble in p.troubles())
                     + ')')
        ui.write('\n')
        if p.description():
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _('branch: %s\n') % branch
    if branch != 'default':
        ui.write(m, label='log.branch')
    else:
        ui.status(m, label='log.branch')

    if marks:
        active = repo._activebookmark
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if active is not None:
            if active in marks:
                ui.write(' *' + active, label=activebookmarklabel)
                marks.remove(active)
            else:
                ui.write(' [%s]' % active, label=activebookmarklabel)
        for m in marks:
            ui.write(' ' + m, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    status = repo.status(unknown=True)

    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in status.removed:
            status.removed.remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in status.added:
            status.added.remove(d)

    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]

    labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
              (ui.label(_('%d added'), 'status.added'), status.added),
              (ui.label(_('%d removed'), 'status.removed'), status.removed),
              (ui.label(_('%d renamed'), 'status.copied'), renamed),
              (ui.label(_('%d copied'), 'status.copied'), copied),
              (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
              (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
              (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
              (ui.label(_('%d subrepos'), 'status.modified'), subs)]
    t = []
    for l, s in labels:
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    if repo.vfs.exists('graftstate'):
        t += _(' (graft in progress)')
    if repo.vfs.exists('updatestate'):
        t += _(' (interrupted update)')
    elif len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (status.modified or status.added or status.removed or renamed or
              copied or subs):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if parents:
        pendingphase = max(p.phase() for p in parents)
    else:
        pendingphase = phases.public

    if pendingphase > phases.newcommitphase(ui):
        t += ' (%s)' % phases.phasenames[pendingphase]

    if cleanworkdir:
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
                                         bheads))

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    t = []
    draft = len(repo.revs('draft()'))
    if draft:
        t.append(_('%d draft') % draft)
    secret = len(repo.revs('secret()'))
    if secret:
        t.append(_('%d secret') % secret)

    if draft or secret:
        ui.status(_('phases: %s\n') % ', '.join(t))

    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        for trouble in ("unstable", "divergent", "bumped"):
            numtrouble = len(repo.revs(trouble + "()"))
            # We write all the possibilities to ease translation
            troublemsg = {
                "unstable": _("unstable: %d changesets"),
                "divergent": _("divergent: %d changesets"),
                "bumped": _("bumped: %d changesets"),
            }
            if numtrouble > 0:
                ui.status(troublemsg[trouble] % numtrouble + "\n")

    cmdutil.summaryhooks(ui, repo)

    if opts.get('remote'):
        needsincoming, needsoutgoing = True, True
    else:
        needsincoming, needsoutgoing = False, False
        for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
            if i:
                needsincoming = True
            if o:
                needsoutgoing = True
        if not needsincoming and not needsoutgoing:
            return

    def getincoming():
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        try:
            other = hg.peer(repo, {}, source)
        except error.RepoError:
            if opts.get('remote'):
                raise
            return source, sbranch, None, None, None
        revs, checkout = hg.addbranchrevs(repo, other, branches, None)
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        repo.ui.popbuffer()
        return source, sbranch, other, commoninc, commoninc[1]

    if needsincoming:
        source, sbranch, sother, commoninc, incoming = getincoming()
    else:
        source = sbranch = sother = commoninc = incoming = None

    def getoutgoing():
        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            try:
                dother = hg.peer(repo, {}, dest)
            except error.RepoError:
                if opts.get('remote'):
                    raise
                return dest, dbranch, None, None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        elif sother is None:
            # there is no explicit destination peer, but source one is invalid
            return dest, dbranch, None, None
        else:
            dother = sother
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            common = None
        else:
            common = commoninc
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
                                                commoninc=common)
        repo.ui.popbuffer()
        return dest, dbranch, dother, outgoing

    if needsoutgoing:
        dest, dbranch, dother, outgoing = getoutgoing()
    else:
        dest = dbranch = dother = outgoing = None

    if opts.get('remote'):
        t = []
        if incoming:
            t.append(_('1 or more incoming'))
        o = outgoing.missing
        if o:
            t.append(_('%d outgoing') % len(o))
        other = dother or sother
        if 'bookmarks' in other.listkeys('namespaces'):
            counts = bookmarks.summary(repo, other)
            if counts[0] > 0:
                t.append(_('%d incoming bookmarks') % counts[0])
            if counts[1] > 0:
                t.append(_('%d outgoing bookmarks') % counts[1])

        if t:
            # i18n: column positioning for "hg summary"
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('remote: (synced)\n'))

    cmdutil.summaryremotehooks(ui, repo, opts,
                               ((source, sbranch, sother, commoninc),
                                (dest, dbranch, dother, outgoing)))

@command('tag',
    [('f', 'force', None, _('force tag')),
    ('l', 'local', None, _('make the tag local')),
    ('r', 'rev', '', _('revision to tag'), _('REV')),
    ('', 'remove', None, _('remove a tag')),
    # -l/--local is already there, commitopts cannot be used
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
    ] + commitopts2,
    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

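    .. container:: verbose

      Examples:

      - tag the working directory parent as the 1.0 release::

          hg tag 1.0

      - force-move an existing tag to another revision (REV is a
        placeholder)::

          hg tag -f -r REV 1.0

      - remove a global tag again, recording the removal as a new
        .hgtags commit::

          hg tag --remove 1.0
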
5160 Returns 0 on success.
5073 Returns 0 on success.
5161 """
5074 """
5162 opts = pycompat.byteskwargs(opts)
5075 opts = pycompat.byteskwargs(opts)
5163 wlock = lock = None
5076 wlock = lock = None
5164 try:
5077 try:
5165 wlock = repo.wlock()
5078 wlock = repo.wlock()
5166 lock = repo.lock()
5079 lock = repo.lock()
5167 rev_ = "."
5080 rev_ = "."
5168 names = [t.strip() for t in (name1,) + names]
5081 names = [t.strip() for t in (name1,) + names]
5169 if len(names) != len(set(names)):
5082 if len(names) != len(set(names)):
5170 raise error.Abort(_('tag names must be unique'))
5083 raise error.Abort(_('tag names must be unique'))
5171 for n in names:
5084 for n in names:
5172 scmutil.checknewlabel(repo, n, 'tag')
5085 scmutil.checknewlabel(repo, n, 'tag')
5173 if not n:
5086 if not n:
5174 raise error.Abort(_('tag names cannot consist entirely of '
5087 raise error.Abort(_('tag names cannot consist entirely of '
5175 'whitespace'))
5088 'whitespace'))
5176 if opts.get('rev') and opts.get('remove'):
5089 if opts.get('rev') and opts.get('remove'):
5177 raise error.Abort(_("--rev and --remove are incompatible"))
5090 raise error.Abort(_("--rev and --remove are incompatible"))
5178 if opts.get('rev'):
5091 if opts.get('rev'):
5179 rev_ = opts['rev']
5092 rev_ = opts['rev']
5180 message = opts.get('message')
5093 message = opts.get('message')
5181 if opts.get('remove'):
5094 if opts.get('remove'):
5182 if opts.get('local'):
5095 if opts.get('local'):
5183 expectedtype = 'local'
5096 expectedtype = 'local'
5184 else:
5097 else:
5185 expectedtype = 'global'
5098 expectedtype = 'global'
5186
5099
5187 for n in names:
5100 for n in names:
5188 if not repo.tagtype(n):
5101 if not repo.tagtype(n):
5189 raise error.Abort(_("tag '%s' does not exist") % n)
5102 raise error.Abort(_("tag '%s' does not exist") % n)
5190 if repo.tagtype(n) != expectedtype:
5103 if repo.tagtype(n) != expectedtype:
5191 if expectedtype == 'global':
5104 if expectedtype == 'global':
5192 raise error.Abort(_("tag '%s' is not a global tag") % n)
5105 raise error.Abort(_("tag '%s' is not a global tag") % n)
5193 else:
5106 else:
5194 raise error.Abort(_("tag '%s' is not a local tag") % n)
5107 raise error.Abort(_("tag '%s' is not a local tag") % n)
5195 rev_ = 'null'
5108 rev_ = 'null'
5196 if not message:
5109 if not message:
5197 # we don't translate commit messages
5110 # we don't translate commit messages
5198 message = 'Removed tag %s' % ', '.join(names)
5111 message = 'Removed tag %s' % ', '.join(names)
5199 elif not opts.get('force'):
5112 elif not opts.get('force'):
5200 for n in names:
5113 for n in names:
5201 if n in repo.tags():
5114 if n in repo.tags():
5202 raise error.Abort(_("tag '%s' already exists "
5115 raise error.Abort(_("tag '%s' already exists "
5203 "(use -f to force)") % n)
5116 "(use -f to force)") % n)
5204 if not opts.get('local'):
5117 if not opts.get('local'):
5205 p1, p2 = repo.dirstate.parents()
5118 p1, p2 = repo.dirstate.parents()
5206 if p2 != nullid:
5119 if p2 != nullid:
5207 raise error.Abort(_('uncommitted merge'))
5120 raise error.Abort(_('uncommitted merge'))
5208 bheads = repo.branchheads()
5121 bheads = repo.branchheads()
5209 if not opts.get('force') and bheads and p1 not in bheads:
5122 if not opts.get('force') and bheads and p1 not in bheads:
5210 raise error.Abort(_('working directory is not at a branch head '
5123 raise error.Abort(_('working directory is not at a branch head '
5211 '(use -f to force)'))
5124 '(use -f to force)'))
5212 r = scmutil.revsingle(repo, rev_).node()
5125 r = scmutil.revsingle(repo, rev_).node()
5213
5126
5214 if not message:
5127 if not message:
5215 # we don't translate commit messages
5128 # we don't translate commit messages
5216 message = ('Added tag %s for changeset %s' %
5129 message = ('Added tag %s for changeset %s' %
5217 (', '.join(names), short(r)))
5130 (', '.join(names), short(r)))
5218
5131
5219 date = opts.get('date')
5132 date = opts.get('date')
5220 if date:
5133 if date:
5221 date = util.parsedate(date)
5134 date = util.parsedate(date)
5222
5135
5223 if opts.get('remove'):
5136 if opts.get('remove'):
5224 editform = 'tag.remove'
5137 editform = 'tag.remove'
5225 else:
5138 else:
5226 editform = 'tag.add'
5139 editform = 'tag.add'
5227 editor = cmdutil.getcommiteditor(editform=editform,
5140 editor = cmdutil.getcommiteditor(editform=editform,
5228 **pycompat.strkwargs(opts))
5141 **pycompat.strkwargs(opts))
5229
5142
5230 # don't allow tagging the null rev
5143 # don't allow tagging the null rev
5231 if (not opts.get('remove') and
5144 if (not opts.get('remove') and
5232 scmutil.revsingle(repo, rev_).rev() == nullrev):
5145 scmutil.revsingle(repo, rev_).rev() == nullrev):
5233 raise error.Abort(_("cannot tag null revision"))
5146 raise error.Abort(_("cannot tag null revision"))
5234
5147
5235 tagsmod.tag(repo, names, r, message, opts.get('local'),
5148 tagsmod.tag(repo, names, r, message, opts.get('local'),
5236 opts.get('user'), date, editor=editor)
5149 opts.get('user'), date, editor=editor)
5237 finally:
5150 finally:
5238 release(lock, wlock)
5151 release(lock, wlock)
5239
5152
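The name handling at the top of the tag command above reduces to a few plain-Python checks: strip each name, reject duplicates, and reject names that are all whitespace. A minimal sketch of just that part, with ValueError standing in for error.Abort and the scmutil.checknewlabel call omitted (validate_tag_names is a hypothetical helper, not part of Mercurial):

# Illustrative sketch only; mirrors the validation loop in the tag command.
def validate_tag_names(name1, *names):
    candidates = [n.strip() for n in (name1,) + names]
    if len(candidates) != len(set(candidates)):
        raise ValueError('tag names must be unique')
    for n in candidates:
        if not n:
            raise ValueError('tag names cannot consist entirely of whitespace')
    return candidates

print(validate_tag_names('v1.0 ', 'stable'))  # ['v1.0', 'stable']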
5240 @command('tags', formatteropts, '')
5153 @command('tags', formatteropts, '')
5241 def tags(ui, repo, **opts):
5154 def tags(ui, repo, **opts):
5242 """list repository tags
5155 """list repository tags
5243
5156
5244 This lists both regular and local tags. When the -v/--verbose
5157 This lists both regular and local tags. When the -v/--verbose
5245 switch is used, a third column "local" is printed for local tags.
5158 switch is used, a third column "local" is printed for local tags.
5246 When the -q/--quiet switch is used, only the tag name is printed.
5159 When the -q/--quiet switch is used, only the tag name is printed.
5247
5160
5248 Returns 0 on success.
5161 Returns 0 on success.
5249 """
5162 """
5250
5163
5251 opts = pycompat.byteskwargs(opts)
5164 opts = pycompat.byteskwargs(opts)
5252 ui.pager('tags')
5165 ui.pager('tags')
5253 fm = ui.formatter('tags', opts)
5166 fm = ui.formatter('tags', opts)
5254 hexfunc = fm.hexfunc
5167 hexfunc = fm.hexfunc
5255 tagtype = ""
5168 tagtype = ""
5256
5169
5257 for t, n in reversed(repo.tagslist()):
5170 for t, n in reversed(repo.tagslist()):
5258 hn = hexfunc(n)
5171 hn = hexfunc(n)
5259 label = 'tags.normal'
5172 label = 'tags.normal'
5260 tagtype = ''
5173 tagtype = ''
5261 if repo.tagtype(t) == 'local':
5174 if repo.tagtype(t) == 'local':
5262 label = 'tags.local'
5175 label = 'tags.local'
5263 tagtype = 'local'
5176 tagtype = 'local'
5264
5177
5265 fm.startitem()
5178 fm.startitem()
5266 fm.write('tag', '%s', t, label=label)
5179 fm.write('tag', '%s', t, label=label)
5267 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5180 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5268 fm.condwrite(not ui.quiet, 'rev node', fmt,
5181 fm.condwrite(not ui.quiet, 'rev node', fmt,
5269 repo.changelog.rev(n), hn, label=label)
5182 repo.changelog.rev(n), hn, label=label)
5270 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5183 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5271 tagtype, label=label)
5184 tagtype, label=label)
5272 fm.plain('\n')
5185 fm.plain('\n')
5273 fm.end()
5186 fm.end()
5274
5187
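The tags listing above lines its columns up by hand rather than through a table helper: the tag name is padded out to 30 columns, then the revision number and node follow. A rough sketch of that layout, using len() where the real code uses encoding.colwidth() so double-width characters still align (format_tag_line is an illustrative helper, not Mercurial API):

# Approximation of the `hg tags` column layout built in the code above.
def format_tag_line(tag, rev, node):
    pad = " " * max(0, 30 - len(tag))
    return "%s%s %5d:%s" % (tag, pad, rev, node)

print(format_tag_line("tip", 42, "a1b2c3d4e5f6"))
print(format_tag_line("release-1.0", 7, "0123456789ab"))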
5275 @command('tip',
5188 @command('tip',
5276 [('p', 'patch', None, _('show patch')),
5189 [('p', 'patch', None, _('show patch')),
5277 ('g', 'git', None, _('use git extended diff format')),
5190 ('g', 'git', None, _('use git extended diff format')),
5278 ] + templateopts,
5191 ] + templateopts,
5279 _('[-p] [-g]'))
5192 _('[-p] [-g]'))
5280 def tip(ui, repo, **opts):
5193 def tip(ui, repo, **opts):
5281 """show the tip revision (DEPRECATED)
5194 """show the tip revision (DEPRECATED)
5282
5195
5283 The tip revision (usually just called the tip) is the changeset
5196 The tip revision (usually just called the tip) is the changeset
5284 most recently added to the repository (and therefore the most
5197 most recently added to the repository (and therefore the most
5285 recently changed head).
5198 recently changed head).
5286
5199
5287 If you have just made a commit, that commit will be the tip. If
5200 If you have just made a commit, that commit will be the tip. If
5288 you have just pulled changes from another repository, the tip of
5201 you have just pulled changes from another repository, the tip of
5289 that repository becomes the current tip. The "tip" tag is special
5202 that repository becomes the current tip. The "tip" tag is special
5290 and cannot be renamed or assigned to a different changeset.
5203 and cannot be renamed or assigned to a different changeset.
5291
5204
5292 This command is deprecated, please use :hg:`heads` instead.
5205 This command is deprecated, please use :hg:`heads` instead.
5293
5206
5294 Returns 0 on success.
5207 Returns 0 on success.
5295 """
5208 """
5296 opts = pycompat.byteskwargs(opts)
5209 opts = pycompat.byteskwargs(opts)
5297 displayer = cmdutil.show_changeset(ui, repo, opts)
5210 displayer = cmdutil.show_changeset(ui, repo, opts)
5298 displayer.show(repo['tip'])
5211 displayer.show(repo['tip'])
5299 displayer.close()
5212 displayer.close()
5300
5213
5301 @command('unbundle',
5214 @command('unbundle',
5302 [('u', 'update', None,
5215 [('u', 'update', None,
5303 _('update to new branch head if changesets were unbundled'))],
5216 _('update to new branch head if changesets were unbundled'))],
5304 _('[-u] FILE...'))
5217 _('[-u] FILE...'))
5305 def unbundle(ui, repo, fname1, *fnames, **opts):
5218 def unbundle(ui, repo, fname1, *fnames, **opts):
5306 """apply one or more bundle files
5219 """apply one or more bundle files
5307
5220
5308 Apply one or more bundle files generated by :hg:`bundle`.
5221 Apply one or more bundle files generated by :hg:`bundle`.
5309
5222
5310 Returns 0 on success, 1 if an update has unresolved files.
5223 Returns 0 on success, 1 if an update has unresolved files.
5311 """
5224 """
5312 fnames = (fname1,) + fnames
5225 fnames = (fname1,) + fnames
5313
5226
5314 with repo.lock():
5227 with repo.lock():
5315 for fname in fnames:
5228 for fname in fnames:
5316 f = hg.openpath(ui, fname)
5229 f = hg.openpath(ui, fname)
5317 gen = exchange.readbundle(ui, f, fname)
5230 gen = exchange.readbundle(ui, f, fname)
5318 if isinstance(gen, bundle2.unbundle20):
5231 if isinstance(gen, bundle2.unbundle20):
5319 tr = repo.transaction('unbundle')
5232 tr = repo.transaction('unbundle')
5320 try:
5233 try:
5321 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5234 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5322 url='bundle:' + fname)
5235 url='bundle:' + fname)
5323 tr.close()
5236 tr.close()
5324 except error.BundleUnknownFeatureError as exc:
5237 except error.BundleUnknownFeatureError as exc:
5325 raise error.Abort(_('%s: unknown bundle feature, %s')
5238 raise error.Abort(_('%s: unknown bundle feature, %s')
5326 % (fname, exc),
5239 % (fname, exc),
5327 hint=_("see https://mercurial-scm.org/"
5240 hint=_("see https://mercurial-scm.org/"
5328 "wiki/BundleFeature for more "
5241 "wiki/BundleFeature for more "
5329 "information"))
5242 "information"))
5330 finally:
5243 finally:
5331 if tr:
5244 if tr:
5332 tr.release()
5245 tr.release()
5333 changes = [r.get('return', 0)
5246 changes = [r.get('return', 0)
5334 for r in op.records['changegroup']]
5247 for r in op.records['changegroup']]
5335 modheads = changegroup.combineresults(changes)
5248 modheads = changegroup.combineresults(changes)
5336 elif isinstance(gen, streamclone.streamcloneapplier):
5249 elif isinstance(gen, streamclone.streamcloneapplier):
5337 raise error.Abort(
5250 raise error.Abort(
5338 _('packed bundles cannot be applied with '
5251 _('packed bundles cannot be applied with '
5339 '"hg unbundle"'),
5252 '"hg unbundle"'),
5340 hint=_('use "hg debugapplystreamclonebundle"'))
5253 hint=_('use "hg debugapplystreamclonebundle"'))
5341 else:
5254 else:
5342 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
5255 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
5343
5256
5344 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5257 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5345
5258
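Each bundle2 file applied by unbundle above runs inside its own transaction: the transaction is closed only when applybundle succeeded and is always released afterwards, so a failed bundle leaves no partial state. A sketch of that control flow with dummy stand-ins (DummyTransaction and apply_bundles are hypothetical, not Mercurial objects):

# Shape of the per-bundle transaction handling; not Mercurial's API.
class DummyTransaction(object):
    def __init__(self):
        self.committed = False
    def close(self):
        self.committed = True
    def release(self):
        if not self.committed:
            print("rolling back")

def apply_bundles(bundles, apply_bundle):
    results = []
    for data in bundles:
        tr = DummyTransaction()
        try:
            results.append(apply_bundle(data, tr))
            tr.close()      # only reached if apply_bundle did not raise
        finally:
            tr.release()    # no-op after close(), rollback otherwise
    return results

print(apply_bundles(["b1", "b2"], lambda data, tr: len(data)))  # [2, 2]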
5346 @command('^update|up|checkout|co',
5259 @command('^update|up|checkout|co',
5347 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5260 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5348 ('c', 'check', None, _('require clean working directory')),
5261 ('c', 'check', None, _('require clean working directory')),
5349 ('m', 'merge', None, _('merge uncommitted changes')),
5262 ('m', 'merge', None, _('merge uncommitted changes')),
5350 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5263 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5351 ('r', 'rev', '', _('revision'), _('REV'))
5264 ('r', 'rev', '', _('revision'), _('REV'))
5352 ] + mergetoolopts,
5265 ] + mergetoolopts,
5353 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5266 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5354 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5267 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5355 merge=None, tool=None):
5268 merge=None, tool=None):
5356 """update working directory (or switch revisions)
5269 """update working directory (or switch revisions)
5357
5270
5358 Update the repository's working directory to the specified
5271 Update the repository's working directory to the specified
5359 changeset. If no changeset is specified, update to the tip of the
5272 changeset. If no changeset is specified, update to the tip of the
5360 current named branch and move the active bookmark (see :hg:`help
5273 current named branch and move the active bookmark (see :hg:`help
5361 bookmarks`).
5274 bookmarks`).
5362
5275
5363 Update sets the working directory's parent revision to the specified
5276 Update sets the working directory's parent revision to the specified
5364 changeset (see :hg:`help parents`).
5277 changeset (see :hg:`help parents`).
5365
5278
5366 If the changeset is not a descendant or ancestor of the working
5279 If the changeset is not a descendant or ancestor of the working
5367 directory's parent and there are uncommitted changes, the update is
5280 directory's parent and there are uncommitted changes, the update is
5368 aborted. With the -c/--check option, the working directory is checked
5281 aborted. With the -c/--check option, the working directory is checked
5369 for uncommitted changes; if none are found, the working directory is
5282 for uncommitted changes; if none are found, the working directory is
5370 updated to the specified changeset.
5283 updated to the specified changeset.
5371
5284
5372 .. container:: verbose
5285 .. container:: verbose
5373
5286
5374 The -C/--clean, -c/--check, and -m/--merge options control what
5287 The -C/--clean, -c/--check, and -m/--merge options control what
5375 happens if the working directory contains uncommitted changes.
5288 happens if the working directory contains uncommitted changes.
5376 At most one of them can be specified.
5289 At most one of them can be specified.
5377
5290
5378 1. If no option is specified, and if
5291 1. If no option is specified, and if
5379 the requested changeset is an ancestor or descendant of
5292 the requested changeset is an ancestor or descendant of
5380 the working directory's parent, the uncommitted changes
5293 the working directory's parent, the uncommitted changes
5381 are merged into the requested changeset and the merged
5294 are merged into the requested changeset and the merged
5382 result is left uncommitted. If the requested changeset is
5295 result is left uncommitted. If the requested changeset is
5383 not an ancestor or descendant (that is, it is on another
5296 not an ancestor or descendant (that is, it is on another
5384 branch), the update is aborted and the uncommitted changes
5297 branch), the update is aborted and the uncommitted changes
5385 are preserved.
5298 are preserved.
5386
5299
5387 2. With the -m/--merge option, the update is allowed even if the
5300 2. With the -m/--merge option, the update is allowed even if the
5388 requested changeset is not an ancestor or descendant of
5301 requested changeset is not an ancestor or descendant of
5389 the working directory's parent.
5302 the working directory's parent.
5390
5303
5391 3. With the -c/--check option, the update is aborted and the
5304 3. With the -c/--check option, the update is aborted and the
5392 uncommitted changes are preserved.
5305 uncommitted changes are preserved.
5393
5306
5394 4. With the -C/--clean option, uncommitted changes are discarded and
5307 4. With the -C/--clean option, uncommitted changes are discarded and
5395 the working directory is updated to the requested changeset.
5308 the working directory is updated to the requested changeset.
5396
5309
5397 To cancel an uncommitted merge (and lose your changes), use
5310 To cancel an uncommitted merge (and lose your changes), use
5398 :hg:`update --clean .`.
5311 :hg:`update --clean .`.
5399
5312
5400 Use null as the changeset to remove the working directory (like
5313 Use null as the changeset to remove the working directory (like
5401 :hg:`clone -U`).
5314 :hg:`clone -U`).
5402
5315
5403 If you want to revert just one file to an older revision, use
5316 If you want to revert just one file to an older revision, use
5404 :hg:`revert [-r REV] NAME`.
5317 :hg:`revert [-r REV] NAME`.
5405
5318
5406 See :hg:`help dates` for a list of formats valid for -d/--date.
5319 See :hg:`help dates` for a list of formats valid for -d/--date.
5407
5320
5408 Returns 0 on success, 1 if there are unresolved files.
5321 Returns 0 on success, 1 if there are unresolved files.
5409 """
5322 """
5410 if rev and node:
5323 if rev and node:
5411 raise error.Abort(_("please specify just one revision"))
5324 raise error.Abort(_("please specify just one revision"))
5412
5325
5413 if ui.configbool('commands', 'update.requiredest'):
5326 if ui.configbool('commands', 'update.requiredest'):
5414 if not node and not rev and not date:
5327 if not node and not rev and not date:
5415 raise error.Abort(_('you must specify a destination'),
5328 raise error.Abort(_('you must specify a destination'),
5416 hint=_('for example: hg update ".::"'))
5329 hint=_('for example: hg update ".::"'))
5417
5330
5418 if rev is None or rev == '':
5331 if rev is None or rev == '':
5419 rev = node
5332 rev = node
5420
5333
5421 if date and rev is not None:
5334 if date and rev is not None:
5422 raise error.Abort(_("you can't specify a revision and a date"))
5335 raise error.Abort(_("you can't specify a revision and a date"))
5423
5336
5424 if len([x for x in (clean, check, merge) if x]) > 1:
5337 if len([x for x in (clean, check, merge) if x]) > 1:
5425 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5338 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5426 "or -m/merge"))
5339 "or -m/merge"))
5427
5340
5428 updatecheck = None
5341 updatecheck = None
5429 if check:
5342 if check:
5430 updatecheck = 'abort'
5343 updatecheck = 'abort'
5431 elif merge:
5344 elif merge:
5432 updatecheck = 'none'
5345 updatecheck = 'none'
5433
5346
5434 with repo.wlock():
5347 with repo.wlock():
5435 cmdutil.clearunfinished(repo)
5348 cmdutil.clearunfinished(repo)
5436
5349
5437 if date:
5350 if date:
5438 rev = cmdutil.finddate(ui, repo, date)
5351 rev = cmdutil.finddate(ui, repo, date)
5439
5352
5440 # if we defined a bookmark, we have to remember the original name
5353 # if we defined a bookmark, we have to remember the original name
5441 brev = rev
5354 brev = rev
5442 rev = scmutil.revsingle(repo, rev, rev).rev()
5355 rev = scmutil.revsingle(repo, rev, rev).rev()
5443
5356
5444 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5357 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5445
5358
5446 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5359 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5447 updatecheck=updatecheck)
5360 updatecheck=updatecheck)
5448
5361
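The option handling at the start of update above boils down to a mutual-exclusion check plus a mapping from flags to an updatecheck mode. A condensed sketch, with ValueError standing in for error.Abort (pick_updatecheck is an illustrative helper, not the command itself):

# Condensed version of the -C/-c/-m handling shown in update() above.
def pick_updatecheck(clean=False, check=False, merge=False):
    if sum(1 for flag in (clean, check, merge) if flag) > 1:
        raise ValueError("can only specify one of -C/--clean, -c/--check, "
                         "or -m/--merge")
    if check:
        return 'abort'
    if merge:
        return 'none'
    return None  # the default behaviour is decided later by hg.updatetotally()

print(pick_updatecheck(check=True))  # abort
print(pick_updatecheck())            # None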
5449 @command('verify', [])
5362 @command('verify', [])
5450 def verify(ui, repo):
5363 def verify(ui, repo):
5451 """verify the integrity of the repository
5364 """verify the integrity of the repository
5452
5365
5453 Verify the integrity of the current repository.
5366 Verify the integrity of the current repository.
5454
5367
5455 This will perform an extensive check of the repository's
5368 This will perform an extensive check of the repository's
5456 integrity, validating the hashes and checksums of each entry in
5369 integrity, validating the hashes and checksums of each entry in
5457 the changelog, manifest, and tracked files, as well as the
5370 the changelog, manifest, and tracked files, as well as the
5458 integrity of their crosslinks and indices.
5371 integrity of their crosslinks and indices.
5459
5372
5460 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5373 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5461 for more information about recovery from corruption of the
5374 for more information about recovery from corruption of the
5462 repository.
5375 repository.
5463
5376
5464 Returns 0 on success, 1 if errors are encountered.
5377 Returns 0 on success, 1 if errors are encountered.
5465 """
5378 """
5466 return hg.verify(repo)
5379 return hg.verify(repo)
5467
5380
5468 @command('version', [] + formatteropts, norepo=True)
5381 @command('version', [] + formatteropts, norepo=True)
5469 def version_(ui, **opts):
5382 def version_(ui, **opts):
5470 """output version and copyright information"""
5383 """output version and copyright information"""
5471 opts = pycompat.byteskwargs(opts)
5384 opts = pycompat.byteskwargs(opts)
5472 if ui.verbose:
5385 if ui.verbose:
5473 ui.pager('version')
5386 ui.pager('version')
5474 fm = ui.formatter("version", opts)
5387 fm = ui.formatter("version", opts)
5475 fm.startitem()
5388 fm.startitem()
5476 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5389 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5477 util.version())
5390 util.version())
5478 license = _(
5391 license = _(
5479 "(see https://mercurial-scm.org for more information)\n"
5392 "(see https://mercurial-scm.org for more information)\n"
5480 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5393 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5481 "This is free software; see the source for copying conditions. "
5394 "This is free software; see the source for copying conditions. "
5482 "There is NO\nwarranty; "
5395 "There is NO\nwarranty; "
5483 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5396 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5484 )
5397 )
5485 if not ui.quiet:
5398 if not ui.quiet:
5486 fm.plain(license)
5399 fm.plain(license)
5487
5400
5488 if ui.verbose:
5401 if ui.verbose:
5489 fm.plain(_("\nEnabled extensions:\n\n"))
5402 fm.plain(_("\nEnabled extensions:\n\n"))
5490 # format names and versions into columns
5403 # format names and versions into columns
5491 names = []
5404 names = []
5492 vers = []
5405 vers = []
5493 isinternals = []
5406 isinternals = []
5494 for name, module in extensions.extensions():
5407 for name, module in extensions.extensions():
5495 names.append(name)
5408 names.append(name)
5496 vers.append(extensions.moduleversion(module) or None)
5409 vers.append(extensions.moduleversion(module) or None)
5497 isinternals.append(extensions.ismoduleinternal(module))
5410 isinternals.append(extensions.ismoduleinternal(module))
5498 fn = fm.nested("extensions")
5411 fn = fm.nested("extensions")
5499 if names:
5412 if names:
5500 namefmt = " %%-%ds " % max(len(n) for n in names)
5413 namefmt = " %%-%ds " % max(len(n) for n in names)
5501 places = [_("external"), _("internal")]
5414 places = [_("external"), _("internal")]
5502 for n, v, p in zip(names, vers, isinternals):
5415 for n, v, p in zip(names, vers, isinternals):
5503 fn.startitem()
5416 fn.startitem()
5504 fn.condwrite(ui.verbose, "name", namefmt, n)
5417 fn.condwrite(ui.verbose, "name", namefmt, n)
5505 if ui.verbose:
5418 if ui.verbose:
5506 fn.plain("%s " % places[p])
5419 fn.plain("%s " % places[p])
5507 fn.data(bundled=p)
5420 fn.data(bundled=p)
5508 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5421 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5509 if ui.verbose:
5422 if ui.verbose:
5510 fn.plain("\n")
5423 fn.plain("\n")
5511 fn.end()
5424 fn.end()
5512 fm.end()
5425 fm.end()
5513
5426
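The verbose extension listing above sizes its name column from the longest extension name before writing any rows. Roughly, assuming each row is a simple (name, version, isinternal) tuple (format_extensions is an illustrative helper, not Mercurial API):

# Sketch of the name/origin/version column formatting used above.
def format_extensions(rows):
    namefmt = " %%-%ds " % max(len(name) for name, _, _ in rows)
    places = ["external", "internal"]
    lines = []
    for name, version, internal in rows:
        lines.append(namefmt % name + places[internal] + "  " + (version or ""))
    return "\n".join(lines)

print(format_extensions([("color", "", True), ("myext", "1.2", False)]))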
5514 def loadcmdtable(ui, name, cmdtable):
5427 def loadcmdtable(ui, name, cmdtable):
5515 """Load command functions from specified cmdtable
5428 """Load command functions from specified cmdtable
5516 """
5429 """
5517 overrides = [cmd for cmd in cmdtable if cmd in table]
5430 overrides = [cmd for cmd in cmdtable if cmd in table]
5518 if overrides:
5431 if overrides:
5519 ui.warn(_("extension '%s' overrides commands: %s\n")
5432 ui.warn(_("extension '%s' overrides commands: %s\n")
5520 % (name, " ".join(overrides)))
5433 % (name, " ".join(overrides)))
5521 table.update(cmdtable)
5434 table.update(cmdtable)
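loadcmdtable above is the usual pattern for letting an extension graft commands onto the global table while warning about anything it shadows. The same idea with plain dictionaries (load_cmdtable here is an illustrative copy, not the function itself):

# Override-detection before merging one command table into another.
def load_cmdtable(table, name, cmdtable):
    overrides = [cmd for cmd in cmdtable if cmd in table]
    if overrides:
        print("extension '%s' overrides commands: %s"
              % (name, " ".join(overrides)))
    table.update(cmdtable)

core = {'status': None, 'log': None}
load_cmdtable(core, 'myext', {'log': None, 'lint': None})
print(sorted(core))  # ['lint', 'log', 'status']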
@@ -1,2204 +1,2204 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 commands,
35 commands,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 filemerge,
43 filemerge,
44 fileset,
44 fileset,
45 formatter,
45 formatter,
46 hg,
46 hg,
47 localrepo,
47 localrepo,
48 lock as lockmod,
48 lock as lockmod,
49 merge as mergemod,
49 merge as mergemod,
50 obsolete,
50 obsolete,
51 policy,
51 policy,
52 pvec,
52 pvec,
53 pycompat,
53 pycompat,
54 registrar,
54 registrar,
55 repair,
55 repair,
56 revlog,
56 revlog,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 setdiscovery,
60 setdiscovery,
61 simplemerge,
61 simplemerge,
62 smartset,
62 smartset,
63 sslutil,
63 sslutil,
64 streamclone,
64 streamclone,
65 templater,
65 templater,
66 treediscovery,
66 treediscovery,
67 upgrade,
67 upgrade,
68 util,
68 util,
69 vfs as vfsmod,
69 vfs as vfsmod,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 # We reuse the command table from commands because it is easier than
74 # We reuse the command table from commands because it is easier than
75 # teaching dispatch about multiple tables.
75 # teaching dispatch about multiple tables.
76 command = registrar.command(commands.table)
76 command = registrar.command(commands.table)
77
77
78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
79 def debugancestor(ui, repo, *args):
79 def debugancestor(ui, repo, *args):
80 """find the ancestor revision of two revisions in a given index"""
80 """find the ancestor revision of two revisions in a given index"""
81 if len(args) == 3:
81 if len(args) == 3:
82 index, rev1, rev2 = args
82 index, rev1, rev2 = args
83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
84 lookup = r.lookup
84 lookup = r.lookup
85 elif len(args) == 2:
85 elif len(args) == 2:
86 if not repo:
86 if not repo:
87 raise error.Abort(_('there is no Mercurial repository here '
87 raise error.Abort(_('there is no Mercurial repository here '
88 '(.hg not found)'))
88 '(.hg not found)'))
89 rev1, rev2 = args
89 rev1, rev2 = args
90 r = repo.changelog
90 r = repo.changelog
91 lookup = repo.lookup
91 lookup = repo.lookup
92 else:
92 else:
93 raise error.Abort(_('either two or three arguments required'))
93 raise error.Abort(_('either two or three arguments required'))
94 a = r.ancestor(lookup(rev1), lookup(rev2))
94 a = r.ancestor(lookup(rev1), lookup(rev2))
95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
96
96
97 @command('debugapplystreamclonebundle', [], 'FILE')
97 @command('debugapplystreamclonebundle', [], 'FILE')
98 def debugapplystreamclonebundle(ui, repo, fname):
98 def debugapplystreamclonebundle(ui, repo, fname):
99 """apply a stream clone bundle file"""
99 """apply a stream clone bundle file"""
100 f = hg.openpath(ui, fname)
100 f = hg.openpath(ui, fname)
101 gen = exchange.readbundle(ui, f, fname)
101 gen = exchange.readbundle(ui, f, fname)
102 gen.apply(repo)
102 gen.apply(repo)
103
103
104 @command('debugbuilddag',
104 @command('debugbuilddag',
105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
107 ('n', 'new-file', None, _('add new file at each rev'))],
107 ('n', 'new-file', None, _('add new file at each rev'))],
108 _('[OPTION]... [TEXT]'))
108 _('[OPTION]... [TEXT]'))
109 def debugbuilddag(ui, repo, text=None,
109 def debugbuilddag(ui, repo, text=None,
110 mergeable_file=False,
110 mergeable_file=False,
111 overwritten_file=False,
111 overwritten_file=False,
112 new_file=False):
112 new_file=False):
113 """builds a repo with a given DAG from scratch in the current empty repo
113 """builds a repo with a given DAG from scratch in the current empty repo
114
114
115 The description of the DAG is read from stdin if not given on the
115 The description of the DAG is read from stdin if not given on the
116 command line.
116 command line.
117
117
118 Elements:
118 Elements:
119
119
120 - "+n" is a linear run of n nodes based on the current default parent
120 - "+n" is a linear run of n nodes based on the current default parent
121 - "." is a single node based on the current default parent
121 - "." is a single node based on the current default parent
122 - "$" resets the default parent to null (implied at the start);
122 - "$" resets the default parent to null (implied at the start);
123 otherwise the default parent is always the last node created
123 otherwise the default parent is always the last node created
124 - "<p" sets the default parent to the backref p
124 - "<p" sets the default parent to the backref p
125 - "*p" is a fork at parent p, which is a backref
125 - "*p" is a fork at parent p, which is a backref
126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
127 - "/p2" is a merge of the preceding node and p2
127 - "/p2" is a merge of the preceding node and p2
128 - ":tag" defines a local tag for the preceding node
128 - ":tag" defines a local tag for the preceding node
129 - "@branch" sets the named branch for subsequent nodes
129 - "@branch" sets the named branch for subsequent nodes
130 - "#...\\n" is a comment up to the end of the line
130 - "#...\\n" is a comment up to the end of the line
131
131
132 Whitespace between the above elements is ignored.
132 Whitespace between the above elements is ignored.
133
133
134 A backref is either
134 A backref is either
135
135
136 - a number n, which references the node curr-n, where curr is the current
136 - a number n, which references the node curr-n, where curr is the current
137 node, or
137 node, or
138 - the name of a local tag you placed earlier using ":tag", or
138 - the name of a local tag you placed earlier using ":tag", or
139 - empty to denote the default parent.
139 - empty to denote the default parent.
140
140
141 All string-valued elements are either strictly alphanumeric, or must
141 All string-valued elements are either strictly alphanumeric, or must
142 be enclosed in double quotes ("..."), with "\\" as escape character.
142 be enclosed in double quotes ("..."), with "\\" as escape character.
143 """
143 """
144
144
145 if text is None:
145 if text is None:
146 ui.status(_("reading DAG from stdin\n"))
146 ui.status(_("reading DAG from stdin\n"))
147 text = ui.fin.read()
147 text = ui.fin.read()
148
148
149 cl = repo.changelog
149 cl = repo.changelog
150 if len(cl) > 0:
150 if len(cl) > 0:
151 raise error.Abort(_('repository is not empty'))
151 raise error.Abort(_('repository is not empty'))
152
152
153 # determine number of revs in DAG
153 # determine number of revs in DAG
154 total = 0
154 total = 0
155 for type, data in dagparser.parsedag(text):
155 for type, data in dagparser.parsedag(text):
156 if type == 'n':
156 if type == 'n':
157 total += 1
157 total += 1
158
158
159 if mergeable_file:
159 if mergeable_file:
160 linesperrev = 2
160 linesperrev = 2
161 # make a file with k lines per rev
161 # make a file with k lines per rev
162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
163 initialmergedlines.append("")
163 initialmergedlines.append("")
164
164
165 tags = []
165 tags = []
166
166
167 wlock = lock = tr = None
167 wlock = lock = tr = None
168 try:
168 try:
169 wlock = repo.wlock()
169 wlock = repo.wlock()
170 lock = repo.lock()
170 lock = repo.lock()
171 tr = repo.transaction("builddag")
171 tr = repo.transaction("builddag")
172
172
173 at = -1
173 at = -1
174 atbranch = 'default'
174 atbranch = 'default'
175 nodeids = []
175 nodeids = []
176 id = 0
176 id = 0
177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
178 for type, data in dagparser.parsedag(text):
178 for type, data in dagparser.parsedag(text):
179 if type == 'n':
179 if type == 'n':
180 ui.note(('node %s\n' % str(data)))
180 ui.note(('node %s\n' % str(data)))
181 id, ps = data
181 id, ps = data
182
182
183 files = []
183 files = []
184 fctxs = {}
184 fctxs = {}
185
185
186 p2 = None
186 p2 = None
187 if mergeable_file:
187 if mergeable_file:
188 fn = "mf"
188 fn = "mf"
189 p1 = repo[ps[0]]
189 p1 = repo[ps[0]]
190 if len(ps) > 1:
190 if len(ps) > 1:
191 p2 = repo[ps[1]]
191 p2 = repo[ps[1]]
192 pa = p1.ancestor(p2)
192 pa = p1.ancestor(p2)
193 base, local, other = [x[fn].data() for x in (pa, p1,
193 base, local, other = [x[fn].data() for x in (pa, p1,
194 p2)]
194 p2)]
195 m3 = simplemerge.Merge3Text(base, local, other)
195 m3 = simplemerge.Merge3Text(base, local, other)
196 ml = [l.strip() for l in m3.merge_lines()]
196 ml = [l.strip() for l in m3.merge_lines()]
197 ml.append("")
197 ml.append("")
198 elif at > 0:
198 elif at > 0:
199 ml = p1[fn].data().split("\n")
199 ml = p1[fn].data().split("\n")
200 else:
200 else:
201 ml = initialmergedlines
201 ml = initialmergedlines
202 ml[id * linesperrev] += " r%i" % id
202 ml[id * linesperrev] += " r%i" % id
203 mergedtext = "\n".join(ml)
203 mergedtext = "\n".join(ml)
204 files.append(fn)
204 files.append(fn)
205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
206
206
207 if overwritten_file:
207 if overwritten_file:
208 fn = "of"
208 fn = "of"
209 files.append(fn)
209 files.append(fn)
210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
211
211
212 if new_file:
212 if new_file:
213 fn = "nf%i" % id
213 fn = "nf%i" % id
214 files.append(fn)
214 files.append(fn)
215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
216 if len(ps) > 1:
216 if len(ps) > 1:
217 if not p2:
217 if not p2:
218 p2 = repo[ps[1]]
218 p2 = repo[ps[1]]
219 for fn in p2:
219 for fn in p2:
220 if fn.startswith("nf"):
220 if fn.startswith("nf"):
221 files.append(fn)
221 files.append(fn)
222 fctxs[fn] = p2[fn]
222 fctxs[fn] = p2[fn]
223
223
224 def fctxfn(repo, cx, path):
224 def fctxfn(repo, cx, path):
225 return fctxs.get(path)
225 return fctxs.get(path)
226
226
227 if len(ps) == 0 or ps[0] < 0:
227 if len(ps) == 0 or ps[0] < 0:
228 pars = [None, None]
228 pars = [None, None]
229 elif len(ps) == 1:
229 elif len(ps) == 1:
230 pars = [nodeids[ps[0]], None]
230 pars = [nodeids[ps[0]], None]
231 else:
231 else:
232 pars = [nodeids[p] for p in ps]
232 pars = [nodeids[p] for p in ps]
233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
234 date=(id, 0),
234 date=(id, 0),
235 user="debugbuilddag",
235 user="debugbuilddag",
236 extra={'branch': atbranch})
236 extra={'branch': atbranch})
237 nodeid = repo.commitctx(cx)
237 nodeid = repo.commitctx(cx)
238 nodeids.append(nodeid)
238 nodeids.append(nodeid)
239 at = id
239 at = id
240 elif type == 'l':
240 elif type == 'l':
241 id, name = data
241 id, name = data
242 ui.note(('tag %s\n' % name))
242 ui.note(('tag %s\n' % name))
243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
244 elif type == 'a':
244 elif type == 'a':
245 ui.note(('branch %s\n' % data))
245 ui.note(('branch %s\n' % data))
246 atbranch = data
246 atbranch = data
247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
248 tr.close()
248 tr.close()
249
249
250 if tags:
250 if tags:
251 repo.vfs.write("localtags", "".join(tags))
251 repo.vfs.write("localtags", "".join(tags))
252 finally:
252 finally:
253 ui.progress(_('building'), None)
253 ui.progress(_('building'), None)
254 release(tr, lock, wlock)
254 release(tr, lock, wlock)
255
255
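Putting the element descriptions from the debugbuilddag docstring together, a DAG text could look like the string below. This is a hypothetical example assembled from those descriptions, not a sample from the Mercurial test suite, and it has not been run through dagparser:

# Hypothetical DAG text; each element follows the docstring above:
#   +3        three linear revisions from the null parent
#   :base     local tag "base" on the last of them
#   +2        two more revisions on top
#   *base     fork: the next revision is a child of the node tagged "base"
#   +1        one revision on that fork
#   /2        merge the preceding node with the node two back
#   #...      comment up to end of line
dagtext = "+3 :base +2 *base +1 /2  # trailing comment\n"
print(dagtext.strip())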
256 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
256 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
257 indent_string = ' ' * indent
257 indent_string = ' ' * indent
258 if all:
258 if all:
259 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
259 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
260 % indent_string)
260 % indent_string)
261
261
262 def showchunks(named):
262 def showchunks(named):
263 ui.write("\n%s%s\n" % (indent_string, named))
263 ui.write("\n%s%s\n" % (indent_string, named))
264 chain = None
264 chain = None
265 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
265 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
266 node = chunkdata['node']
266 node = chunkdata['node']
267 p1 = chunkdata['p1']
267 p1 = chunkdata['p1']
268 p2 = chunkdata['p2']
268 p2 = chunkdata['p2']
269 cs = chunkdata['cs']
269 cs = chunkdata['cs']
270 deltabase = chunkdata['deltabase']
270 deltabase = chunkdata['deltabase']
271 delta = chunkdata['delta']
271 delta = chunkdata['delta']
272 ui.write("%s%s %s %s %s %s %s\n" %
272 ui.write("%s%s %s %s %s %s %s\n" %
273 (indent_string, hex(node), hex(p1), hex(p2),
273 (indent_string, hex(node), hex(p1), hex(p2),
274 hex(cs), hex(deltabase), len(delta)))
274 hex(cs), hex(deltabase), len(delta)))
275 chain = node
275 chain = node
276
276
277 chunkdata = gen.changelogheader()
277 chunkdata = gen.changelogheader()
278 showchunks("changelog")
278 showchunks("changelog")
279 chunkdata = gen.manifestheader()
279 chunkdata = gen.manifestheader()
280 showchunks("manifest")
280 showchunks("manifest")
281 for chunkdata in iter(gen.filelogheader, {}):
281 for chunkdata in iter(gen.filelogheader, {}):
282 fname = chunkdata['filename']
282 fname = chunkdata['filename']
283 showchunks(fname)
283 showchunks(fname)
284 else:
284 else:
285 if isinstance(gen, bundle2.unbundle20):
285 if isinstance(gen, bundle2.unbundle20):
286 raise error.Abort(_('use debugbundle2 for this file'))
286 raise error.Abort(_('use debugbundle2 for this file'))
287 chunkdata = gen.changelogheader()
287 chunkdata = gen.changelogheader()
288 chain = None
288 chain = None
289 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
289 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
290 node = chunkdata['node']
290 node = chunkdata['node']
291 ui.write("%s%s\n" % (indent_string, hex(node)))
291 ui.write("%s%s\n" % (indent_string, hex(node)))
292 chain = node
292 chain = node
293
293
294 def _debugbundle2(ui, gen, all=None, **opts):
294 def _debugbundle2(ui, gen, all=None, **opts):
295 """lists the contents of a bundle2"""
295 """lists the contents of a bundle2"""
296 if not isinstance(gen, bundle2.unbundle20):
296 if not isinstance(gen, bundle2.unbundle20):
297 raise error.Abort(_('not a bundle2 file'))
297 raise error.Abort(_('not a bundle2 file'))
298 ui.write(('Stream params: %s\n' % repr(gen.params)))
298 ui.write(('Stream params: %s\n' % repr(gen.params)))
299 for part in gen.iterparts():
299 for part in gen.iterparts():
300 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
300 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
301 if part.type == 'changegroup':
301 if part.type == 'changegroup':
302 version = part.params.get('version', '01')
302 version = part.params.get('version', '01')
303 cg = changegroup.getunbundler(version, part, 'UN')
303 cg = changegroup.getunbundler(version, part, 'UN')
304 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
304 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
305
305
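The showchunks loops above use the two-argument form of iter(): keep calling gen.deltachunk(chain) until it returns the sentinel {}. The same pattern in isolation, with a list standing in for the chunk stream (next_chunk is a made-up stand-in, not a bundle API):

# iter(callable, sentinel): iteration stops when the callable returns {}.
chunks = [{'node': 'aaa'}, {'node': 'bbb'}]
queue = list(chunks)

def next_chunk():
    return queue.pop(0) if queue else {}

for chunkdata in iter(next_chunk, {}):
    print(chunkdata['node'])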
306 @command('debugbundle',
306 @command('debugbundle',
307 [('a', 'all', None, _('show all details')),
307 [('a', 'all', None, _('show all details')),
308 ('', 'spec', None, _('print the bundlespec of the bundle'))],
308 ('', 'spec', None, _('print the bundlespec of the bundle'))],
309 _('FILE'),
309 _('FILE'),
310 norepo=True)
310 norepo=True)
311 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
311 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
312 """lists the contents of a bundle"""
312 """lists the contents of a bundle"""
313 with hg.openpath(ui, bundlepath) as f:
313 with hg.openpath(ui, bundlepath) as f:
314 if spec:
314 if spec:
315 spec = exchange.getbundlespec(ui, f)
315 spec = exchange.getbundlespec(ui, f)
316 ui.write('%s\n' % spec)
316 ui.write('%s\n' % spec)
317 return
317 return
318
318
319 gen = exchange.readbundle(ui, f, bundlepath)
319 gen = exchange.readbundle(ui, f, bundlepath)
320 if isinstance(gen, bundle2.unbundle20):
320 if isinstance(gen, bundle2.unbundle20):
321 return _debugbundle2(ui, gen, all=all, **opts)
321 return _debugbundle2(ui, gen, all=all, **opts)
322 _debugchangegroup(ui, gen, all=all, **opts)
322 _debugchangegroup(ui, gen, all=all, **opts)
323
323
324 @command('debugcheckstate', [], '')
324 @command('debugcheckstate', [], '')
325 def debugcheckstate(ui, repo):
325 def debugcheckstate(ui, repo):
326 """validate the correctness of the current dirstate"""
326 """validate the correctness of the current dirstate"""
327 parent1, parent2 = repo.dirstate.parents()
327 parent1, parent2 = repo.dirstate.parents()
328 m1 = repo[parent1].manifest()
328 m1 = repo[parent1].manifest()
329 m2 = repo[parent2].manifest()
329 m2 = repo[parent2].manifest()
330 errors = 0
330 errors = 0
331 for f in repo.dirstate:
331 for f in repo.dirstate:
332 state = repo.dirstate[f]
332 state = repo.dirstate[f]
333 if state in "nr" and f not in m1:
333 if state in "nr" and f not in m1:
334 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
334 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
335 errors += 1
335 errors += 1
336 if state in "a" and f in m1:
336 if state in "a" and f in m1:
337 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
337 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
338 errors += 1
338 errors += 1
339 if state in "m" and f not in m1 and f not in m2:
339 if state in "m" and f not in m1 and f not in m2:
340 ui.warn(_("%s in state %s, but not in either manifest\n") %
340 ui.warn(_("%s in state %s, but not in either manifest\n") %
341 (f, state))
341 (f, state))
342 errors += 1
342 errors += 1
343 for f in m1:
343 for f in m1:
344 state = repo.dirstate[f]
344 state = repo.dirstate[f]
345 if state not in "nrm":
345 if state not in "nrm":
346 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
346 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
347 errors += 1
347 errors += 1
348 if errors:
348 if errors:
349 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
349 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
350 raise error.Abort(errstr)  # 'errstr', so the imported 'error' module isn't shadowed
350 raise error.Abort(errstr)  # 'errstr', so the imported 'error' module isn't shadowed
351
351
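debugcheckstate above cross-checks every dirstate entry against the parent manifests. A reduced model with dictionaries and sets, assuming the usual dirstate letters ('n' normal, 'r' removed, 'a' added, 'm' merged) and skipping the reverse manifest-to-dirstate pass (check_state is an illustrative helper, not Mercurial API):

# Reduced model of the dirstate/manifest consistency checks above.
def check_state(dirstate, manifest1, manifest2):
    errors = 0
    for f, state in sorted(dirstate.items()):
        if state in "nr" and f not in manifest1:
            print("%s in state %s, but not in manifest1" % (f, state))
            errors += 1
        if state == "a" and f in manifest1:
            print("%s in state %s, but also in manifest1" % (f, state))
            errors += 1
        if state == "m" and f not in manifest1 and f not in manifest2:
            print("%s in state %s, but not in either manifest" % (f, state))
            errors += 1
    return errors

print(check_state({'x': 'n', 'y': 'a', 'z': 'r'},
                  manifest1={'x', 'y'}, manifest2=set()))  # 2 errors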
352 @command('debugcolor',
352 @command('debugcolor',
353 [('', 'style', None, _('show all configured styles'))],
353 [('', 'style', None, _('show all configured styles'))],
354 'hg debugcolor')
354 'hg debugcolor')
355 def debugcolor(ui, repo, **opts):
355 def debugcolor(ui, repo, **opts):
356 """show available color, effects or style"""
356 """show available color, effects or style"""
357 ui.write(('color mode: %s\n') % ui._colormode)
357 ui.write(('color mode: %s\n') % ui._colormode)
358 if opts.get('style'):
358 if opts.get('style'):
359 return _debugdisplaystyle(ui)
359 return _debugdisplaystyle(ui)
360 else:
360 else:
361 return _debugdisplaycolor(ui)
361 return _debugdisplaycolor(ui)
362
362
363 def _debugdisplaycolor(ui):
363 def _debugdisplaycolor(ui):
364 ui = ui.copy()
364 ui = ui.copy()
365 ui._styles.clear()
365 ui._styles.clear()
366 for effect in color._activeeffects(ui).keys():
366 for effect in color._activeeffects(ui).keys():
367 ui._styles[effect] = effect
367 ui._styles[effect] = effect
368 if ui._terminfoparams:
368 if ui._terminfoparams:
369 for k, v in ui.configitems('color'):
369 for k, v in ui.configitems('color'):
370 if k.startswith('color.'):
370 if k.startswith('color.'):
371 ui._styles[k] = k[6:]
371 ui._styles[k] = k[6:]
372 elif k.startswith('terminfo.'):
372 elif k.startswith('terminfo.'):
373 ui._styles[k] = k[9:]
373 ui._styles[k] = k[9:]
374 ui.write(_('available colors:\n'))
374 ui.write(_('available colors:\n'))
375 # sort labels containing '_' after the others, so the '_background' entries group together.
375 # sort labels containing '_' after the others, so the '_background' entries group together.
376 items = sorted(ui._styles.items(),
376 items = sorted(ui._styles.items(),
377 key=lambda i: ('_' in i[0], i[0], i[1]))
377 key=lambda i: ('_' in i[0], i[0], i[1]))
378 for colorname, label in items:
378 for colorname, label in items:
379 ui.write(('%s\n') % colorname, label=label)
379 ui.write(('%s\n') % colorname, label=label)
380
380
381 def _debugdisplaystyle(ui):
381 def _debugdisplaystyle(ui):
382 ui.write(_('available styles:\n'))
382 ui.write(_('available styles:\n'))
383 width = max(len(s) for s in ui._styles)
383 width = max(len(s) for s in ui._styles)
384 for label, effects in sorted(ui._styles.items()):
384 for label, effects in sorted(ui._styles.items()):
385 ui.write('%s' % label, label=label)
385 ui.write('%s' % label, label=label)
386 if effects:
386 if effects:
387 # 50
387 # 50
388 ui.write(': ')
388 ui.write(': ')
389 ui.write(' ' * (max(0, width - len(label))))
389 ui.write(' ' * (max(0, width - len(label))))
390 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
390 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
391 ui.write('\n')
391 ui.write('\n')
392
392
393 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
393 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
394 def debugcommands(ui, cmd='', *args):
394 def debugcommands(ui, cmd='', *args):
395 """list all available commands and options"""
395 """list all available commands and options"""
396 for cmd, vals in sorted(commands.table.iteritems()):
396 for cmd, vals in sorted(commands.table.iteritems()):
397 cmd = cmd.split('|')[0].strip('^')
397 cmd = cmd.split('|')[0].strip('^')
398 opts = ', '.join([i[1] for i in vals[1]])
398 opts = ', '.join([i[1] for i in vals[1]])
399 ui.write('%s: %s\n' % (cmd, opts))
399 ui.write('%s: %s\n' % (cmd, opts))
400
400
401 @command('debugcomplete',
401 @command('debugcomplete',
402 [('o', 'options', None, _('show the command options'))],
402 [('o', 'options', None, _('show the command options'))],
403 _('[-o] CMD'),
403 _('[-o] CMD'),
404 norepo=True)
404 norepo=True)
405 def debugcomplete(ui, cmd='', **opts):
405 def debugcomplete(ui, cmd='', **opts):
406 """returns the completion list associated with the given command"""
406 """returns the completion list associated with the given command"""
407
407
408 if opts.get('options'):
408 if opts.get('options'):
409 options = []
409 options = []
410 otables = [commands.globalopts]
410 otables = [commands.globalopts]
411 if cmd:
411 if cmd:
412 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
412 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
413 otables.append(entry[1])
413 otables.append(entry[1])
414 for t in otables:
414 for t in otables:
415 for o in t:
415 for o in t:
416 if "(DEPRECATED)" in o[3]:
416 if "(DEPRECATED)" in o[3]:
417 continue
417 continue
418 if o[0]:
418 if o[0]:
419 options.append('-%s' % o[0])
419 options.append('-%s' % o[0])
420 options.append('--%s' % o[1])
420 options.append('--%s' % o[1])
421 ui.write("%s\n" % "\n".join(options))
421 ui.write("%s\n" % "\n".join(options))
422 return
422 return
423
423
424 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
424 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
425 if ui.verbose:
425 if ui.verbose:
426 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
426 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
427 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
427 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
428
428
429 @command('debugcreatestreamclonebundle', [], 'FILE')
429 @command('debugcreatestreamclonebundle', [], 'FILE')
430 def debugcreatestreamclonebundle(ui, repo, fname):
430 def debugcreatestreamclonebundle(ui, repo, fname):
431 """create a stream clone bundle file
431 """create a stream clone bundle file
432
432
433 Stream bundles are special bundles that are essentially archives of
433 Stream bundles are special bundles that are essentially archives of
434 revlog files. They are commonly used for cloning very quickly.
434 revlog files. They are commonly used for cloning very quickly.
435 """
435 """
436 requirements, gen = streamclone.generatebundlev1(repo)
436 requirements, gen = streamclone.generatebundlev1(repo)
437 changegroup.writechunks(ui, gen, fname)
437 changegroup.writechunks(ui, gen, fname)
438
438
439 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
439 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
440
440
441 @command('debugdag',
441 @command('debugdag',
442 [('t', 'tags', None, _('use tags as labels')),
442 [('t', 'tags', None, _('use tags as labels')),
443 ('b', 'branches', None, _('annotate with branch names')),
443 ('b', 'branches', None, _('annotate with branch names')),
444 ('', 'dots', None, _('use dots for runs')),
444 ('', 'dots', None, _('use dots for runs')),
445 ('s', 'spaces', None, _('separate elements by spaces'))],
445 ('s', 'spaces', None, _('separate elements by spaces'))],
446 _('[OPTION]... [FILE [REV]...]'),
446 _('[OPTION]... [FILE [REV]...]'),
447 optionalrepo=True)
447 optionalrepo=True)
448 def debugdag(ui, repo, file_=None, *revs, **opts):
448 def debugdag(ui, repo, file_=None, *revs, **opts):
449 """format the changelog or an index DAG as a concise textual description
449 """format the changelog or an index DAG as a concise textual description
450
450
451 If you pass a revlog index, the revlog's DAG is emitted. If you list
451 If you pass a revlog index, the revlog's DAG is emitted. If you list
452 revision numbers, they get labeled in the output as rN.
452 revision numbers, they get labeled in the output as rN.
453
453
454 Otherwise, the changelog DAG of the current repo is emitted.
454 Otherwise, the changelog DAG of the current repo is emitted.
455 """
455 """
456 spaces = opts.get('spaces')
456 spaces = opts.get('spaces')
457 dots = opts.get('dots')
457 dots = opts.get('dots')
458 if file_:
458 if file_:
459 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
459 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
460 file_)
460 file_)
461 revs = set((int(r) for r in revs))
461 revs = set((int(r) for r in revs))
462 def events():
462 def events():
463 for r in rlog:
463 for r in rlog:
464 yield 'n', (r, list(p for p in rlog.parentrevs(r)
464 yield 'n', (r, list(p for p in rlog.parentrevs(r)
465 if p != -1))
465 if p != -1))
466 if r in revs:
466 if r in revs:
467 yield 'l', (r, "r%i" % r)
467 yield 'l', (r, "r%i" % r)
468 elif repo:
468 elif repo:
469 cl = repo.changelog
469 cl = repo.changelog
470 tags = opts.get('tags')
470 tags = opts.get('tags')
471 branches = opts.get('branches')
471 branches = opts.get('branches')
472 if tags:
472 if tags:
473 labels = {}
473 labels = {}
474 for l, n in repo.tags().items():
474 for l, n in repo.tags().items():
475 labels.setdefault(cl.rev(n), []).append(l)
475 labels.setdefault(cl.rev(n), []).append(l)
476 def events():
476 def events():
477 b = "default"
477 b = "default"
478 for r in cl:
478 for r in cl:
479 if branches:
479 if branches:
480 newb = cl.read(cl.node(r))[5]['branch']
480 newb = cl.read(cl.node(r))[5]['branch']
481 if newb != b:
481 if newb != b:
482 yield 'a', newb
482 yield 'a', newb
483 b = newb
483 b = newb
484 yield 'n', (r, list(p for p in cl.parentrevs(r)
484 yield 'n', (r, list(p for p in cl.parentrevs(r)
485 if p != -1))
485 if p != -1))
486 if tags:
486 if tags:
487 ls = labels.get(r)
487 ls = labels.get(r)
488 if ls:
488 if ls:
489 for l in ls:
489 for l in ls:
490 yield 'l', (r, l)
490 yield 'l', (r, l)
491 else:
491 else:
492 raise error.Abort(_('need repo for changelog dag'))
492 raise error.Abort(_('need repo for changelog dag'))
493
493
494 for line in dagparser.dagtextlines(events(),
494 for line in dagparser.dagtextlines(events(),
495 addspaces=spaces,
495 addspaces=spaces,
496 wraplabels=True,
496 wraplabels=True,
497 wrapannotations=True,
497 wrapannotations=True,
498 wrapnonlinear=dots,
498 wrapnonlinear=dots,
499 usedots=dots,
499 usedots=dots,
500 maxlinewidth=70):
500 maxlinewidth=70):
501 ui.write(line)
501 ui.write(line)
502 ui.write("\n")
502 ui.write("\n")
503
503
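Both events() generators in debugdag above emit the same small vocabulary of tuples, which dagparser.dagtextlines then renders: 'n' for a node with its parent revisions, 'l' for a label, 'a' for a branch change. A hand-built stream of the same shape (the values are made up for illustration; dagtextlines itself is not called here):

# Hand-built event stream matching the shapes produced by events() above.
events = [
    ('n', (0, [])),       # revision 0, no parents
    ('n', (1, [0])),      # revision 1 on top of 0
    ('a', 'stable'),      # following nodes are on branch "stable"
    ('n', (2, [1])),
    ('l', (2, 'r2')),     # label revision 2 as "r2"
    ('n', (3, [2, 1])),   # a merge of revisions 2 and 1
]
for kind, data in events:
    print("%s %r" % (kind, data))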
504 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
504 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
505 def debugdata(ui, repo, file_, rev=None, **opts):
505 def debugdata(ui, repo, file_, rev=None, **opts):
506 """dump the contents of a data file revision"""
506 """dump the contents of a data file revision"""
507 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
507 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
508 if rev is not None:
508 if rev is not None:
509 raise error.CommandError('debugdata', _('invalid arguments'))
509 raise error.CommandError('debugdata', _('invalid arguments'))
510 file_, rev = None, file_
510 file_, rev = None, file_
511 elif rev is None:
511 elif rev is None:
512 raise error.CommandError('debugdata', _('invalid arguments'))
512 raise error.CommandError('debugdata', _('invalid arguments'))
513 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
513 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
514 try:
514 try:
515 ui.write(r.revision(r.lookup(rev), raw=True))
515 ui.write(r.revision(r.lookup(rev), raw=True))
516 except KeyError:
516 except KeyError:
517 raise error.Abort(_('invalid revision identifier %s') % rev)
517 raise error.Abort(_('invalid revision identifier %s') % rev)
518
518
519 @command('debugdate',
519 @command('debugdate',
520 [('e', 'extended', None, _('try extended date formats'))],
520 [('e', 'extended', None, _('try extended date formats'))],
521 _('[-e] DATE [RANGE]'),
521 _('[-e] DATE [RANGE]'),
522 norepo=True, optionalrepo=True)
522 norepo=True, optionalrepo=True)
523 def debugdate(ui, date, range=None, **opts):
523 def debugdate(ui, date, range=None, **opts):
524 """parse and display a date"""
524 """parse and display a date"""
525 if opts["extended"]:
525 if opts["extended"]:
526 d = util.parsedate(date, util.extendeddateformats)
526 d = util.parsedate(date, util.extendeddateformats)
527 else:
527 else:
528 d = util.parsedate(date)
528 d = util.parsedate(date)
529 ui.write(("internal: %s %s\n") % d)
529 ui.write(("internal: %s %s\n") % d)
530 ui.write(("standard: %s\n") % util.datestr(d))
530 ui.write(("standard: %s\n") % util.datestr(d))
531 if range:
531 if range:
532 m = util.matchdate(range)
532 m = util.matchdate(range)
533 ui.write(("match: %s\n") % m(d[0]))
533 ui.write(("match: %s\n") % m(d[0]))
534
534
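debugdate above prints the parsed date twice: once as the internal pair ("internal: %s %s") and once through util.datestr. A sketch that assumes the pair is (unixtime, tzoffset) with the offset in seconds west of UTC; the sign convention is an assumption here, not something the hunk above guarantees:

# Illustrative only; describe() is a hypothetical helper, not util.datestr.
import time

def describe(d):
    unixtime, offset = d  # assumed: offset in seconds west of UTC
    internal = "internal: %s %s" % (unixtime, offset)
    standard = "standard: " + time.asctime(time.gmtime(unixtime - offset))
    return internal, standard

for line in describe((1500000000, -7200)):  # a timestamp in UTC+2
    print(line)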
535 @command('debugdeltachain',
535 @command('debugdeltachain',
536 commands.debugrevlogopts + commands.formatteropts,
536 cmdutil.debugrevlogopts + cmdutil.formatteropts,
537 _('-c|-m|FILE'),
537 _('-c|-m|FILE'),
538 optionalrepo=True)
538 optionalrepo=True)
539 def debugdeltachain(ui, repo, file_=None, **opts):
539 def debugdeltachain(ui, repo, file_=None, **opts):
540 """dump information about delta chains in a revlog
540 """dump information about delta chains in a revlog
541
541
542 Output can be templatized. Available template keywords are:
542 Output can be templatized. Available template keywords are:
543
543
544 :``rev``: revision number
544 :``rev``: revision number
545 :``chainid``: delta chain identifier (numbered by unique base)
545 :``chainid``: delta chain identifier (numbered by unique base)
546 :``chainlen``: delta chain length to this revision
546 :``chainlen``: delta chain length to this revision
547 :``prevrev``: previous revision in delta chain
547 :``prevrev``: previous revision in delta chain
548 :``deltatype``: role of delta / how it was computed
548 :``deltatype``: role of delta / how it was computed
549 :``compsize``: compressed size of revision
549 :``compsize``: compressed size of revision
550 :``uncompsize``: uncompressed size of revision
550 :``uncompsize``: uncompressed size of revision
551 :``chainsize``: total size of compressed revisions in chain
551 :``chainsize``: total size of compressed revisions in chain
552 :``chainratio``: total chain size divided by uncompressed revision size
552 :``chainratio``: total chain size divided by uncompressed revision size
553 (new delta chains typically start at ratio 2.00)
553 (new delta chains typically start at ratio 2.00)
554 :``lindist``: linear distance from base revision in delta chain to end
554 :``lindist``: linear distance from base revision in delta chain to end
555 of this revision
555 of this revision
556 :``extradist``: total size of revisions not part of this delta chain from
556 :``extradist``: total size of revisions not part of this delta chain from
557 base of delta chain to end of this revision; a measurement
557 base of delta chain to end of this revision; a measurement
558 of how much extra data we need to read/seek across to read
558 of how much extra data we need to read/seek across to read
559 the delta chain for this revision
559 the delta chain for this revision
560 :``extraratio``: extradist divided by chainsize; another representation of
560 :``extraratio``: extradist divided by chainsize; another representation of
561 how much unrelated data is needed to load this delta chain
561 how much unrelated data is needed to load this delta chain
562 """
562 """
563 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
563 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
564 index = r.index
564 index = r.index
565 generaldelta = r.version & revlog.FLAG_GENERALDELTA
565 generaldelta = r.version & revlog.FLAG_GENERALDELTA
566
566
567 def revinfo(rev):
567 def revinfo(rev):
568 e = index[rev]
568 e = index[rev]
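# (added note) each index entry is the usual revlogv1 tuple:
# (offset/flags, compressed length, uncompressed length, delta base rev,
#  link rev, p1 rev, p2 rev, nodeid); hence e[1] and e[2] below for the
# sizes, and e[3]/e[5]/e[6] for the delta base and the two parents.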
569 compsize = e[1]
569 compsize = e[1]
570 uncompsize = e[2]
570 uncompsize = e[2]
571 chainsize = 0
571 chainsize = 0
572
572
573 if generaldelta:
573 if generaldelta:
574 if e[3] == e[5]:
574 if e[3] == e[5]:
575 deltatype = 'p1'
575 deltatype = 'p1'
576 elif e[3] == e[6]:
576 elif e[3] == e[6]:
577 deltatype = 'p2'
577 deltatype = 'p2'
578 elif e[3] == rev - 1:
578 elif e[3] == rev - 1:
579 deltatype = 'prev'
579 deltatype = 'prev'
580 elif e[3] == rev:
580 elif e[3] == rev:
581 deltatype = 'base'
581 deltatype = 'base'
582 else:
582 else:
583 deltatype = 'other'
583 deltatype = 'other'
584 else:
584 else:
585 if e[3] == rev:
585 if e[3] == rev:
586 deltatype = 'base'
586 deltatype = 'base'
587 else:
587 else:
588 deltatype = 'prev'
588 deltatype = 'prev'
589
589
590 chain = r._deltachain(rev)[0]
590 chain = r._deltachain(rev)[0]
591 for iterrev in chain:
591 for iterrev in chain:
592 e = index[iterrev]
592 e = index[iterrev]
593 chainsize += e[1]
593 chainsize += e[1]
594
594
595 return compsize, uncompsize, deltatype, chain, chainsize
595 return compsize, uncompsize, deltatype, chain, chainsize
596
596
597 fm = ui.formatter('debugdeltachain', opts)
597 fm = ui.formatter('debugdeltachain', opts)
598
598
599 fm.plain(' rev chain# chainlen prev delta '
599 fm.plain(' rev chain# chainlen prev delta '
600 'size rawsize chainsize ratio lindist extradist '
600 'size rawsize chainsize ratio lindist extradist '
601 'extraratio\n')
601 'extraratio\n')
602
602
603 chainbases = {}
603 chainbases = {}
604 for rev in r:
604 for rev in r:
605 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
605 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
606 chainbase = chain[0]
606 chainbase = chain[0]
607 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
607 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
608 basestart = r.start(chainbase)
608 basestart = r.start(chainbase)
609 revstart = r.start(rev)
609 revstart = r.start(rev)
610 lineardist = revstart + comp - basestart
610 lineardist = revstart + comp - basestart
611 extradist = lineardist - chainsize
611 extradist = lineardist - chainsize
612 try:
612 try:
613 prevrev = chain[-2]
613 prevrev = chain[-2]
614 except IndexError:
614 except IndexError:
615 prevrev = -1
615 prevrev = -1
616
616
617 chainratio = float(chainsize) / float(uncomp)
617 chainratio = float(chainsize) / float(uncomp)
618 extraratio = float(extradist) / float(chainsize)
618 extraratio = float(extradist) / float(chainsize)
619
619
620 fm.startitem()
620 fm.startitem()
621 fm.write('rev chainid chainlen prevrev deltatype compsize '
621 fm.write('rev chainid chainlen prevrev deltatype compsize '
622 'uncompsize chainsize chainratio lindist extradist '
622 'uncompsize chainsize chainratio lindist extradist '
623 'extraratio',
623 'extraratio',
624 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
624 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
625 rev, chainid, len(chain), prevrev, deltatype, comp,
625 rev, chainid, len(chain), prevrev, deltatype, comp,
626 uncomp, chainsize, chainratio, lineardist, extradist,
626 uncomp, chainsize, chainratio, lineardist, extradist,
627 extraratio,
627 extraratio,
628 rev=rev, chainid=chainid, chainlen=len(chain),
628 rev=rev, chainid=chainid, chainlen=len(chain),
629 prevrev=prevrev, deltatype=deltatype, compsize=comp,
629 prevrev=prevrev, deltatype=deltatype, compsize=comp,
630 uncompsize=uncomp, chainsize=chainsize,
630 uncompsize=uncomp, chainsize=chainsize,
631 chainratio=chainratio, lindist=lineardist,
631 chainratio=chainratio, lindist=lineardist,
632 extradist=extradist, extraratio=extraratio)
632 extradist=extradist, extraratio=extraratio)
633
633
634 fm.end()
634 fm.end()
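# Illustrative usage (not from the original source): with the formatter
# options now sourced from cmdutil above, the keywords listed in the
# docstring can be templated directly, e.g.:
#
#   hg debugdeltachain -m -T '{rev} chain={chainid} len={chainlen} ratio={chainratio}\n'
#
# A template formatter skips the fixed-width header written via fm.plain and
# prints only the requested fields for the manifest revlog.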
635
635
636 @command('debugdirstate|debugstate',
636 @command('debugdirstate|debugstate',
637 [('', 'nodates', None, _('do not display the saved mtime')),
637 [('', 'nodates', None, _('do not display the saved mtime')),
638 ('', 'datesort', None, _('sort by saved mtime'))],
638 ('', 'datesort', None, _('sort by saved mtime'))],
639 _('[OPTION]...'))
639 _('[OPTION]...'))
640 def debugstate(ui, repo, **opts):
640 def debugstate(ui, repo, **opts):
641 """show the contents of the current dirstate"""
641 """show the contents of the current dirstate"""
642
642
643 nodates = opts.get('nodates')
643 nodates = opts.get('nodates')
644 datesort = opts.get('datesort')
644 datesort = opts.get('datesort')
645
645
646 timestr = ""
646 timestr = ""
647 if datesort:
647 if datesort:
648 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
648 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
649 else:
649 else:
650 keyfunc = None # sort by filename
650 keyfunc = None # sort by filename
651 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
651 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
652 if ent[3] == -1:
652 if ent[3] == -1:
653 timestr = 'unset '
653 timestr = 'unset '
654 elif nodates:
654 elif nodates:
655 timestr = 'set '
655 timestr = 'set '
656 else:
656 else:
657 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
657 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
658 time.localtime(ent[3]))
658 time.localtime(ent[3]))
659 if ent[1] & 0o20000:
659 if ent[1] & 0o20000:
660 mode = 'lnk'
660 mode = 'lnk'
661 else:
661 else:
662 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
662 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
663 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
663 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
664 for f in repo.dirstate.copies():
664 for f in repo.dirstate.copies():
665 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
665 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
666
666
667 @command('debugdiscovery',
667 @command('debugdiscovery',
668 [('', 'old', None, _('use old-style discovery')),
668 [('', 'old', None, _('use old-style discovery')),
669 ('', 'nonheads', None,
669 ('', 'nonheads', None,
670 _('use old-style discovery with non-heads included')),
670 _('use old-style discovery with non-heads included')),
671 ] + commands.remoteopts,
671 ] + cmdutil.remoteopts,
672 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
672 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
673 def debugdiscovery(ui, repo, remoteurl="default", **opts):
673 def debugdiscovery(ui, repo, remoteurl="default", **opts):
674 """runs the changeset discovery protocol in isolation"""
674 """runs the changeset discovery protocol in isolation"""
675 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
675 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
676 opts.get('branch'))
676 opts.get('branch'))
677 remote = hg.peer(repo, opts, remoteurl)
677 remote = hg.peer(repo, opts, remoteurl)
678 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
678 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
679
679
680 # make sure tests are repeatable
680 # make sure tests are repeatable
681 random.seed(12323)
681 random.seed(12323)
682
682
683 def doit(localheads, remoteheads, remote=remote):
683 def doit(localheads, remoteheads, remote=remote):
684 if opts.get('old'):
684 if opts.get('old'):
685 if localheads:
685 if localheads:
686 raise error.Abort('cannot use localheads with old style '
686 raise error.Abort('cannot use localheads with old style '
687 'discovery')
687 'discovery')
688 if not util.safehasattr(remote, 'branches'):
688 if not util.safehasattr(remote, 'branches'):
689 # enable in-client legacy support
689 # enable in-client legacy support
690 remote = localrepo.locallegacypeer(remote.local())
690 remote = localrepo.locallegacypeer(remote.local())
691 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
691 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
692 force=True)
692 force=True)
693 common = set(common)
693 common = set(common)
694 if not opts.get('nonheads'):
694 if not opts.get('nonheads'):
695 ui.write(("unpruned common: %s\n") %
695 ui.write(("unpruned common: %s\n") %
696 " ".join(sorted(short(n) for n in common)))
696 " ".join(sorted(short(n) for n in common)))
697 dag = dagutil.revlogdag(repo.changelog)
697 dag = dagutil.revlogdag(repo.changelog)
698 all = dag.ancestorset(dag.internalizeall(common))
698 all = dag.ancestorset(dag.internalizeall(common))
699 common = dag.externalizeall(dag.headsetofconnecteds(all))
699 common = dag.externalizeall(dag.headsetofconnecteds(all))
700 else:
700 else:
701 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
701 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
702 common = set(common)
702 common = set(common)
703 rheads = set(hds)
703 rheads = set(hds)
704 lheads = set(repo.heads())
704 lheads = set(repo.heads())
705 ui.write(("common heads: %s\n") %
705 ui.write(("common heads: %s\n") %
706 " ".join(sorted(short(n) for n in common)))
706 " ".join(sorted(short(n) for n in common)))
707 if lheads <= common:
707 if lheads <= common:
708 ui.write(("local is subset\n"))
708 ui.write(("local is subset\n"))
709 elif rheads <= common:
709 elif rheads <= common:
710 ui.write(("remote is subset\n"))
710 ui.write(("remote is subset\n"))
711
711
712 serverlogs = opts.get('serverlog')
712 serverlogs = opts.get('serverlog')
713 if serverlogs:
713 if serverlogs:
714 for filename in serverlogs:
714 for filename in serverlogs:
715 with open(filename, 'r') as logfile:
715 with open(filename, 'r') as logfile:
716 line = logfile.readline()
716 line = logfile.readline()
717 while line:
717 while line:
718 parts = line.strip().split(';')
718 parts = line.strip().split(';')
719 op = parts[1]
719 op = parts[1]
720 if op == 'cg':
720 if op == 'cg':
721 pass
721 pass
722 elif op == 'cgss':
722 elif op == 'cgss':
723 doit(parts[2].split(' '), parts[3].split(' '))
723 doit(parts[2].split(' '), parts[3].split(' '))
724 elif op == 'unb':
724 elif op == 'unb':
725 doit(parts[3].split(' '), parts[2].split(' '))
725 doit(parts[3].split(' '), parts[2].split(' '))
726 line = logfile.readline()
726 line = logfile.readline()
727 else:
727 else:
728 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
728 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
729 opts.get('remote_head'))
729 opts.get('remote_head'))
730 localrevs = opts.get('local_head')
730 localrevs = opts.get('local_head')
731 doit(localrevs, remoterevs)
731 doit(localrevs, remoterevs)
732
732
733 @command('debugextensions', commands.formatteropts, [], norepo=True)
733 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
734 def debugextensions(ui, **opts):
734 def debugextensions(ui, **opts):
735 '''show information about active extensions'''
735 '''show information about active extensions'''
736 exts = extensions.extensions(ui)
736 exts = extensions.extensions(ui)
737 hgver = util.version()
737 hgver = util.version()
738 fm = ui.formatter('debugextensions', opts)
738 fm = ui.formatter('debugextensions', opts)
739 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
739 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
740 isinternal = extensions.ismoduleinternal(extmod)
740 isinternal = extensions.ismoduleinternal(extmod)
741 extsource = pycompat.fsencode(extmod.__file__)
741 extsource = pycompat.fsencode(extmod.__file__)
742 if isinternal:
742 if isinternal:
743 exttestedwith = [] # never expose magic string to users
743 exttestedwith = [] # never expose magic string to users
744 else:
744 else:
745 exttestedwith = getattr(extmod, 'testedwith', '').split()
745 exttestedwith = getattr(extmod, 'testedwith', '').split()
746 extbuglink = getattr(extmod, 'buglink', None)
746 extbuglink = getattr(extmod, 'buglink', None)
747
747
748 fm.startitem()
748 fm.startitem()
749
749
750 if ui.quiet or ui.verbose:
750 if ui.quiet or ui.verbose:
751 fm.write('name', '%s\n', extname)
751 fm.write('name', '%s\n', extname)
752 else:
752 else:
753 fm.write('name', '%s', extname)
753 fm.write('name', '%s', extname)
754 if isinternal or hgver in exttestedwith:
754 if isinternal or hgver in exttestedwith:
755 fm.plain('\n')
755 fm.plain('\n')
756 elif not exttestedwith:
756 elif not exttestedwith:
757 fm.plain(_(' (untested!)\n'))
757 fm.plain(_(' (untested!)\n'))
758 else:
758 else:
759 lasttestedversion = exttestedwith[-1]
759 lasttestedversion = exttestedwith[-1]
760 fm.plain(' (%s!)\n' % lasttestedversion)
760 fm.plain(' (%s!)\n' % lasttestedversion)
761
761
762 fm.condwrite(ui.verbose and extsource, 'source',
762 fm.condwrite(ui.verbose and extsource, 'source',
763 _(' location: %s\n'), extsource or "")
763 _(' location: %s\n'), extsource or "")
764
764
765 if ui.verbose:
765 if ui.verbose:
766 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
766 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
767 fm.data(bundled=isinternal)
767 fm.data(bundled=isinternal)
768
768
769 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
769 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
770 _(' tested with: %s\n'),
770 _(' tested with: %s\n'),
771 fm.formatlist(exttestedwith, name='ver'))
771 fm.formatlist(exttestedwith, name='ver'))
772
772
773 fm.condwrite(ui.verbose and extbuglink, 'buglink',
773 fm.condwrite(ui.verbose and extbuglink, 'buglink',
774 _(' bug reporting: %s\n'), extbuglink or "")
774 _(' bug reporting: %s\n'), extbuglink or "")
775
775
776 fm.end()
776 fm.end()
777
777
778 @command('debugfileset',
778 @command('debugfileset',
779 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
779 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
780 _('[-r REV] FILESPEC'))
780 _('[-r REV] FILESPEC'))
781 def debugfileset(ui, repo, expr, **opts):
781 def debugfileset(ui, repo, expr, **opts):
782 '''parse and apply a fileset specification'''
782 '''parse and apply a fileset specification'''
783 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
783 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
784 if ui.verbose:
784 if ui.verbose:
785 tree = fileset.parse(expr)
785 tree = fileset.parse(expr)
786 ui.note(fileset.prettyformat(tree), "\n")
786 ui.note(fileset.prettyformat(tree), "\n")
787
787
788 for f in ctx.getfileset(expr):
788 for f in ctx.getfileset(expr):
789 ui.write("%s\n" % f)
789 ui.write("%s\n" % f)
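# Illustrative usage (not from the original source), using standard fileset
# predicates:
#
#   hg debugfileset -r . 'modified() or added()'
#
# With --verbose the parsed fileset tree is printed first (via
# fileset.prettyformat above), followed by one matching file per line.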
790
790
791 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
791 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
792 def debugfsinfo(ui, path="."):
792 def debugfsinfo(ui, path="."):
793 """show information detected about current filesystem"""
793 """show information detected about current filesystem"""
794 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
794 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
795 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
795 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
796 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
796 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
797 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
797 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
798 casesensitive = '(unknown)'
798 casesensitive = '(unknown)'
799 try:
799 try:
800 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
800 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
801 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
801 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
802 except OSError:
802 except OSError:
803 pass
803 pass
804 ui.write(('case-sensitive: %s\n') % casesensitive)
804 ui.write(('case-sensitive: %s\n') % casesensitive)
805
805
806 @command('debuggetbundle',
806 @command('debuggetbundle',
807 [('H', 'head', [], _('id of head node'), _('ID')),
807 [('H', 'head', [], _('id of head node'), _('ID')),
808 ('C', 'common', [], _('id of common node'), _('ID')),
808 ('C', 'common', [], _('id of common node'), _('ID')),
809 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
809 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
810 _('REPO FILE [-H|-C ID]...'),
810 _('REPO FILE [-H|-C ID]...'),
811 norepo=True)
811 norepo=True)
812 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
812 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
813 """retrieves a bundle from a repo
813 """retrieves a bundle from a repo
814
814
815 Every ID must be a full-length hex node id string. Saves the bundle to the
815 Every ID must be a full-length hex node id string. Saves the bundle to the
816 given file.
816 given file.
817 """
817 """
818 repo = hg.peer(ui, opts, repopath)
818 repo = hg.peer(ui, opts, repopath)
819 if not repo.capable('getbundle'):
819 if not repo.capable('getbundle'):
820 raise error.Abort("getbundle() not supported by target repository")
820 raise error.Abort("getbundle() not supported by target repository")
821 args = {}
821 args = {}
822 if common:
822 if common:
823 args['common'] = [bin(s) for s in common]
823 args['common'] = [bin(s) for s in common]
824 if head:
824 if head:
825 args['heads'] = [bin(s) for s in head]
825 args['heads'] = [bin(s) for s in head]
826 # TODO: get desired bundlecaps from command line.
826 # TODO: get desired bundlecaps from command line.
827 args['bundlecaps'] = None
827 args['bundlecaps'] = None
828 bundle = repo.getbundle('debug', **args)
828 bundle = repo.getbundle('debug', **args)
829
829
830 bundletype = opts.get('type', 'bzip2').lower()
830 bundletype = opts.get('type', 'bzip2').lower()
831 btypes = {'none': 'HG10UN',
831 btypes = {'none': 'HG10UN',
832 'bzip2': 'HG10BZ',
832 'bzip2': 'HG10BZ',
833 'gzip': 'HG10GZ',
833 'gzip': 'HG10GZ',
834 'bundle2': 'HG20'}
834 'bundle2': 'HG20'}
835 bundletype = btypes.get(bundletype)
835 bundletype = btypes.get(bundletype)
836 if bundletype not in bundle2.bundletypes:
836 if bundletype not in bundle2.bundletypes:
837 raise error.Abort(_('unknown bundle type specified with --type'))
837 raise error.Abort(_('unknown bundle type specified with --type'))
838 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
838 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
839
839
840 @command('debugignore', [], '[FILE]')
840 @command('debugignore', [], '[FILE]')
841 def debugignore(ui, repo, *files, **opts):
841 def debugignore(ui, repo, *files, **opts):
842 """display the combined ignore pattern and information about ignored files
842 """display the combined ignore pattern and information about ignored files
843
843
844 With no argument display the combined ignore pattern.
844 With no argument display the combined ignore pattern.
845
845
846 Given space-separated file names, shows if the given file is ignored and
846 Given space-separated file names, shows if the given file is ignored and
847 if so, shows the ignore rule (file and line number) that matched it.
847 if so, shows the ignore rule (file and line number) that matched it.
848 """
848 """
849 ignore = repo.dirstate._ignore
849 ignore = repo.dirstate._ignore
850 if not files:
850 if not files:
851 # Show all the patterns
851 # Show all the patterns
852 includepat = getattr(ignore, 'includepat', None)
852 includepat = getattr(ignore, 'includepat', None)
853 if includepat is not None:
853 if includepat is not None:
854 ui.write("%s\n" % includepat)
854 ui.write("%s\n" % includepat)
855 else:
855 else:
856 raise error.Abort(_("no ignore patterns found"))
856 raise error.Abort(_("no ignore patterns found"))
857 else:
857 else:
858 for f in files:
858 for f in files:
859 nf = util.normpath(f)
859 nf = util.normpath(f)
860 ignored = None
860 ignored = None
861 ignoredata = None
861 ignoredata = None
862 if nf != '.':
862 if nf != '.':
863 if ignore(nf):
863 if ignore(nf):
864 ignored = nf
864 ignored = nf
865 ignoredata = repo.dirstate._ignorefileandline(nf)
865 ignoredata = repo.dirstate._ignorefileandline(nf)
866 else:
866 else:
867 for p in util.finddirs(nf):
867 for p in util.finddirs(nf):
868 if ignore(p):
868 if ignore(p):
869 ignored = p
869 ignored = p
870 ignoredata = repo.dirstate._ignorefileandline(p)
870 ignoredata = repo.dirstate._ignorefileandline(p)
871 break
871 break
872 if ignored:
872 if ignored:
873 if ignored == nf:
873 if ignored == nf:
874 ui.write(_("%s is ignored\n") % f)
874 ui.write(_("%s is ignored\n") % f)
875 else:
875 else:
876 ui.write(_("%s is ignored because of "
876 ui.write(_("%s is ignored because of "
877 "containing folder %s\n")
877 "containing folder %s\n")
878 % (f, ignored))
878 % (f, ignored))
879 ignorefile, lineno, line = ignoredata
879 ignorefile, lineno, line = ignoredata
880 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
880 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
881 % (ignorefile, lineno, line))
881 % (ignorefile, lineno, line))
882 else:
882 else:
883 ui.write(_("%s is not ignored\n") % f)
883 ui.write(_("%s is not ignored\n") % f)
884
884
885 @command('debugindex', commands.debugrevlogopts +
885 @command('debugindex', cmdutil.debugrevlogopts +
886 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
886 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
887 _('[-f FORMAT] -c|-m|FILE'),
887 _('[-f FORMAT] -c|-m|FILE'),
888 optionalrepo=True)
888 optionalrepo=True)
889 def debugindex(ui, repo, file_=None, **opts):
889 def debugindex(ui, repo, file_=None, **opts):
890 """dump the contents of an index file"""
890 """dump the contents of an index file"""
891 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
891 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
892 format = opts.get('format', 0)
892 format = opts.get('format', 0)
893 if format not in (0, 1):
893 if format not in (0, 1):
894 raise error.Abort(_("unknown format %d") % format)
894 raise error.Abort(_("unknown format %d") % format)
895
895
896 generaldelta = r.version & revlog.FLAG_GENERALDELTA
896 generaldelta = r.version & revlog.FLAG_GENERALDELTA
897 if generaldelta:
897 if generaldelta:
898 basehdr = ' delta'
898 basehdr = ' delta'
899 else:
899 else:
900 basehdr = ' base'
900 basehdr = ' base'
901
901
902 if ui.debugflag:
902 if ui.debugflag:
903 shortfn = hex
903 shortfn = hex
904 else:
904 else:
905 shortfn = short
905 shortfn = short
906
906
907 # There might not be anything in r, so have a sane default
907 # There might not be anything in r, so have a sane default
908 idlen = 12
908 idlen = 12
909 for i in r:
909 for i in r:
910 idlen = len(shortfn(r.node(i)))
910 idlen = len(shortfn(r.node(i)))
911 break
911 break
912
912
913 if format == 0:
913 if format == 0:
914 ui.write((" rev offset length " + basehdr + " linkrev"
914 ui.write((" rev offset length " + basehdr + " linkrev"
915 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
915 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
916 elif format == 1:
916 elif format == 1:
917 ui.write((" rev flag offset length"
917 ui.write((" rev flag offset length"
918 " size " + basehdr + " link p1 p2"
918 " size " + basehdr + " link p1 p2"
919 " %s\n") % "nodeid".rjust(idlen))
919 " %s\n") % "nodeid".rjust(idlen))
920
920
921 for i in r:
921 for i in r:
922 node = r.node(i)
922 node = r.node(i)
923 if generaldelta:
923 if generaldelta:
924 base = r.deltaparent(i)
924 base = r.deltaparent(i)
925 else:
925 else:
926 base = r.chainbase(i)
926 base = r.chainbase(i)
927 if format == 0:
927 if format == 0:
928 try:
928 try:
929 pp = r.parents(node)
929 pp = r.parents(node)
930 except Exception:
930 except Exception:
931 pp = [nullid, nullid]
931 pp = [nullid, nullid]
932 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
932 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
933 i, r.start(i), r.length(i), base, r.linkrev(i),
933 i, r.start(i), r.length(i), base, r.linkrev(i),
934 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
934 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
935 elif format == 1:
935 elif format == 1:
936 pr = r.parentrevs(i)
936 pr = r.parentrevs(i)
937 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
937 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
938 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
938 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
939 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
939 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
940
940
941 @command('debugindexdot', commands.debugrevlogopts,
941 @command('debugindexdot', cmdutil.debugrevlogopts,
942 _('-c|-m|FILE'), optionalrepo=True)
942 _('-c|-m|FILE'), optionalrepo=True)
943 def debugindexdot(ui, repo, file_=None, **opts):
943 def debugindexdot(ui, repo, file_=None, **opts):
944 """dump an index DAG as a graphviz dot file"""
944 """dump an index DAG as a graphviz dot file"""
945 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
945 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
946 ui.write(("digraph G {\n"))
946 ui.write(("digraph G {\n"))
947 for i in r:
947 for i in r:
948 node = r.node(i)
948 node = r.node(i)
949 pp = r.parents(node)
949 pp = r.parents(node)
950 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
950 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
951 if pp[1] != nullid:
951 if pp[1] != nullid:
952 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
952 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
953 ui.write("}\n")
953 ui.write("}\n")
954
954
955 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
955 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
956 def debuginstall(ui, **opts):
956 def debuginstall(ui, **opts):
957 '''test Mercurial installation
957 '''test Mercurial installation
958
958
959 Returns 0 on success.
959 Returns 0 on success.
960 '''
960 '''
961
961
962 def writetemp(contents):
962 def writetemp(contents):
963 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
963 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
964 f = os.fdopen(fd, pycompat.sysstr("wb"))
964 f = os.fdopen(fd, pycompat.sysstr("wb"))
965 f.write(contents)
965 f.write(contents)
966 f.close()
966 f.close()
967 return name
967 return name
968
968
969 problems = 0
969 problems = 0
970
970
971 fm = ui.formatter('debuginstall', opts)
971 fm = ui.formatter('debuginstall', opts)
972 fm.startitem()
972 fm.startitem()
973
973
974 # encoding
974 # encoding
975 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
975 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
976 err = None
976 err = None
977 try:
977 try:
978 encoding.fromlocal("test")
978 encoding.fromlocal("test")
979 except error.Abort as inst:
979 except error.Abort as inst:
980 err = inst
980 err = inst
981 problems += 1
981 problems += 1
982 fm.condwrite(err, 'encodingerror', _(" %s\n"
982 fm.condwrite(err, 'encodingerror', _(" %s\n"
983 " (check that your locale is properly set)\n"), err)
983 " (check that your locale is properly set)\n"), err)
984
984
985 # Python
985 # Python
986 fm.write('pythonexe', _("checking Python executable (%s)\n"),
986 fm.write('pythonexe', _("checking Python executable (%s)\n"),
987 pycompat.sysexecutable)
987 pycompat.sysexecutable)
988 fm.write('pythonver', _("checking Python version (%s)\n"),
988 fm.write('pythonver', _("checking Python version (%s)\n"),
989 ("%d.%d.%d" % sys.version_info[:3]))
989 ("%d.%d.%d" % sys.version_info[:3]))
990 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
990 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
991 os.path.dirname(pycompat.fsencode(os.__file__)))
991 os.path.dirname(pycompat.fsencode(os.__file__)))
992
992
993 security = set(sslutil.supportedprotocols)
993 security = set(sslutil.supportedprotocols)
994 if sslutil.hassni:
994 if sslutil.hassni:
995 security.add('sni')
995 security.add('sni')
996
996
997 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
997 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
998 fm.formatlist(sorted(security), name='protocol',
998 fm.formatlist(sorted(security), name='protocol',
999 fmt='%s', sep=','))
999 fmt='%s', sep=','))
1000
1000
1001 # These are warnings, not errors. So don't increment problem count. This
1001 # These are warnings, not errors. So don't increment problem count. This
1002 # may change in the future.
1002 # may change in the future.
1003 if 'tls1.2' not in security:
1003 if 'tls1.2' not in security:
1004 fm.plain(_(' TLS 1.2 not supported by Python install; '
1004 fm.plain(_(' TLS 1.2 not supported by Python install; '
1005 'network connections lack modern security\n'))
1005 'network connections lack modern security\n'))
1006 if 'sni' not in security:
1006 if 'sni' not in security:
1007 fm.plain(_(' SNI not supported by Python install; may have '
1007 fm.plain(_(' SNI not supported by Python install; may have '
1008 'connectivity issues with some servers\n'))
1008 'connectivity issues with some servers\n'))
1009
1009
1010 # TODO print CA cert info
1010 # TODO print CA cert info
1011
1011
1012 # hg version
1012 # hg version
1013 hgver = util.version()
1013 hgver = util.version()
1014 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1014 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1015 hgver.split('+')[0])
1015 hgver.split('+')[0])
1016 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1016 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1017 '+'.join(hgver.split('+')[1:]))
1017 '+'.join(hgver.split('+')[1:]))
1018
1018
1019 # compiled modules
1019 # compiled modules
1020 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1020 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1021 policy.policy)
1021 policy.policy)
1022 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1022 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1023 os.path.dirname(pycompat.fsencode(__file__)))
1023 os.path.dirname(pycompat.fsencode(__file__)))
1024
1024
1025 if policy.policy in ('c', 'allow'):
1025 if policy.policy in ('c', 'allow'):
1026 err = None
1026 err = None
1027 try:
1027 try:
1028 from .cext import (
1028 from .cext import (
1029 base85,
1029 base85,
1030 bdiff,
1030 bdiff,
1031 mpatch,
1031 mpatch,
1032 osutil,
1032 osutil,
1033 )
1033 )
1034 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1034 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1035 except Exception as inst:
1035 except Exception as inst:
1036 err = inst
1036 err = inst
1037 problems += 1
1037 problems += 1
1038 fm.condwrite(err, 'extensionserror', " %s\n", err)
1038 fm.condwrite(err, 'extensionserror', " %s\n", err)
1039
1039
1040 compengines = util.compengines._engines.values()
1040 compengines = util.compengines._engines.values()
1041 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1041 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1042 fm.formatlist(sorted(e.name() for e in compengines),
1042 fm.formatlist(sorted(e.name() for e in compengines),
1043 name='compengine', fmt='%s', sep=', '))
1043 name='compengine', fmt='%s', sep=', '))
1044 fm.write('compenginesavail', _('checking available compression engines '
1044 fm.write('compenginesavail', _('checking available compression engines '
1045 '(%s)\n'),
1045 '(%s)\n'),
1046 fm.formatlist(sorted(e.name() for e in compengines
1046 fm.formatlist(sorted(e.name() for e in compengines
1047 if e.available()),
1047 if e.available()),
1048 name='compengine', fmt='%s', sep=', '))
1048 name='compengine', fmt='%s', sep=', '))
1049 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1049 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1050 fm.write('compenginesserver', _('checking available compression engines '
1050 fm.write('compenginesserver', _('checking available compression engines '
1051 'for wire protocol (%s)\n'),
1051 'for wire protocol (%s)\n'),
1052 fm.formatlist([e.name() for e in wirecompengines
1052 fm.formatlist([e.name() for e in wirecompengines
1053 if e.wireprotosupport()],
1053 if e.wireprotosupport()],
1054 name='compengine', fmt='%s', sep=', '))
1054 name='compengine', fmt='%s', sep=', '))
1055
1055
1056 # templates
1056 # templates
1057 p = templater.templatepaths()
1057 p = templater.templatepaths()
1058 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1058 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1059 fm.condwrite(not p, '', _(" no template directories found\n"))
1059 fm.condwrite(not p, '', _(" no template directories found\n"))
1060 if p:
1060 if p:
1061 m = templater.templatepath("map-cmdline.default")
1061 m = templater.templatepath("map-cmdline.default")
1062 if m:
1062 if m:
1063 # template found, check if it is working
1063 # template found, check if it is working
1064 err = None
1064 err = None
1065 try:
1065 try:
1066 templater.templater.frommapfile(m)
1066 templater.templater.frommapfile(m)
1067 except Exception as inst:
1067 except Exception as inst:
1068 err = inst
1068 err = inst
1069 p = None
1069 p = None
1070 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1070 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1071 else:
1071 else:
1072 p = None
1072 p = None
1073 fm.condwrite(p, 'defaulttemplate',
1073 fm.condwrite(p, 'defaulttemplate',
1074 _("checking default template (%s)\n"), m)
1074 _("checking default template (%s)\n"), m)
1075 fm.condwrite(not m, 'defaulttemplatenotfound',
1075 fm.condwrite(not m, 'defaulttemplatenotfound',
1076 _(" template '%s' not found\n"), "default")
1076 _(" template '%s' not found\n"), "default")
1077 if not p:
1077 if not p:
1078 problems += 1
1078 problems += 1
1079 fm.condwrite(not p, '',
1079 fm.condwrite(not p, '',
1080 _(" (templates seem to have been installed incorrectly)\n"))
1080 _(" (templates seem to have been installed incorrectly)\n"))
1081
1081
1082 # editor
1082 # editor
1083 editor = ui.geteditor()
1083 editor = ui.geteditor()
1084 editor = util.expandpath(editor)
1084 editor = util.expandpath(editor)
1085 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1085 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1086 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1086 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1087 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1087 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1088 _(" No commit editor set and can't find %s in PATH\n"
1088 _(" No commit editor set and can't find %s in PATH\n"
1089 " (specify a commit editor in your configuration"
1089 " (specify a commit editor in your configuration"
1090 " file)\n"), not cmdpath and editor == 'vi' and editor)
1090 " file)\n"), not cmdpath and editor == 'vi' and editor)
1091 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1091 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1092 _(" Can't find editor '%s' in PATH\n"
1092 _(" Can't find editor '%s' in PATH\n"
1093 " (specify a commit editor in your configuration"
1093 " (specify a commit editor in your configuration"
1094 " file)\n"), not cmdpath and editor)
1094 " file)\n"), not cmdpath and editor)
1095 if not cmdpath and editor != 'vi':
1095 if not cmdpath and editor != 'vi':
1096 problems += 1
1096 problems += 1
1097
1097
1098 # check username
1098 # check username
1099 username = None
1099 username = None
1100 err = None
1100 err = None
1101 try:
1101 try:
1102 username = ui.username()
1102 username = ui.username()
1103 except error.Abort as e:
1103 except error.Abort as e:
1104 err = e
1104 err = e
1105 problems += 1
1105 problems += 1
1106
1106
1107 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1107 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1108 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1108 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1109 " (specify a username in your configuration file)\n"), err)
1109 " (specify a username in your configuration file)\n"), err)
1110
1110
1111 fm.condwrite(not problems, '',
1111 fm.condwrite(not problems, '',
1112 _("no problems detected\n"))
1112 _("no problems detected\n"))
1113 if not problems:
1113 if not problems:
1114 fm.data(problems=problems)
1114 fm.data(problems=problems)
1115 fm.condwrite(problems, 'problems',
1115 fm.condwrite(problems, 'problems',
1116 _("%d problems detected,"
1116 _("%d problems detected,"
1117 " please check your install!\n"), problems)
1117 " please check your install!\n"), problems)
1118 fm.end()
1118 fm.end()
1119
1119
1120 return problems
1120 return problems
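# Illustrative note (not from the original source): because the formatter
# options are now pulled from cmdutil above, machine-readable output works as
# before, e.g.:
#
#   hg debuginstall -T json
#
# which emits the same checks as keyed fields instead of the prose lines
# written via fm.write/fm.condwrite.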
1121
1121
1122 @command('debugknown', [], _('REPO ID...'), norepo=True)
1122 @command('debugknown', [], _('REPO ID...'), norepo=True)
1123 def debugknown(ui, repopath, *ids, **opts):
1123 def debugknown(ui, repopath, *ids, **opts):
1124 """test whether node ids are known to a repo
1124 """test whether node ids are known to a repo
1125
1125
1126 Every ID must be a full-length hex node id string. Returns a list of 0s
1126 Every ID must be a full-length hex node id string. Returns a list of 0s
1127 and 1s indicating unknown/known.
1127 and 1s indicating unknown/known.
1128 """
1128 """
1129 repo = hg.peer(ui, opts, repopath)
1129 repo = hg.peer(ui, opts, repopath)
1130 if not repo.capable('known'):
1130 if not repo.capable('known'):
1131 raise error.Abort("known() not supported by target repository")
1131 raise error.Abort("known() not supported by target repository")
1132 flags = repo.known([bin(s) for s in ids])
1132 flags = repo.known([bin(s) for s in ids])
1133 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1133 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1134
1134
1135 @command('debuglabelcomplete', [], _('LABEL...'))
1135 @command('debuglabelcomplete', [], _('LABEL...'))
1136 def debuglabelcomplete(ui, repo, *args):
1136 def debuglabelcomplete(ui, repo, *args):
1137 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1137 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1138 debugnamecomplete(ui, repo, *args)
1138 debugnamecomplete(ui, repo, *args)
1139
1139
1140 @command('debuglocks',
1140 @command('debuglocks',
1141 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1141 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1142 ('W', 'force-wlock', None,
1142 ('W', 'force-wlock', None,
1143 _('free the working state lock (DANGEROUS)'))],
1143 _('free the working state lock (DANGEROUS)'))],
1144 _('[OPTION]...'))
1144 _('[OPTION]...'))
1145 def debuglocks(ui, repo, **opts):
1145 def debuglocks(ui, repo, **opts):
1146 """show or modify state of locks
1146 """show or modify state of locks
1147
1147
1148 By default, this command will show which locks are held. This
1148 By default, this command will show which locks are held. This
1149 includes the user and process holding the lock, the amount of time
1149 includes the user and process holding the lock, the amount of time
1150 the lock has been held, and the machine name where the process is
1150 the lock has been held, and the machine name where the process is
1151 running if it's not local.
1151 running if it's not local.
1152
1152
1153 Locks protect the integrity of Mercurial's data, so should be
1153 Locks protect the integrity of Mercurial's data, so should be
1154 treated with care. System crashes or other interruptions may cause
1154 treated with care. System crashes or other interruptions may cause
1155 locks to not be properly released, though Mercurial will usually
1155 locks to not be properly released, though Mercurial will usually
1156 detect and remove such stale locks automatically.
1156 detect and remove such stale locks automatically.
1157
1157
1158 However, detecting stale locks may not always be possible (for
1158 However, detecting stale locks may not always be possible (for
1159 instance, on a shared filesystem). Removing locks may also be
1159 instance, on a shared filesystem). Removing locks may also be
1160 blocked by filesystem permissions.
1160 blocked by filesystem permissions.
1161
1161
1162 Returns 0 if no locks are held.
1162 Returns 0 if no locks are held.
1163
1163
1164 """
1164 """
1165
1165
1166 if opts.get('force_lock'):
1166 if opts.get('force_lock'):
1167 repo.svfs.unlink('lock')
1167 repo.svfs.unlink('lock')
1168 if opts.get('force_wlock'):
1168 if opts.get('force_wlock'):
1169 repo.vfs.unlink('wlock')
1169 repo.vfs.unlink('wlock')
1170 if opts.get('force_lock') or opts.get('force_wlock'):
1170 if opts.get('force_lock') or opts.get('force_wlock'):
1171 return 0
1171 return 0
1172
1172
1173 now = time.time()
1173 now = time.time()
1174 held = 0
1174 held = 0
1175
1175
1176 def report(vfs, name, method):
1176 def report(vfs, name, method):
1177 # this causes stale locks to get reaped for more accurate reporting
1177 # this causes stale locks to get reaped for more accurate reporting
1178 try:
1178 try:
1179 l = method(False)
1179 l = method(False)
1180 except error.LockHeld:
1180 except error.LockHeld:
1181 l = None
1181 l = None
1182
1182
1183 if l:
1183 if l:
1184 l.release()
1184 l.release()
1185 else:
1185 else:
1186 try:
1186 try:
1187 stat = vfs.lstat(name)
1187 stat = vfs.lstat(name)
1188 age = now - stat.st_mtime
1188 age = now - stat.st_mtime
1189 user = util.username(stat.st_uid)
1189 user = util.username(stat.st_uid)
1190 locker = vfs.readlock(name)
1190 locker = vfs.readlock(name)
1191 if ":" in locker:
1191 if ":" in locker:
1192 host, pid = locker.split(':')
1192 host, pid = locker.split(':')
1193 if host == socket.gethostname():
1193 if host == socket.gethostname():
1194 locker = 'user %s, process %s' % (user, pid)
1194 locker = 'user %s, process %s' % (user, pid)
1195 else:
1195 else:
1196 locker = 'user %s, process %s, host %s' \
1196 locker = 'user %s, process %s, host %s' \
1197 % (user, pid, host)
1197 % (user, pid, host)
1198 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1198 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1199 return 1
1199 return 1
1200 except OSError as e:
1200 except OSError as e:
1201 if e.errno != errno.ENOENT:
1201 if e.errno != errno.ENOENT:
1202 raise
1202 raise
1203
1203
1204 ui.write(("%-6s free\n") % (name + ":"))
1204 ui.write(("%-6s free\n") % (name + ":"))
1205 return 0
1205 return 0
1206
1206
1207 held += report(repo.svfs, "lock", repo.lock)
1207 held += report(repo.svfs, "lock", repo.lock)
1208 held += report(repo.vfs, "wlock", repo.wlock)
1208 held += report(repo.vfs, "wlock", repo.wlock)
1209
1209
1210 return held
1210 return held
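# Example output (illustrative; user, pid and age are assumed):
#
#   $ hg debuglocks
#   lock:  user alice, process 2310 (12s)
#   wlock: free
#
# The return value is the number of locks still held, so the command exits
# non-zero whenever a lock is reported as taken.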
1211
1211
1212 @command('debugmergestate', [], '')
1212 @command('debugmergestate', [], '')
1213 def debugmergestate(ui, repo, *args):
1213 def debugmergestate(ui, repo, *args):
1214 """print merge state
1214 """print merge state
1215
1215
1216 Use --verbose to print out information about whether v1 or v2 merge state
1216 Use --verbose to print out information about whether v1 or v2 merge state
1217 was chosen."""
1217 was chosen."""
1218 def _hashornull(h):
1218 def _hashornull(h):
1219 if h == nullhex:
1219 if h == nullhex:
1220 return 'null'
1220 return 'null'
1221 else:
1221 else:
1222 return h
1222 return h
1223
1223
1224 def printrecords(version):
1224 def printrecords(version):
1225 ui.write(('* version %s records\n') % version)
1225 ui.write(('* version %s records\n') % version)
1226 if version == 1:
1226 if version == 1:
1227 records = v1records
1227 records = v1records
1228 else:
1228 else:
1229 records = v2records
1229 records = v2records
1230
1230
1231 for rtype, record in records:
1231 for rtype, record in records:
1232 # pretty print some record types
1232 # pretty print some record types
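# (added note) record types handled below: 'L'/'O' are the local and other
# changeset nodes, 'm' is the merge driver state, 'F'/'D'/'C' are per-file
# merge records, 'f' carries per-file extras, and 'l' stores the conflict
# marker labels; anything else falls through to the "unrecognized entry"
# branch.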
1233 if rtype == 'L':
1233 if rtype == 'L':
1234 ui.write(('local: %s\n') % record)
1234 ui.write(('local: %s\n') % record)
1235 elif rtype == 'O':
1235 elif rtype == 'O':
1236 ui.write(('other: %s\n') % record)
1236 ui.write(('other: %s\n') % record)
1237 elif rtype == 'm':
1237 elif rtype == 'm':
1238 driver, mdstate = record.split('\0', 1)
1238 driver, mdstate = record.split('\0', 1)
1239 ui.write(('merge driver: %s (state "%s")\n')
1239 ui.write(('merge driver: %s (state "%s")\n')
1240 % (driver, mdstate))
1240 % (driver, mdstate))
1241 elif rtype in 'FDC':
1241 elif rtype in 'FDC':
1242 r = record.split('\0')
1242 r = record.split('\0')
1243 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1243 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1244 if version == 1:
1244 if version == 1:
1245 onode = 'not stored in v1 format'
1245 onode = 'not stored in v1 format'
1246 flags = r[7]
1246 flags = r[7]
1247 else:
1247 else:
1248 onode, flags = r[7:9]
1248 onode, flags = r[7:9]
1249 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1249 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1250 % (f, rtype, state, _hashornull(hash)))
1250 % (f, rtype, state, _hashornull(hash)))
1251 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1251 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1252 ui.write((' ancestor path: %s (node %s)\n')
1252 ui.write((' ancestor path: %s (node %s)\n')
1253 % (afile, _hashornull(anode)))
1253 % (afile, _hashornull(anode)))
1254 ui.write((' other path: %s (node %s)\n')
1254 ui.write((' other path: %s (node %s)\n')
1255 % (ofile, _hashornull(onode)))
1255 % (ofile, _hashornull(onode)))
1256 elif rtype == 'f':
1256 elif rtype == 'f':
1257 filename, rawextras = record.split('\0', 1)
1257 filename, rawextras = record.split('\0', 1)
1258 extras = rawextras.split('\0')
1258 extras = rawextras.split('\0')
1259 i = 0
1259 i = 0
1260 extrastrings = []
1260 extrastrings = []
1261 while i < len(extras):
1261 while i < len(extras):
1262 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1262 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1263 i += 2
1263 i += 2
1264
1264
1265 ui.write(('file extras: %s (%s)\n')
1265 ui.write(('file extras: %s (%s)\n')
1266 % (filename, ', '.join(extrastrings)))
1266 % (filename, ', '.join(extrastrings)))
1267 elif rtype == 'l':
1267 elif rtype == 'l':
1268 labels = record.split('\0', 2)
1268 labels = record.split('\0', 2)
1269 labels = [l for l in labels if len(l) > 0]
1269 labels = [l for l in labels if len(l) > 0]
1270 ui.write(('labels:\n'))
1270 ui.write(('labels:\n'))
1271 ui.write((' local: %s\n' % labels[0]))
1271 ui.write((' local: %s\n' % labels[0]))
1272 ui.write((' other: %s\n' % labels[1]))
1272 ui.write((' other: %s\n' % labels[1]))
1273 if len(labels) > 2:
1273 if len(labels) > 2:
1274 ui.write((' base: %s\n' % labels[2]))
1274 ui.write((' base: %s\n' % labels[2]))
1275 else:
1275 else:
1276 ui.write(('unrecognized entry: %s\t%s\n')
1276 ui.write(('unrecognized entry: %s\t%s\n')
1277 % (rtype, record.replace('\0', '\t')))
1277 % (rtype, record.replace('\0', '\t')))
1278
1278
1279 # Avoid mergestate.read() since it may raise an exception for unsupported
1279 # Avoid mergestate.read() since it may raise an exception for unsupported
1280 # merge state records. We shouldn't be doing this, but this is OK since this
1280 # merge state records. We shouldn't be doing this, but this is OK since this
1281 # command is pretty low-level.
1281 # command is pretty low-level.
1282 ms = mergemod.mergestate(repo)
1282 ms = mergemod.mergestate(repo)
1283
1283
1284 # sort so that reasonable information is on top
1284 # sort so that reasonable information is on top
1285 v1records = ms._readrecordsv1()
1285 v1records = ms._readrecordsv1()
1286 v2records = ms._readrecordsv2()
1286 v2records = ms._readrecordsv2()
1287 order = 'LOml'
1287 order = 'LOml'
1288 def key(r):
1288 def key(r):
1289 idx = order.find(r[0])
1289 idx = order.find(r[0])
1290 if idx == -1:
1290 if idx == -1:
1291 return (1, r[1])
1291 return (1, r[1])
1292 else:
1292 else:
1293 return (0, idx)
1293 return (0, idx)
1294 v1records.sort(key=key)
1294 v1records.sort(key=key)
1295 v2records.sort(key=key)
1295 v2records.sort(key=key)
1296
1296
1297 if not v1records and not v2records:
1297 if not v1records and not v2records:
1298 ui.write(('no merge state found\n'))
1298 ui.write(('no merge state found\n'))
1299 elif not v2records:
1299 elif not v2records:
1300 ui.note(('no version 2 merge state\n'))
1300 ui.note(('no version 2 merge state\n'))
1301 printrecords(1)
1301 printrecords(1)
1302 elif ms._v1v2match(v1records, v2records):
1302 elif ms._v1v2match(v1records, v2records):
1303 ui.note(('v1 and v2 states match: using v2\n'))
1303 ui.note(('v1 and v2 states match: using v2\n'))
1304 printrecords(2)
1304 printrecords(2)
1305 else:
1305 else:
1306 ui.note(('v1 and v2 states mismatch: using v1\n'))
1306 ui.note(('v1 and v2 states mismatch: using v1\n'))
1307 printrecords(1)
1307 printrecords(1)
1308 if ui.verbose:
1308 if ui.verbose:
1309 printrecords(2)
1309 printrecords(2)
1310
1310
1311 @command('debugnamecomplete', [], _('NAME...'))
1311 @command('debugnamecomplete', [], _('NAME...'))
1312 def debugnamecomplete(ui, repo, *args):
1312 def debugnamecomplete(ui, repo, *args):
1313 '''complete "names" - tags, open branch names, bookmark names'''
1313 '''complete "names" - tags, open branch names, bookmark names'''
1314
1314
1315 names = set()
1315 names = set()
1316 # since we previously only listed open branches, we will handle that
1316 # since we previously only listed open branches, we will handle that
1317 # specially (after this for loop)
1317 # specially (after this for loop)
1318 for name, ns in repo.names.iteritems():
1318 for name, ns in repo.names.iteritems():
1319 if name != 'branches':
1319 if name != 'branches':
1320 names.update(ns.listnames(repo))
1320 names.update(ns.listnames(repo))
1321 names.update(tag for (tag, heads, tip, closed)
1321 names.update(tag for (tag, heads, tip, closed)
1322 in repo.branchmap().iterbranches() if not closed)
1322 in repo.branchmap().iterbranches() if not closed)
1323 completions = set()
1323 completions = set()
1324 if not args:
1324 if not args:
1325 args = ['']
1325 args = ['']
1326 for a in args:
1326 for a in args:
1327 completions.update(n for n in names if n.startswith(a))
1327 completions.update(n for n in names if n.startswith(a))
1328 ui.write('\n'.join(sorted(completions)))
1328 ui.write('\n'.join(sorted(completions)))
1329 ui.write('\n')
1329 ui.write('\n')
1330
1330
1331 @command('debugobsolete',
1331 @command('debugobsolete',
1332 [('', 'flags', 0, _('markers flag')),
1332 [('', 'flags', 0, _('markers flag')),
1333 ('', 'record-parents', False,
1333 ('', 'record-parents', False,
1334 _('record parent information for the precursor')),
1334 _('record parent information for the precursor')),
1335 ('r', 'rev', [], _('display markers relevant to REV')),
1335 ('r', 'rev', [], _('display markers relevant to REV')),
1336 ('', 'index', False, _('display index of the marker')),
1336 ('', 'index', False, _('display index of the marker')),
1337 ('', 'delete', [], _('delete markers specified by indices')),
1337 ('', 'delete', [], _('delete markers specified by indices')),
1338 ] + commands.commitopts2 + commands.formatteropts,
1338 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1339 _('[OBSOLETED [REPLACEMENT ...]]'))
1339 _('[OBSOLETED [REPLACEMENT ...]]'))
1340 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1340 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1341 """create arbitrary obsolete marker
1341 """create arbitrary obsolete marker
1342
1342
1343 With no arguments, displays the list of obsolescence markers."""
1343 With no arguments, displays the list of obsolescence markers."""
1344
1344
1345 def parsenodeid(s):
1345 def parsenodeid(s):
1346 try:
1346 try:
1347 # We do not use revsingle/revrange functions here to accept
1347 # We do not use revsingle/revrange functions here to accept
1348 # arbitrary node identifiers, possibly not present in the
1348 # arbitrary node identifiers, possibly not present in the
1349 # local repository.
1349 # local repository.
1350 n = bin(s)
1350 n = bin(s)
1351 if len(n) != len(nullid):
1351 if len(n) != len(nullid):
1352 raise TypeError()
1352 raise TypeError()
1353 return n
1353 return n
1354 except TypeError:
1354 except TypeError:
1355 raise error.Abort('changeset references must be full hexadecimal '
1355 raise error.Abort('changeset references must be full hexadecimal '
1356 'node identifiers')
1356 'node identifiers')
1357
1357
1358 if opts.get('delete'):
1358 if opts.get('delete'):
1359 indices = []
1359 indices = []
1360 for v in opts.get('delete'):
1360 for v in opts.get('delete'):
1361 try:
1361 try:
1362 indices.append(int(v))
1362 indices.append(int(v))
1363 except ValueError:
1363 except ValueError:
1364 raise error.Abort(_('invalid index value: %r') % v,
1364 raise error.Abort(_('invalid index value: %r') % v,
1365 hint=_('use integers for indices'))
1365 hint=_('use integers for indices'))
1366
1366
1367 if repo.currenttransaction():
1367 if repo.currenttransaction():
1368 raise error.Abort(_('cannot delete obsmarkers in the middle '
1368 raise error.Abort(_('cannot delete obsmarkers in the middle '
1369 'of transaction.'))
1369 'of transaction.'))
1370
1370
1371 with repo.lock():
1371 with repo.lock():
1372 n = repair.deleteobsmarkers(repo.obsstore, indices)
1372 n = repair.deleteobsmarkers(repo.obsstore, indices)
1373 ui.write(_('deleted %i obsolescence markers\n') % n)
1373 ui.write(_('deleted %i obsolescence markers\n') % n)
1374
1374
1375 return
1375 return
1376
1376
1377 if precursor is not None:
1377 if precursor is not None:
1378 if opts['rev']:
1378 if opts['rev']:
1379 raise error.Abort('cannot select revision when creating marker')
1379 raise error.Abort('cannot select revision when creating marker')
1380 metadata = {}
1380 metadata = {}
1381 metadata['user'] = opts['user'] or ui.username()
1381 metadata['user'] = opts['user'] or ui.username()
1382 succs = tuple(parsenodeid(succ) for succ in successors)
1382 succs = tuple(parsenodeid(succ) for succ in successors)
1383 l = repo.lock()
1383 l = repo.lock()
1384 try:
1384 try:
1385 tr = repo.transaction('debugobsolete')
1385 tr = repo.transaction('debugobsolete')
1386 try:
1386 try:
1387 date = opts.get('date')
1387 date = opts.get('date')
1388 if date:
1388 if date:
1389 date = util.parsedate(date)
1389 date = util.parsedate(date)
1390 else:
1390 else:
1391 date = None
1391 date = None
1392 prec = parsenodeid(precursor)
1392 prec = parsenodeid(precursor)
1393 parents = None
1393 parents = None
1394 if opts['record_parents']:
1394 if opts['record_parents']:
1395 if prec not in repo.unfiltered():
1395 if prec not in repo.unfiltered():
1396 raise error.Abort('cannot use --record-parents on '
1396 raise error.Abort('cannot use --record-parents on '
1397 'unknown changesets')
1397 'unknown changesets')
1398 parents = repo.unfiltered()[prec].parents()
1398 parents = repo.unfiltered()[prec].parents()
1399 parents = tuple(p.node() for p in parents)
1399 parents = tuple(p.node() for p in parents)
1400 repo.obsstore.create(tr, prec, succs, opts['flags'],
1400 repo.obsstore.create(tr, prec, succs, opts['flags'],
1401 parents=parents, date=date,
1401 parents=parents, date=date,
1402 metadata=metadata)
1402 metadata=metadata)
1403 tr.close()
1403 tr.close()
1404 except ValueError as exc:
1404 except ValueError as exc:
1405 raise error.Abort(_('bad obsmarker input: %s') % exc)
1405 raise error.Abort(_('bad obsmarker input: %s') % exc)
1406 finally:
1406 finally:
1407 tr.release()
1407 tr.release()
1408 finally:
1408 finally:
1409 l.release()
1409 l.release()
1410 else:
1410 else:
1411 if opts['rev']:
1411 if opts['rev']:
1412 revs = scmutil.revrange(repo, opts['rev'])
1412 revs = scmutil.revrange(repo, opts['rev'])
1413 nodes = [repo[r].node() for r in revs]
1413 nodes = [repo[r].node() for r in revs]
1414 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1414 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1415 markers.sort(key=lambda x: x._data)
1415 markers.sort(key=lambda x: x._data)
1416 else:
1416 else:
1417 markers = obsolete.getmarkers(repo)
1417 markers = obsolete.getmarkers(repo)
1418
1418
1419 markerstoiter = markers
1419 markerstoiter = markers
1420 isrelevant = lambda m: True
1420 isrelevant = lambda m: True
1421 if opts.get('rev') and opts.get('index'):
1421 if opts.get('rev') and opts.get('index'):
1422 markerstoiter = obsolete.getmarkers(repo)
1422 markerstoiter = obsolete.getmarkers(repo)
1423 markerset = set(markers)
1423 markerset = set(markers)
1424 isrelevant = lambda m: m in markerset
1424 isrelevant = lambda m: m in markerset
1425
1425
1426 fm = ui.formatter('debugobsolete', opts)
1426 fm = ui.formatter('debugobsolete', opts)
1427 for i, m in enumerate(markerstoiter):
1427 for i, m in enumerate(markerstoiter):
1428 if not isrelevant(m):
1428 if not isrelevant(m):
1429 # marker can be irrelevant when we're iterating over a set
1429 # marker can be irrelevant when we're iterating over a set
1430 # of markers (markerstoiter) which is bigger than the set
1430 # of markers (markerstoiter) which is bigger than the set
1431 # of markers we want to display (markers)
1431 # of markers we want to display (markers)
1432 # this can happen if both --index and --rev options are
1432 # this can happen if both --index and --rev options are
1433 # provided and thus we need to iterate over all of the markers
1433 # provided and thus we need to iterate over all of the markers
1434 # to get the correct indices, but only display the ones that
1434 # to get the correct indices, but only display the ones that
1435 # are relevant to --rev value
1435 # are relevant to --rev value
1436 continue
1436 continue
1437 fm.startitem()
1437 fm.startitem()
1438 ind = i if opts.get('index') else None
1438 ind = i if opts.get('index') else None
1439 cmdutil.showmarker(fm, m, index=ind)
1439 cmdutil.showmarker(fm, m, index=ind)
1440 fm.end()
1440 fm.end()
1441
1441
1442 @command('debugpathcomplete',
1442 @command('debugpathcomplete',
1443 [('f', 'full', None, _('complete an entire path')),
1443 [('f', 'full', None, _('complete an entire path')),
1444 ('n', 'normal', None, _('show only normal files')),
1444 ('n', 'normal', None, _('show only normal files')),
1445 ('a', 'added', None, _('show only added files')),
1445 ('a', 'added', None, _('show only added files')),
1446 ('r', 'removed', None, _('show only removed files'))],
1446 ('r', 'removed', None, _('show only removed files'))],
1447 _('FILESPEC...'))
1447 _('FILESPEC...'))
1448 def debugpathcomplete(ui, repo, *specs, **opts):
1448 def debugpathcomplete(ui, repo, *specs, **opts):
1449 '''complete part or all of a tracked path
1449 '''complete part or all of a tracked path
1450
1450
1451 This command supports shells that offer path name completion. It
1451 This command supports shells that offer path name completion. It
1452 currently completes only files already known to the dirstate.
1452 currently completes only files already known to the dirstate.
1453
1453
1454 Completion extends only to the next path segment unless
1454 Completion extends only to the next path segment unless
1455 --full is specified, in which case entire paths are used.'''
1455 --full is specified, in which case entire paths are used.'''
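# Illustrative sketch with hypothetical tracked files "foo/bar.py" and
# "foo/baz/qux.py"; completion stops at the next path segment unless --full is
# given, mirroring the complete() helper below.
#
#   $ hg debugpathcomplete f
#   foo
#   $ hg debugpathcomplete --full f
#   foo/bar.py
#   foo/baz/qux.py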
1456
1456
1457 def complete(path, acceptable):
1457 def complete(path, acceptable):
1458 dirstate = repo.dirstate
1458 dirstate = repo.dirstate
1459 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1459 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1460 rootdir = repo.root + pycompat.ossep
1460 rootdir = repo.root + pycompat.ossep
1461 if spec != repo.root and not spec.startswith(rootdir):
1461 if spec != repo.root and not spec.startswith(rootdir):
1462 return [], []
1462 return [], []
1463 if os.path.isdir(spec):
1463 if os.path.isdir(spec):
1464 spec += '/'
1464 spec += '/'
1465 spec = spec[len(rootdir):]
1465 spec = spec[len(rootdir):]
1466 fixpaths = pycompat.ossep != '/'
1466 fixpaths = pycompat.ossep != '/'
1467 if fixpaths:
1467 if fixpaths:
1468 spec = spec.replace(pycompat.ossep, '/')
1468 spec = spec.replace(pycompat.ossep, '/')
1469 speclen = len(spec)
1469 speclen = len(spec)
1470 fullpaths = opts['full']
1470 fullpaths = opts['full']
1471 files, dirs = set(), set()
1471 files, dirs = set(), set()
1472 adddir, addfile = dirs.add, files.add
1472 adddir, addfile = dirs.add, files.add
1473 for f, st in dirstate.iteritems():
1473 for f, st in dirstate.iteritems():
1474 if f.startswith(spec) and st[0] in acceptable:
1474 if f.startswith(spec) and st[0] in acceptable:
1475 if fixpaths:
1475 if fixpaths:
1476 f = f.replace('/', pycompat.ossep)
1476 f = f.replace('/', pycompat.ossep)
1477 if fullpaths:
1477 if fullpaths:
1478 addfile(f)
1478 addfile(f)
1479 continue
1479 continue
1480 s = f.find(pycompat.ossep, speclen)
1480 s = f.find(pycompat.ossep, speclen)
1481 if s >= 0:
1481 if s >= 0:
1482 adddir(f[:s])
1482 adddir(f[:s])
1483 else:
1483 else:
1484 addfile(f)
1484 addfile(f)
1485 return files, dirs
1485 return files, dirs
1486
1486
1487 acceptable = ''
1487 acceptable = ''
1488 if opts['normal']:
1488 if opts['normal']:
1489 acceptable += 'nm'
1489 acceptable += 'nm'
1490 if opts['added']:
1490 if opts['added']:
1491 acceptable += 'a'
1491 acceptable += 'a'
1492 if opts['removed']:
1492 if opts['removed']:
1493 acceptable += 'r'
1493 acceptable += 'r'
1494 cwd = repo.getcwd()
1494 cwd = repo.getcwd()
1495 if not specs:
1495 if not specs:
1496 specs = ['.']
1496 specs = ['.']
1497
1497
1498 files, dirs = set(), set()
1498 files, dirs = set(), set()
1499 for spec in specs:
1499 for spec in specs:
1500 f, d = complete(spec, acceptable or 'nmar')
1500 f, d = complete(spec, acceptable or 'nmar')
1501 files.update(f)
1501 files.update(f)
1502 dirs.update(d)
1502 dirs.update(d)
1503 files.update(dirs)
1503 files.update(dirs)
1504 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1504 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1505 ui.write('\n')
1505 ui.write('\n')
1506
1506
1507 @command('debugpickmergetool',
1507 @command('debugpickmergetool',
1508 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1508 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1509 ('', 'changedelete', None, _('emulate merging change and delete')),
1509 ('', 'changedelete', None, _('emulate merging change and delete')),
1510 ] + commands.walkopts + commands.mergetoolopts,
1510 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1511 _('[PATTERN]...'),
1511 _('[PATTERN]...'),
1512 inferrepo=True)
1512 inferrepo=True)
1513 def debugpickmergetool(ui, repo, *pats, **opts):
1513 def debugpickmergetool(ui, repo, *pats, **opts):
1514 """examine which merge tool is chosen for the specified file
1514 """examine which merge tool is chosen for the specified file
1515
1515
1516 As described in :hg:`help merge-tools`, Mercurial examines
1516 As described in :hg:`help merge-tools`, Mercurial examines
1517 the configurations below, in this order, to decide which merge tool is
1517 the configurations below, in this order, to decide which merge tool is
1518 chosen for the specified file.
1518 chosen for the specified file.
1519
1519
1520 1. ``--tool`` option
1520 1. ``--tool`` option
1521 2. ``HGMERGE`` environment variable
1521 2. ``HGMERGE`` environment variable
1522 3. configurations in ``merge-patterns`` section
1522 3. configurations in ``merge-patterns`` section
1523 4. configuration of ``ui.merge``
1523 4. configuration of ``ui.merge``
1524 5. configurations in ``merge-tools`` section
1524 5. configurations in ``merge-tools`` section
1525 6. ``hgmerge`` tool (for historical reasons only)
1525 6. ``hgmerge`` tool (for historical reasons only)
1526 7. default tool for fallback (``:merge`` or ``:prompt``)
1526 7. default tool for fallback (``:merge`` or ``:prompt``)
1527
1527
1528 This command writes out the examination result in the style below::
1528 This command writes out the examination result in the style below::
1529
1529
1530 FILE = MERGETOOL
1530 FILE = MERGETOOL
1531
1531
1532 By default, all files known in the first parent context of the
1532 By default, all files known in the first parent context of the
1533 working directory are examined. Use file patterns and/or -I/-X
1533 working directory are examined. Use file patterns and/or -I/-X
1534 options to limit target files. -r/--rev is also useful to examine
1534 options to limit target files. -r/--rev is also useful to examine
1535 files in another context without actually updating to it.
1535 files in another context without actually updating to it.
1536
1536
1537 With --debug, this command shows warning messages while matching
1537 With --debug, this command shows warning messages while matching
1538 against ``merge-patterns`` and so on, too. It is recommended to
1538 against ``merge-patterns`` and so on, too. It is recommended to
1539 use this option with explicit file patterns and/or -I/-X options,
1539 use this option with explicit file patterns and/or -I/-X options,
1540 because this option increases the amount of output per file according
1540 because this option increases the amount of output per file according
1541 to configurations in hgrc.
1541 to configurations in hgrc.
1542
1542
1543 With -v/--verbose, this command first shows the configurations below
1543 With -v/--verbose, this command first shows the configurations below
1544 (only if they are specified).
1544 (only if they are specified).
1545
1545
1546 - ``--tool`` option
1546 - ``--tool`` option
1547 - ``HGMERGE`` environment variable
1547 - ``HGMERGE`` environment variable
1548 - configuration of ``ui.merge``
1548 - configuration of ``ui.merge``
1549
1549
1550 If a merge tool is chosen before matching against
1550 If a merge tool is chosen before matching against
1551 ``merge-patterns``, this command can't show any helpful
1551 ``merge-patterns``, this command can't show any helpful
1552 information, even with --debug. In such a case, the information above is
1552 information, even with --debug. In such a case, the information above is
1553 useful for knowing why a merge tool was chosen.
1553 useful for knowing why a merge tool was chosen.
1554 """
1554 """
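# Illustrative invocation with a hypothetical file name; when --tool is given
# it wins per step 1 of the order documented above, so the internal :merge3
# tool is reported for every matched file. This is a sketch, not output
# captured from a real run.
#
#   $ hg debugpickmergetool --tool :merge3 'glob:**.c'
#   src/main.c = :merge3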
1555 overrides = {}
1555 overrides = {}
1556 if opts['tool']:
1556 if opts['tool']:
1557 overrides[('ui', 'forcemerge')] = opts['tool']
1557 overrides[('ui', 'forcemerge')] = opts['tool']
1558 ui.note(('with --tool %r\n') % (opts['tool']))
1558 ui.note(('with --tool %r\n') % (opts['tool']))
1559
1559
1560 with ui.configoverride(overrides, 'debugmergepatterns'):
1560 with ui.configoverride(overrides, 'debugmergepatterns'):
1561 hgmerge = encoding.environ.get("HGMERGE")
1561 hgmerge = encoding.environ.get("HGMERGE")
1562 if hgmerge is not None:
1562 if hgmerge is not None:
1563 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1563 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1564 uimerge = ui.config("ui", "merge")
1564 uimerge = ui.config("ui", "merge")
1565 if uimerge:
1565 if uimerge:
1566 ui.note(('with ui.merge=%r\n') % (uimerge))
1566 ui.note(('with ui.merge=%r\n') % (uimerge))
1567
1567
1568 ctx = scmutil.revsingle(repo, opts.get('rev'))
1568 ctx = scmutil.revsingle(repo, opts.get('rev'))
1569 m = scmutil.match(ctx, pats, opts)
1569 m = scmutil.match(ctx, pats, opts)
1570 changedelete = opts['changedelete']
1570 changedelete = opts['changedelete']
1571 for path in ctx.walk(m):
1571 for path in ctx.walk(m):
1572 fctx = ctx[path]
1572 fctx = ctx[path]
1573 try:
1573 try:
1574 if not ui.debugflag:
1574 if not ui.debugflag:
1575 ui.pushbuffer(error=True)
1575 ui.pushbuffer(error=True)
1576 tool, toolpath = filemerge._picktool(repo, ui, path,
1576 tool, toolpath = filemerge._picktool(repo, ui, path,
1577 fctx.isbinary(),
1577 fctx.isbinary(),
1578 'l' in fctx.flags(),
1578 'l' in fctx.flags(),
1579 changedelete)
1579 changedelete)
1580 finally:
1580 finally:
1581 if not ui.debugflag:
1581 if not ui.debugflag:
1582 ui.popbuffer()
1582 ui.popbuffer()
1583 ui.write(('%s = %s\n') % (path, tool))
1583 ui.write(('%s = %s\n') % (path, tool))
1584
1584
1585 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1585 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1586 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1586 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1587 '''access the pushkey key/value protocol
1587 '''access the pushkey key/value protocol
1588
1588
1589 With two args, list the keys in the given namespace.
1589 With two args, list the keys in the given namespace.
1590
1590
1591 With five args, set a key to new if it currently is set to old.
1591 With five args, set a key to new if it currently is set to old.
1592 Reports success or failure.
1592 Reports success or failure.
1593 '''
1593 '''
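# Illustrative sketch with a placeholder repository path; "bookmarks" is one of
# the standard pushkey namespaces. In the five-argument form the empty string
# is passed as the old value when the key is not expected to exist yet.
#
#   $ hg debugpushkey /path/to/repo bookmarks                          # list keys
#   $ hg debugpushkey /path/to/repo bookmarks mybook '' <full-hex-node>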
1594
1594
1595 target = hg.peer(ui, {}, repopath)
1595 target = hg.peer(ui, {}, repopath)
1596 if keyinfo:
1596 if keyinfo:
1597 key, old, new = keyinfo
1597 key, old, new = keyinfo
1598 r = target.pushkey(namespace, key, old, new)
1598 r = target.pushkey(namespace, key, old, new)
1599 ui.status(str(r) + '\n')
1599 ui.status(str(r) + '\n')
1600 return not r
1600 return not r
1601 else:
1601 else:
1602 for k, v in sorted(target.listkeys(namespace).iteritems()):
1602 for k, v in sorted(target.listkeys(namespace).iteritems()):
1603 ui.write("%s\t%s\n" % (util.escapestr(k),
1603 ui.write("%s\t%s\n" % (util.escapestr(k),
1604 util.escapestr(v)))
1604 util.escapestr(v)))
1605
1605
1606 @command('debugpvec', [], _('A B'))
1606 @command('debugpvec', [], _('A B'))
1607 def debugpvec(ui, repo, a, b=None):
1607 def debugpvec(ui, repo, a, b=None):
1608 ca = scmutil.revsingle(repo, a)
1608 ca = scmutil.revsingle(repo, a)
1609 cb = scmutil.revsingle(repo, b)
1609 cb = scmutil.revsingle(repo, b)
1610 pa = pvec.ctxpvec(ca)
1610 pa = pvec.ctxpvec(ca)
1611 pb = pvec.ctxpvec(cb)
1611 pb = pvec.ctxpvec(cb)
1612 if pa == pb:
1612 if pa == pb:
1613 rel = "="
1613 rel = "="
1614 elif pa > pb:
1614 elif pa > pb:
1615 rel = ">"
1615 rel = ">"
1616 elif pa < pb:
1616 elif pa < pb:
1617 rel = "<"
1617 rel = "<"
1618 elif pa | pb:
1618 elif pa | pb:
1619 rel = "|"
1619 rel = "|"
1620 ui.write(_("a: %s\n") % pa)
1620 ui.write(_("a: %s\n") % pa)
1621 ui.write(_("b: %s\n") % pb)
1621 ui.write(_("b: %s\n") % pb)
1622 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1622 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1623 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1623 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1624 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1624 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1625 pa.distance(pb), rel))
1625 pa.distance(pb), rel))
1626
1626
1627 @command('debugrebuilddirstate|debugrebuildstate',
1627 @command('debugrebuilddirstate|debugrebuildstate',
1628 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1628 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1629 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1629 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1630 'the working copy parent')),
1630 'the working copy parent')),
1631 ],
1631 ],
1632 _('[-r REV]'))
1632 _('[-r REV]'))
1633 def debugrebuilddirstate(ui, repo, rev, **opts):
1633 def debugrebuilddirstate(ui, repo, rev, **opts):
1634 """rebuild the dirstate as it would look for the given revision
1634 """rebuild the dirstate as it would look for the given revision
1635
1635
1636 If no revision is specified, the first current parent will be used.
1636 If no revision is specified, the first current parent will be used.
1637
1637
1638 The dirstate will be set to the files of the given revision.
1638 The dirstate will be set to the files of the given revision.
1639 The actual working directory content or existing dirstate
1639 The actual working directory content or existing dirstate
1640 information such as adds or removes is not considered.
1640 information such as adds or removes is not considered.
1641
1641
1642 ``minimal`` will only rebuild the dirstate status for files that claim to be
1642 ``minimal`` will only rebuild the dirstate status for files that claim to be
1643 tracked but are not in the parent manifest, or that exist in the parent
1643 tracked but are not in the parent manifest, or that exist in the parent
1644 manifest but are not in the dirstate. It will not change adds, removes, or
1644 manifest but are not in the dirstate. It will not change adds, removes, or
1645 modified files that are in the working copy parent.
1645 modified files that are in the working copy parent.
1646
1646
1647 One use of this command is to make the next :hg:`status` invocation
1647 One use of this command is to make the next :hg:`status` invocation
1648 check the actual file content.
1648 check the actual file content.
1649 """
1649 """
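# Illustrative usage; both forms operate on the current repository and run
# under the wlock acquired below.
#
#   $ hg debugrebuilddirstate             # rebuild from the working copy parent
#   $ hg debugrebuilddirstate --minimal   # only touch inconsistent entries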
1650 ctx = scmutil.revsingle(repo, rev)
1650 ctx = scmutil.revsingle(repo, rev)
1651 with repo.wlock():
1651 with repo.wlock():
1652 dirstate = repo.dirstate
1652 dirstate = repo.dirstate
1653 changedfiles = None
1653 changedfiles = None
1654 # See command doc for what minimal does.
1654 # See command doc for what minimal does.
1655 if opts.get('minimal'):
1655 if opts.get('minimal'):
1656 manifestfiles = set(ctx.manifest().keys())
1656 manifestfiles = set(ctx.manifest().keys())
1657 dirstatefiles = set(dirstate)
1657 dirstatefiles = set(dirstate)
1658 manifestonly = manifestfiles - dirstatefiles
1658 manifestonly = manifestfiles - dirstatefiles
1659 dsonly = dirstatefiles - manifestfiles
1659 dsonly = dirstatefiles - manifestfiles
1660 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1660 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1661 changedfiles = manifestonly | dsnotadded
1661 changedfiles = manifestonly | dsnotadded
1662
1662
1663 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1663 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1664
1664
1665 @command('debugrebuildfncache', [], '')
1665 @command('debugrebuildfncache', [], '')
1666 def debugrebuildfncache(ui, repo):
1666 def debugrebuildfncache(ui, repo):
1667 """rebuild the fncache file"""
1667 """rebuild the fncache file"""
1668 repair.rebuildfncache(ui, repo)
1668 repair.rebuildfncache(ui, repo)
1669
1669
1670 @command('debugrename',
1670 @command('debugrename',
1671 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1671 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1672 _('[-r REV] FILE'))
1672 _('[-r REV] FILE'))
1673 def debugrename(ui, repo, file1, *pats, **opts):
1673 def debugrename(ui, repo, file1, *pats, **opts):
1674 """dump rename information"""
1674 """dump rename information"""
1675
1675
1676 ctx = scmutil.revsingle(repo, opts.get('rev'))
1676 ctx = scmutil.revsingle(repo, opts.get('rev'))
1677 m = scmutil.match(ctx, (file1,) + pats, opts)
1677 m = scmutil.match(ctx, (file1,) + pats, opts)
1678 for abs in ctx.walk(m):
1678 for abs in ctx.walk(m):
1679 fctx = ctx[abs]
1679 fctx = ctx[abs]
1680 o = fctx.filelog().renamed(fctx.filenode())
1680 o = fctx.filelog().renamed(fctx.filenode())
1681 rel = m.rel(abs)
1681 rel = m.rel(abs)
1682 if o:
1682 if o:
1683 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1683 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1684 else:
1684 else:
1685 ui.write(_("%s not renamed\n") % rel)
1685 ui.write(_("%s not renamed\n") % rel)
1686
1686
1687 @command('debugrevlog', commands.debugrevlogopts +
1687 @command('debugrevlog', cmdutil.debugrevlogopts +
1688 [('d', 'dump', False, _('dump index data'))],
1688 [('d', 'dump', False, _('dump index data'))],
1689 _('-c|-m|FILE'),
1689 _('-c|-m|FILE'),
1690 optionalrepo=True)
1690 optionalrepo=True)
1691 def debugrevlog(ui, repo, file_=None, **opts):
1691 def debugrevlog(ui, repo, file_=None, **opts):
1692 """show data and statistics about a revlog"""
1692 """show data and statistics about a revlog"""
1693 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1693 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1694
1694
1695 if opts.get("dump"):
1695 if opts.get("dump"):
1696 numrevs = len(r)
1696 numrevs = len(r)
1697 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1697 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1698 " rawsize totalsize compression heads chainlen\n"))
1698 " rawsize totalsize compression heads chainlen\n"))
1699 ts = 0
1699 ts = 0
1700 heads = set()
1700 heads = set()
1701
1701
1702 for rev in xrange(numrevs):
1702 for rev in xrange(numrevs):
1703 dbase = r.deltaparent(rev)
1703 dbase = r.deltaparent(rev)
1704 if dbase == -1:
1704 if dbase == -1:
1705 dbase = rev
1705 dbase = rev
1706 cbase = r.chainbase(rev)
1706 cbase = r.chainbase(rev)
1707 clen = r.chainlen(rev)
1707 clen = r.chainlen(rev)
1708 p1, p2 = r.parentrevs(rev)
1708 p1, p2 = r.parentrevs(rev)
1709 rs = r.rawsize(rev)
1709 rs = r.rawsize(rev)
1710 ts = ts + rs
1710 ts = ts + rs
1711 heads -= set(r.parentrevs(rev))
1711 heads -= set(r.parentrevs(rev))
1712 heads.add(rev)
1712 heads.add(rev)
1713 try:
1713 try:
1714 compression = ts / r.end(rev)
1714 compression = ts / r.end(rev)
1715 except ZeroDivisionError:
1715 except ZeroDivisionError:
1716 compression = 0
1716 compression = 0
1717 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1717 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1718 "%11d %5d %8d\n" %
1718 "%11d %5d %8d\n" %
1719 (rev, p1, p2, r.start(rev), r.end(rev),
1719 (rev, p1, p2, r.start(rev), r.end(rev),
1720 r.start(dbase), r.start(cbase),
1720 r.start(dbase), r.start(cbase),
1721 r.start(p1), r.start(p2),
1721 r.start(p1), r.start(p2),
1722 rs, ts, compression, len(heads), clen))
1722 rs, ts, compression, len(heads), clen))
1723 return 0
1723 return 0
1724
1724
1725 v = r.version
1725 v = r.version
1726 format = v & 0xFFFF
1726 format = v & 0xFFFF
1727 flags = []
1727 flags = []
1728 gdelta = False
1728 gdelta = False
1729 if v & revlog.FLAG_INLINE_DATA:
1729 if v & revlog.FLAG_INLINE_DATA:
1730 flags.append('inline')
1730 flags.append('inline')
1731 if v & revlog.FLAG_GENERALDELTA:
1731 if v & revlog.FLAG_GENERALDELTA:
1732 gdelta = True
1732 gdelta = True
1733 flags.append('generaldelta')
1733 flags.append('generaldelta')
1734 if not flags:
1734 if not flags:
1735 flags = ['(none)']
1735 flags = ['(none)']
1736
1736
1737 nummerges = 0
1737 nummerges = 0
1738 numfull = 0
1738 numfull = 0
1739 numprev = 0
1739 numprev = 0
1740 nump1 = 0
1740 nump1 = 0
1741 nump2 = 0
1741 nump2 = 0
1742 numother = 0
1742 numother = 0
1743 nump1prev = 0
1743 nump1prev = 0
1744 nump2prev = 0
1744 nump2prev = 0
1745 chainlengths = []
1745 chainlengths = []
1746
1746
1747 datasize = [None, 0, 0]
1747 datasize = [None, 0, 0]
1748 fullsize = [None, 0, 0]
1748 fullsize = [None, 0, 0]
1749 deltasize = [None, 0, 0]
1749 deltasize = [None, 0, 0]
1750 chunktypecounts = {}
1750 chunktypecounts = {}
1751 chunktypesizes = {}
1751 chunktypesizes = {}
1752
1752
1753 def addsize(size, l):
1753 def addsize(size, l):
1754 if l[0] is None or size < l[0]:
1754 if l[0] is None or size < l[0]:
1755 l[0] = size
1755 l[0] = size
1756 if size > l[1]:
1756 if size > l[1]:
1757 l[1] = size
1757 l[1] = size
1758 l[2] += size
1758 l[2] += size
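# Worked example (arbitrary sizes): addsize maintains l as [min, max, total].
# Starting from [None, 0, 0], addsize(5, l) leaves l == [5, 5, 5], and a
# subsequent addsize(3, l) leaves l == [3, 5, 8].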
1759
1759
1760 numrevs = len(r)
1760 numrevs = len(r)
1761 for rev in xrange(numrevs):
1761 for rev in xrange(numrevs):
1762 p1, p2 = r.parentrevs(rev)
1762 p1, p2 = r.parentrevs(rev)
1763 delta = r.deltaparent(rev)
1763 delta = r.deltaparent(rev)
1764 if format > 0:
1764 if format > 0:
1765 addsize(r.rawsize(rev), datasize)
1765 addsize(r.rawsize(rev), datasize)
1766 if p2 != nullrev:
1766 if p2 != nullrev:
1767 nummerges += 1
1767 nummerges += 1
1768 size = r.length(rev)
1768 size = r.length(rev)
1769 if delta == nullrev:
1769 if delta == nullrev:
1770 chainlengths.append(0)
1770 chainlengths.append(0)
1771 numfull += 1
1771 numfull += 1
1772 addsize(size, fullsize)
1772 addsize(size, fullsize)
1773 else:
1773 else:
1774 chainlengths.append(chainlengths[delta] + 1)
1774 chainlengths.append(chainlengths[delta] + 1)
1775 addsize(size, deltasize)
1775 addsize(size, deltasize)
1776 if delta == rev - 1:
1776 if delta == rev - 1:
1777 numprev += 1
1777 numprev += 1
1778 if delta == p1:
1778 if delta == p1:
1779 nump1prev += 1
1779 nump1prev += 1
1780 elif delta == p2:
1780 elif delta == p2:
1781 nump2prev += 1
1781 nump2prev += 1
1782 elif delta == p1:
1782 elif delta == p1:
1783 nump1 += 1
1783 nump1 += 1
1784 elif delta == p2:
1784 elif delta == p2:
1785 nump2 += 1
1785 nump2 += 1
1786 elif delta != nullrev:
1786 elif delta != nullrev:
1787 numother += 1
1787 numother += 1
1788
1788
1789 # Obtain data on the raw chunks in the revlog.
1789 # Obtain data on the raw chunks in the revlog.
1790 segment = r._getsegmentforrevs(rev, rev)[1]
1790 segment = r._getsegmentforrevs(rev, rev)[1]
1791 if segment:
1791 if segment:
1792 chunktype = segment[0]
1792 chunktype = segment[0]
1793 else:
1793 else:
1794 chunktype = 'empty'
1794 chunktype = 'empty'
1795
1795
1796 if chunktype not in chunktypecounts:
1796 if chunktype not in chunktypecounts:
1797 chunktypecounts[chunktype] = 0
1797 chunktypecounts[chunktype] = 0
1798 chunktypesizes[chunktype] = 0
1798 chunktypesizes[chunktype] = 0
1799
1799
1800 chunktypecounts[chunktype] += 1
1800 chunktypecounts[chunktype] += 1
1801 chunktypesizes[chunktype] += size
1801 chunktypesizes[chunktype] += size
1802
1802
1803 # Adjust size min value for empty cases
1803 # Adjust size min value for empty cases
1804 for size in (datasize, fullsize, deltasize):
1804 for size in (datasize, fullsize, deltasize):
1805 if size[0] is None:
1805 if size[0] is None:
1806 size[0] = 0
1806 size[0] = 0
1807
1807
1808 numdeltas = numrevs - numfull
1808 numdeltas = numrevs - numfull
1809 numoprev = numprev - nump1prev - nump2prev
1809 numoprev = numprev - nump1prev - nump2prev
1810 totalrawsize = datasize[2]
1810 totalrawsize = datasize[2]
1811 datasize[2] /= numrevs
1811 datasize[2] /= numrevs
1812 fulltotal = fullsize[2]
1812 fulltotal = fullsize[2]
1813 fullsize[2] /= numfull
1813 fullsize[2] /= numfull
1814 deltatotal = deltasize[2]
1814 deltatotal = deltasize[2]
1815 if numrevs - numfull > 0:
1815 if numrevs - numfull > 0:
1816 deltasize[2] /= numrevs - numfull
1816 deltasize[2] /= numrevs - numfull
1817 totalsize = fulltotal + deltatotal
1817 totalsize = fulltotal + deltatotal
1818 avgchainlen = sum(chainlengths) / numrevs
1818 avgchainlen = sum(chainlengths) / numrevs
1819 maxchainlen = max(chainlengths)
1819 maxchainlen = max(chainlengths)
1820 compratio = 1
1820 compratio = 1
1821 if totalsize:
1821 if totalsize:
1822 compratio = totalrawsize / totalsize
1822 compratio = totalrawsize / totalsize
1823
1823
1824 basedfmtstr = '%%%dd\n'
1824 basedfmtstr = '%%%dd\n'
1825 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1825 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1826
1826
1827 def dfmtstr(max):
1827 def dfmtstr(max):
1828 return basedfmtstr % len(str(max))
1828 return basedfmtstr % len(str(max))
1829 def pcfmtstr(max, padding=0):
1829 def pcfmtstr(max, padding=0):
1830 return basepcfmtstr % (len(str(max)), ' ' * padding)
1830 return basepcfmtstr % (len(str(max)), ' ' * padding)
1831
1831
1832 def pcfmt(value, total):
1832 def pcfmt(value, total):
1833 if total:
1833 if total:
1834 return (value, 100 * float(value) / total)
1834 return (value, 100 * float(value) / total)
1835 else:
1835 else:
1836 return value, 100.0
1836 return value, 100.0
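# Worked example (arbitrary numbers): with totalsize = 1234, pcfmtstr(1234)
# is '%4d (%5.2f%%)\n', and pcfmt(308, 1234) returns (308, 24.959...), so the
# rendered line reads ' 308 (24.96%)'.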
1837
1837
1838 ui.write(('format : %d\n') % format)
1838 ui.write(('format : %d\n') % format)
1839 ui.write(('flags : %s\n') % ', '.join(flags))
1839 ui.write(('flags : %s\n') % ', '.join(flags))
1840
1840
1841 ui.write('\n')
1841 ui.write('\n')
1842 fmt = pcfmtstr(totalsize)
1842 fmt = pcfmtstr(totalsize)
1843 fmt2 = dfmtstr(totalsize)
1843 fmt2 = dfmtstr(totalsize)
1844 ui.write(('revisions : ') + fmt2 % numrevs)
1844 ui.write(('revisions : ') + fmt2 % numrevs)
1845 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1845 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1846 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1846 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1847 ui.write(('revisions : ') + fmt2 % numrevs)
1847 ui.write(('revisions : ') + fmt2 % numrevs)
1848 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1848 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1849 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1849 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1850 ui.write(('revision size : ') + fmt2 % totalsize)
1850 ui.write(('revision size : ') + fmt2 % totalsize)
1851 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1851 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1852 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1852 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1853
1853
1854 def fmtchunktype(chunktype):
1854 def fmtchunktype(chunktype):
1855 if chunktype == 'empty':
1855 if chunktype == 'empty':
1856 return ' %s : ' % chunktype
1856 return ' %s : ' % chunktype
1857 elif chunktype in string.ascii_letters:
1857 elif chunktype in string.ascii_letters:
1858 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1858 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1859 else:
1859 else:
1860 return ' 0x%s : ' % hex(chunktype)
1860 return ' 0x%s : ' % hex(chunktype)
1861
1861
1862 ui.write('\n')
1862 ui.write('\n')
1863 ui.write(('chunks : ') + fmt2 % numrevs)
1863 ui.write(('chunks : ') + fmt2 % numrevs)
1864 for chunktype in sorted(chunktypecounts):
1864 for chunktype in sorted(chunktypecounts):
1865 ui.write(fmtchunktype(chunktype))
1865 ui.write(fmtchunktype(chunktype))
1866 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1866 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1867 ui.write(('chunks size : ') + fmt2 % totalsize)
1867 ui.write(('chunks size : ') + fmt2 % totalsize)
1868 for chunktype in sorted(chunktypecounts):
1868 for chunktype in sorted(chunktypecounts):
1869 ui.write(fmtchunktype(chunktype))
1869 ui.write(fmtchunktype(chunktype))
1870 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1870 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1871
1871
1872 ui.write('\n')
1872 ui.write('\n')
1873 fmt = dfmtstr(max(avgchainlen, compratio))
1873 fmt = dfmtstr(max(avgchainlen, compratio))
1874 ui.write(('avg chain length : ') + fmt % avgchainlen)
1874 ui.write(('avg chain length : ') + fmt % avgchainlen)
1875 ui.write(('max chain length : ') + fmt % maxchainlen)
1875 ui.write(('max chain length : ') + fmt % maxchainlen)
1876 ui.write(('compression ratio : ') + fmt % compratio)
1876 ui.write(('compression ratio : ') + fmt % compratio)
1877
1877
1878 if format > 0:
1878 if format > 0:
1879 ui.write('\n')
1879 ui.write('\n')
1880 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1880 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1881 % tuple(datasize))
1881 % tuple(datasize))
1882 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1882 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1883 % tuple(fullsize))
1883 % tuple(fullsize))
1884 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1884 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1885 % tuple(deltasize))
1885 % tuple(deltasize))
1886
1886
1887 if numdeltas > 0:
1887 if numdeltas > 0:
1888 ui.write('\n')
1888 ui.write('\n')
1889 fmt = pcfmtstr(numdeltas)
1889 fmt = pcfmtstr(numdeltas)
1890 fmt2 = pcfmtstr(numdeltas, 4)
1890 fmt2 = pcfmtstr(numdeltas, 4)
1891 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1891 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1892 if numprev > 0:
1892 if numprev > 0:
1893 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1893 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1894 numprev))
1894 numprev))
1895 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1895 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1896 numprev))
1896 numprev))
1897 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1897 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1898 numprev))
1898 numprev))
1899 if gdelta:
1899 if gdelta:
1900 ui.write(('deltas against p1 : ')
1900 ui.write(('deltas against p1 : ')
1901 + fmt % pcfmt(nump1, numdeltas))
1901 + fmt % pcfmt(nump1, numdeltas))
1902 ui.write(('deltas against p2 : ')
1902 ui.write(('deltas against p2 : ')
1903 + fmt % pcfmt(nump2, numdeltas))
1903 + fmt % pcfmt(nump2, numdeltas))
1904 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1904 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1905 numdeltas))
1905 numdeltas))
1906
1906
1907 @command('debugrevspec',
1907 @command('debugrevspec',
1908 [('', 'optimize', None,
1908 [('', 'optimize', None,
1909 _('print parsed tree after optimizing (DEPRECATED)')),
1909 _('print parsed tree after optimizing (DEPRECATED)')),
1910 ('p', 'show-stage', [],
1910 ('p', 'show-stage', [],
1911 _('print parsed tree at the given stage'), _('NAME')),
1911 _('print parsed tree at the given stage'), _('NAME')),
1912 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1912 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1913 ('', 'verify-optimized', False, _('verify optimized result')),
1913 ('', 'verify-optimized', False, _('verify optimized result')),
1914 ],
1914 ],
1915 ('REVSPEC'))
1915 ('REVSPEC'))
1916 def debugrevspec(ui, repo, expr, **opts):
1916 def debugrevspec(ui, repo, expr, **opts):
1917 """parse and apply a revision specification
1917 """parse and apply a revision specification
1918
1918
1919 Use -p/--show-stage option to print the parsed tree at the given stages.
1919 Use -p/--show-stage option to print the parsed tree at the given stages.
1920 Use -p all to print tree at every stage.
1920 Use -p all to print tree at every stage.
1921
1921
1922 Use --verify-optimized to compare the optimized result with the unoptimized
1922 Use --verify-optimized to compare the optimized result with the unoptimized
1923 one. Returns 1 if the optimized result differs.
1923 one. Returns 1 if the optimized result differs.
1924 """
1924 """
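# Illustrative invocation (arbitrary revset); with -p all the parsed tree is
# printed after each stage named in the list below, followed by one matching
# revision number per line.
#
#   $ hg debugrevspec -p all 'tip and not merge()'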
1925 stages = [
1925 stages = [
1926 ('parsed', lambda tree: tree),
1926 ('parsed', lambda tree: tree),
1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1928 ('concatenated', revsetlang.foldconcat),
1928 ('concatenated', revsetlang.foldconcat),
1929 ('analyzed', revsetlang.analyze),
1929 ('analyzed', revsetlang.analyze),
1930 ('optimized', revsetlang.optimize),
1930 ('optimized', revsetlang.optimize),
1931 ]
1931 ]
1932 if opts['no_optimized']:
1932 if opts['no_optimized']:
1933 stages = stages[:-1]
1933 stages = stages[:-1]
1934 if opts['verify_optimized'] and opts['no_optimized']:
1934 if opts['verify_optimized'] and opts['no_optimized']:
1935 raise error.Abort(_('cannot use --verify-optimized with '
1935 raise error.Abort(_('cannot use --verify-optimized with '
1936 '--no-optimized'))
1936 '--no-optimized'))
1937 stagenames = set(n for n, f in stages)
1937 stagenames = set(n for n, f in stages)
1938
1938
1939 showalways = set()
1939 showalways = set()
1940 showchanged = set()
1940 showchanged = set()
1941 if ui.verbose and not opts['show_stage']:
1941 if ui.verbose and not opts['show_stage']:
1942 # show parsed tree by --verbose (deprecated)
1942 # show parsed tree by --verbose (deprecated)
1943 showalways.add('parsed')
1943 showalways.add('parsed')
1944 showchanged.update(['expanded', 'concatenated'])
1944 showchanged.update(['expanded', 'concatenated'])
1945 if opts['optimize']:
1945 if opts['optimize']:
1946 showalways.add('optimized')
1946 showalways.add('optimized')
1947 if opts['show_stage'] and opts['optimize']:
1947 if opts['show_stage'] and opts['optimize']:
1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1949 if opts['show_stage'] == ['all']:
1949 if opts['show_stage'] == ['all']:
1950 showalways.update(stagenames)
1950 showalways.update(stagenames)
1951 else:
1951 else:
1952 for n in opts['show_stage']:
1952 for n in opts['show_stage']:
1953 if n not in stagenames:
1953 if n not in stagenames:
1954 raise error.Abort(_('invalid stage name: %s') % n)
1954 raise error.Abort(_('invalid stage name: %s') % n)
1955 showalways.update(opts['show_stage'])
1955 showalways.update(opts['show_stage'])
1956
1956
1957 treebystage = {}
1957 treebystage = {}
1958 printedtree = None
1958 printedtree = None
1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1960 for n, f in stages:
1960 for n, f in stages:
1961 treebystage[n] = tree = f(tree)
1961 treebystage[n] = tree = f(tree)
1962 if n in showalways or (n in showchanged and tree != printedtree):
1962 if n in showalways or (n in showchanged and tree != printedtree):
1963 if opts['show_stage'] or n != 'parsed':
1963 if opts['show_stage'] or n != 'parsed':
1964 ui.write(("* %s:\n") % n)
1964 ui.write(("* %s:\n") % n)
1965 ui.write(revsetlang.prettyformat(tree), "\n")
1965 ui.write(revsetlang.prettyformat(tree), "\n")
1966 printedtree = tree
1966 printedtree = tree
1967
1967
1968 if opts['verify_optimized']:
1968 if opts['verify_optimized']:
1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1971 if ui.verbose:
1971 if ui.verbose:
1972 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1972 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1973 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1973 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1974 arevs = list(arevs)
1974 arevs = list(arevs)
1975 brevs = list(brevs)
1975 brevs = list(brevs)
1976 if arevs == brevs:
1976 if arevs == brevs:
1977 return 0
1977 return 0
1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1982 if tag in ('delete', 'replace'):
1982 if tag in ('delete', 'replace'):
1983 for c in arevs[alo:ahi]:
1983 for c in arevs[alo:ahi]:
1984 ui.write('-%s\n' % c, label='diff.deleted')
1984 ui.write('-%s\n' % c, label='diff.deleted')
1985 if tag in ('insert', 'replace'):
1985 if tag in ('insert', 'replace'):
1986 for c in brevs[blo:bhi]:
1986 for c in brevs[blo:bhi]:
1987 ui.write('+%s\n' % c, label='diff.inserted')
1987 ui.write('+%s\n' % c, label='diff.inserted')
1988 if tag == 'equal':
1988 if tag == 'equal':
1989 for c in arevs[alo:ahi]:
1989 for c in arevs[alo:ahi]:
1990 ui.write(' %s\n' % c)
1990 ui.write(' %s\n' % c)
1991 return 1
1991 return 1
1992
1992
1993 func = revset.makematcher(tree)
1993 func = revset.makematcher(tree)
1994 revs = func(repo)
1994 revs = func(repo)
1995 if ui.verbose:
1995 if ui.verbose:
1996 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1996 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1997 for c in revs:
1997 for c in revs:
1998 ui.write("%s\n" % c)
1998 ui.write("%s\n" % c)
1999
1999
2000 @command('debugsetparents', [], _('REV1 [REV2]'))
2000 @command('debugsetparents', [], _('REV1 [REV2]'))
2001 def debugsetparents(ui, repo, rev1, rev2=None):
2001 def debugsetparents(ui, repo, rev1, rev2=None):
2002 """manually set the parents of the current working directory
2002 """manually set the parents of the current working directory
2003
2003
2004 This is useful for writing repository conversion tools, but should
2004 This is useful for writing repository conversion tools, but should
2005 be used with care. For example, neither the working directory nor the
2005 be used with care. For example, neither the working directory nor the
2006 dirstate is updated, so file status may be incorrect after running this
2006 dirstate is updated, so file status may be incorrect after running this
2007 command.
2007 command.
2008
2008
2009 Returns 0 on success.
2009 Returns 0 on success.
2010 """
2010 """
2011
2011
2012 r1 = scmutil.revsingle(repo, rev1).node()
2012 r1 = scmutil.revsingle(repo, rev1).node()
2013 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2013 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2014
2014
2015 with repo.wlock():
2015 with repo.wlock():
2016 repo.setparents(r1, r2)
2016 repo.setparents(r1, r2)
2017
2017
2018 @command('debugsub',
2018 @command('debugsub',
2019 [('r', 'rev', '',
2019 [('r', 'rev', '',
2020 _('revision to check'), _('REV'))],
2020 _('revision to check'), _('REV'))],
2021 _('[-r REV] [REV]'))
2021 _('[-r REV] [REV]'))
2022 def debugsub(ui, repo, rev=None):
2022 def debugsub(ui, repo, rev=None):
2023 ctx = scmutil.revsingle(repo, rev, None)
2023 ctx = scmutil.revsingle(repo, rev, None)
2024 for k, v in sorted(ctx.substate.items()):
2024 for k, v in sorted(ctx.substate.items()):
2025 ui.write(('path %s\n') % k)
2025 ui.write(('path %s\n') % k)
2026 ui.write((' source %s\n') % v[0])
2026 ui.write((' source %s\n') % v[0])
2027 ui.write((' revision %s\n') % v[1])
2027 ui.write((' revision %s\n') % v[1])
2028
2028
2029 @command('debugsuccessorssets',
2029 @command('debugsuccessorssets',
2030 [],
2030 [],
2031 _('[REV]'))
2031 _('[REV]'))
2032 def debugsuccessorssets(ui, repo, *revs):
2032 def debugsuccessorssets(ui, repo, *revs):
2033 """show set of successors for revision
2033 """show set of successors for revision
2034
2034
2035 A successors set of changeset A is a consistent group of revisions that
2035 A successors set of changeset A is a consistent group of revisions that
2036 succeed A. It contains non-obsolete changesets only.
2036 succeed A. It contains non-obsolete changesets only.
2037
2037
2038 In most cases a changeset A has a single successors set containing a single
2038 In most cases a changeset A has a single successors set containing a single
2039 successor (changeset A replaced by A').
2039 successor (changeset A replaced by A').
2040
2040
2041 A changeset that is made obsolete with no successors is called "pruned".
2041 A changeset that is made obsolete with no successors is called "pruned".
2042 Such changesets have no successors sets at all.
2042 Such changesets have no successors sets at all.
2043
2043
2044 A changeset that has been "split" will have a successors set containing
2044 A changeset that has been "split" will have a successors set containing
2045 more than one successor.
2045 more than one successor.
2046
2046
2047 A changeset that has been rewritten in multiple different ways is called
2047 A changeset that has been rewritten in multiple different ways is called
2048 "divergent". Such changesets have multiple successor sets (each of which
2048 "divergent". Such changesets have multiple successor sets (each of which
2049 may also be split, i.e. have multiple successors).
2049 may also be split, i.e. have multiple successors).
2050
2050
2051 Results are displayed as follows::
2051 Results are displayed as follows::
2052
2052
2053 <rev1>
2053 <rev1>
2054 <successors-1A>
2054 <successors-1A>
2055 <rev2>
2055 <rev2>
2056 <successors-2A>
2056 <successors-2A>
2057 <successors-2B1> <successors-2B2> <successors-2B3>
2057 <successors-2B1> <successors-2B2> <successors-2B3>
2058
2058
2059 Here rev2 has two possible (i.e. divergent) successors sets. The first
2059 Here rev2 has two possible (i.e. divergent) successors sets. The first
2060 holds one element, whereas the second holds three (i.e. the changeset has
2060 holds one element, whereas the second holds three (i.e. the changeset has
2061 been split).
2061 been split).
2062 """
2062 """
2063 # passed to successorssets caching computation from one call to another
2063 # passed to successorssets caching computation from one call to another
2064 cache = {}
2064 cache = {}
2065 ctx2str = str
2065 ctx2str = str
2066 node2str = short
2066 node2str = short
2067 if ui.debug():
2067 if ui.debug():
2068 def ctx2str(ctx):
2068 def ctx2str(ctx):
2069 return ctx.hex()
2069 return ctx.hex()
2070 node2str = hex
2070 node2str = hex
2071 for rev in scmutil.revrange(repo, revs):
2071 for rev in scmutil.revrange(repo, revs):
2072 ctx = repo[rev]
2072 ctx = repo[rev]
2073 ui.write('%s\n'% ctx2str(ctx))
2073 ui.write('%s\n'% ctx2str(ctx))
2074 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2074 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2075 if succsset:
2075 if succsset:
2076 ui.write(' ')
2076 ui.write(' ')
2077 ui.write(node2str(succsset[0]))
2077 ui.write(node2str(succsset[0]))
2078 for node in succsset[1:]:
2078 for node in succsset[1:]:
2079 ui.write(' ')
2079 ui.write(' ')
2080 ui.write(node2str(node))
2080 ui.write(node2str(node))
2081 ui.write('\n')
2081 ui.write('\n')
2082
2082
2083 @command('debugtemplate',
2083 @command('debugtemplate',
2084 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2084 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2085 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2085 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2086 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2086 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2087 optionalrepo=True)
2087 optionalrepo=True)
2088 def debugtemplate(ui, repo, tmpl, **opts):
2088 def debugtemplate(ui, repo, tmpl, **opts):
2089 """parse and apply a template
2089 """parse and apply a template
2090
2090
2091 If -r/--rev is given, the template is processed as a log template and
2091 If -r/--rev is given, the template is processed as a log template and
2092 applied to the given changesets. Otherwise, it is processed as a generic
2092 applied to the given changesets. Otherwise, it is processed as a generic
2093 template.
2093 template.
2094
2094
2095 Use --verbose to print the parsed tree.
2095 Use --verbose to print the parsed tree.
2096 """
2096 """
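# Illustrative invocations (the template text is arbitrary): the first form
# expands a generic template using a keyword defined with -D, the second
# renders a log template against the working directory parent.
#
#   $ hg debugtemplate -D word=hello '{word}\n'
#   hello
#   $ hg debugtemplate -r . '{rev}:{node|short}\n'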
2097 revs = None
2097 revs = None
2098 if opts['rev']:
2098 if opts['rev']:
2099 if repo is None:
2099 if repo is None:
2100 raise error.RepoError(_('there is no Mercurial repository here '
2100 raise error.RepoError(_('there is no Mercurial repository here '
2101 '(.hg not found)'))
2101 '(.hg not found)'))
2102 revs = scmutil.revrange(repo, opts['rev'])
2102 revs = scmutil.revrange(repo, opts['rev'])
2103
2103
2104 props = {}
2104 props = {}
2105 for d in opts['define']:
2105 for d in opts['define']:
2106 try:
2106 try:
2107 k, v = (e.strip() for e in d.split('=', 1))
2107 k, v = (e.strip() for e in d.split('=', 1))
2108 if not k or k == 'ui':
2108 if not k or k == 'ui':
2109 raise ValueError
2109 raise ValueError
2110 props[k] = v
2110 props[k] = v
2111 except ValueError:
2111 except ValueError:
2112 raise error.Abort(_('malformed keyword definition: %s') % d)
2112 raise error.Abort(_('malformed keyword definition: %s') % d)
2113
2113
2114 if ui.verbose:
2114 if ui.verbose:
2115 aliases = ui.configitems('templatealias')
2115 aliases = ui.configitems('templatealias')
2116 tree = templater.parse(tmpl)
2116 tree = templater.parse(tmpl)
2117 ui.note(templater.prettyformat(tree), '\n')
2117 ui.note(templater.prettyformat(tree), '\n')
2118 newtree = templater.expandaliases(tree, aliases)
2118 newtree = templater.expandaliases(tree, aliases)
2119 if newtree != tree:
2119 if newtree != tree:
2120 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2120 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2121
2121
2122 mapfile = None
2122 mapfile = None
2123 if revs is None:
2123 if revs is None:
2124 k = 'debugtemplate'
2124 k = 'debugtemplate'
2125 t = formatter.maketemplater(ui, k, tmpl)
2125 t = formatter.maketemplater(ui, k, tmpl)
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2127 else:
2127 else:
2128 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2128 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2129 mapfile, buffered=False)
2129 mapfile, buffered=False)
2130 for r in revs:
2130 for r in revs:
2131 displayer.show(repo[r], **props)
2131 displayer.show(repo[r], **props)
2132 displayer.close()
2132 displayer.close()
2133
2133
2134 @command('debugupdatecaches', [])
2134 @command('debugupdatecaches', [])
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2136 """warm all known caches in the repository"""
2136 """warm all known caches in the repository"""
2137 with repo.wlock():
2137 with repo.wlock():
2138 with repo.lock():
2138 with repo.lock():
2139 repo.updatecaches()
2139 repo.updatecaches()
2140
2140
2141 @command('debugupgraderepo', [
2141 @command('debugupgraderepo', [
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2143 ('', 'run', False, _('performs an upgrade')),
2143 ('', 'run', False, _('performs an upgrade')),
2144 ])
2144 ])
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2146 """upgrade a repository to use different features
2146 """upgrade a repository to use different features
2147
2147
2148 If no arguments are specified, the repository is evaluated for upgrade
2148 If no arguments are specified, the repository is evaluated for upgrade
2149 and a list of problems and potential optimizations is printed.
2149 and a list of problems and potential optimizations is printed.
2150
2150
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2152 can be influenced via additional arguments. More details will be provided
2152 can be influenced via additional arguments. More details will be provided
2153 by the command output when run without ``--run``.
2153 by the command output when run without ``--run``.
2154
2154
2155 During the upgrade, the repository will be locked and no writes will be
2155 During the upgrade, the repository will be locked and no writes will be
2156 allowed.
2156 allowed.
2157
2157
2158 At the end of the upgrade, the repository may not be readable while new
2158 At the end of the upgrade, the repository may not be readable while new
2159 repository data is swapped in. This window will be as long as it takes to
2159 repository data is swapped in. This window will be as long as it takes to
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2161 should complete almost instantaneously and the chances of a consumer being
2161 should complete almost instantaneously and the chances of a consumer being
2162 unable to access the repository should be low.
2162 unable to access the repository should be low.
2163 """
2163 """
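# Illustrative usage: the report form is read-only and lists, among other
# things, the optimization names that can later be passed to -o/--optimize.
#
#   $ hg debugupgraderepo          # report deficiencies and possible optimizations
#   $ hg debugupgraderepo --run    # actually perform the upgrade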
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2165
2165
2166 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2166 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2167 inferrepo=True)
2167 inferrepo=True)
2168 def debugwalk(ui, repo, *pats, **opts):
2168 def debugwalk(ui, repo, *pats, **opts):
2169 """show how files match on given patterns"""
2169 """show how files match on given patterns"""
2170 m = scmutil.match(repo[None], pats, opts)
2170 m = scmutil.match(repo[None], pats, opts)
2171 items = list(repo[None].walk(m))
2171 items = list(repo[None].walk(m))
2172 if not items:
2172 if not items:
2173 return
2173 return
2174 f = lambda fn: fn
2174 f = lambda fn: fn
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 f = lambda fn: util.normpath(fn)
2176 f = lambda fn: util.normpath(fn)
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 max([len(abs) for abs in items]),
2178 max([len(abs) for abs in items]),
2179 max([len(m.rel(abs)) for abs in items]))
2179 max([len(m.rel(abs)) for abs in items]))
2180 for abs in items:
2180 for abs in items:
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 ui.write("%s\n" % line.rstrip())
2182 ui.write("%s\n" % line.rstrip())
2183
2183
2184 @command('debugwireargs',
2184 @command('debugwireargs',
2185 [('', 'three', '', 'three'),
2185 [('', 'three', '', 'three'),
2186 ('', 'four', '', 'four'),
2186 ('', 'four', '', 'four'),
2187 ('', 'five', '', 'five'),
2187 ('', 'five', '', 'five'),
2188 ] + commands.remoteopts,
2188 ] + cmdutil.remoteopts,
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 norepo=True)
2190 norepo=True)
2191 def debugwireargs(ui, repopath, *vals, **opts):
2191 def debugwireargs(ui, repopath, *vals, **opts):
2192 repo = hg.peer(ui, opts, repopath)
2192 repo = hg.peer(ui, opts, repopath)
2193 for opt in commands.remoteopts:
2193 for opt in cmdutil.remoteopts:
2194 del opts[opt[1]]
2194 del opts[opt[1]]
2195 args = {}
2195 args = {}
2196 for k, v in opts.iteritems():
2196 for k, v in opts.iteritems():
2197 if v:
2197 if v:
2198 args[k] = v
2198 args[k] = v
2199 # run twice to check that we don't mess up the stream for the next command
2199 # run twice to check that we don't mess up the stream for the next command
2200 res1 = repo.debugwireargs(*vals, **args)
2200 res1 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2202 ui.write("%s\n" % res1)
2202 ui.write("%s\n" % res1)
2203 if res1 != res2:
2203 if res1 != res2:
2204 ui.warn("%s\n" % res2)
2204 ui.warn("%s\n" % res2)