addremove: remove dry_run, similarity from scmutil.addremove (API)...
Sushil khanchi
r37286:14cd5290 default
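This changeset updates the perf.py call site for the scmutil.addremove API change named in the commit title: dry_run (and similarity) are no longer passed as keyword arguments but are picked up from the opts dict. A minimal before/after sketch of the call, taken from the hunk below; the behaviour of the new scmutil.addremove itself is inferred from the commit message:

    # before: flags passed as keyword arguments
    scmutil.addremove(repo, matcher, "", dry_run=True)

    # after: flags carried in the opts dict
    opts['dry_run'] = True
    scmutil.addremove(repo, matcher, "", opts)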
@@ -1,1763 +1,1764 @@
1 # perf.py - performance test routines
1 # perf.py - performance test routines
2 '''helper extension to measure performance'''
2 '''helper extension to measure performance'''
3
3
4 # "historical portability" policy of perf.py:
4 # "historical portability" policy of perf.py:
5 #
5 #
6 # We have to do:
6 # We have to do:
7 # - make perf.py "loadable" with as wide Mercurial version as possible
7 # - make perf.py "loadable" with as wide Mercurial version as possible
8 # This doesn't mean that perf commands work correctly with that Mercurial.
8 # This doesn't mean that perf commands work correctly with that Mercurial.
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
10 # - make historical perf command work correctly with as wide Mercurial
10 # - make historical perf command work correctly with as wide Mercurial
11 # version as possible
11 # version as possible
12 #
12 #
13 # We have to do, if possible with reasonable cost:
13 # We have to do, if possible with reasonable cost:
14 # - make recent perf command for historical feature work correctly
14 # - make recent perf command for historical feature work correctly
15 # with early Mercurial
15 # with early Mercurial
16 #
16 #
17 # We don't have to do:
17 # We don't have to do:
18 # - make perf command for recent feature work correctly with early
18 # - make perf command for recent feature work correctly with early
19 # Mercurial
19 # Mercurial
20
20
21 from __future__ import absolute_import
21 from __future__ import absolute_import
22 import functools
22 import functools
23 import gc
23 import gc
24 import os
24 import os
25 import random
25 import random
26 import struct
26 import struct
27 import sys
27 import sys
28 import threading
28 import threading
29 import time
29 import time
30 from mercurial import (
30 from mercurial import (
31 changegroup,
31 changegroup,
32 cmdutil,
32 cmdutil,
33 commands,
33 commands,
34 copies,
34 copies,
35 error,
35 error,
36 extensions,
36 extensions,
37 mdiff,
37 mdiff,
38 merge,
38 merge,
39 revlog,
39 revlog,
40 util,
40 util,
41 )
41 )
42
42
43 # for "historical portability":
43 # for "historical portability":
44 # try to import modules separately (in dict order), and ignore
44 # try to import modules separately (in dict order), and ignore
45 # failure, because these aren't available with early Mercurial
45 # failure, because these aren't available with early Mercurial
46 try:
46 try:
47 from mercurial import branchmap # since 2.5 (or bcee63733aad)
47 from mercurial import branchmap # since 2.5 (or bcee63733aad)
48 except ImportError:
48 except ImportError:
49 pass
49 pass
50 try:
50 try:
51 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
51 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
52 except ImportError:
52 except ImportError:
53 pass
53 pass
54 try:
54 try:
55 from mercurial import registrar # since 3.7 (or 37d50250b696)
55 from mercurial import registrar # since 3.7 (or 37d50250b696)
56 dir(registrar) # forcibly load it
56 dir(registrar) # forcibly load it
57 except ImportError:
57 except ImportError:
58 registrar = None
58 registrar = None
59 try:
59 try:
60 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
60 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
61 except ImportError:
61 except ImportError:
62 pass
62 pass
63 try:
63 try:
64 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
64 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
65 except ImportError:
65 except ImportError:
66 pass
66 pass
67 try:
67 try:
68 from mercurial import pycompat
68 from mercurial import pycompat
69 getargspec = pycompat.getargspec # added to module after 4.5
69 getargspec = pycompat.getargspec # added to module after 4.5
70 except (ImportError, AttributeError):
70 except (ImportError, AttributeError):
71 import inspect
71 import inspect
72 getargspec = inspect.getargspec
72 getargspec = inspect.getargspec
73
73
74 # for "historical portability":
74 # for "historical portability":
75 # define util.safehasattr forcibly, because util.safehasattr has been
75 # define util.safehasattr forcibly, because util.safehasattr has been
76 # available since 1.9.3 (or 94b200a11cf7)
76 # available since 1.9.3 (or 94b200a11cf7)
77 _undefined = object()
77 _undefined = object()
78 def safehasattr(thing, attr):
78 def safehasattr(thing, attr):
79 return getattr(thing, attr, _undefined) is not _undefined
79 return getattr(thing, attr, _undefined) is not _undefined
80 setattr(util, 'safehasattr', safehasattr)
80 setattr(util, 'safehasattr', safehasattr)
81
81
82 # for "historical portability":
82 # for "historical portability":
83 # define util.timer forcibly, because util.timer has been available
83 # define util.timer forcibly, because util.timer has been available
84 # since ae5d60bb70c9
84 # since ae5d60bb70c9
85 if safehasattr(time, 'perf_counter'):
85 if safehasattr(time, 'perf_counter'):
86 util.timer = time.perf_counter
86 util.timer = time.perf_counter
87 elif os.name == 'nt':
87 elif os.name == 'nt':
88 util.timer = time.clock
88 util.timer = time.clock
89 else:
89 else:
90 util.timer = time.time
90 util.timer = time.time
91
91
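A small usage sketch of the timer chosen above, mirroring how _timer() uses it later in this file (illustrative only; expensive_operation is a hypothetical workload):

    start = util.timer()
    expensive_operation()              # hypothetical workload being measured
    elapsed = util.timer() - start     # seconds, at the best resolution available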
92 # for "historical portability":
92 # for "historical portability":
93 # use locally defined empty option list, if formatteropts isn't
93 # use locally defined empty option list, if formatteropts isn't
94 # available, because commands.formatteropts has been available since
94 # available, because commands.formatteropts has been available since
95 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
95 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
96 # available since 2.2 (or ae5f92e154d3)
96 # available since 2.2 (or ae5f92e154d3)
97 formatteropts = getattr(cmdutil, "formatteropts",
97 formatteropts = getattr(cmdutil, "formatteropts",
98 getattr(commands, "formatteropts", []))
98 getattr(commands, "formatteropts", []))
99
99
100 # for "historical portability":
100 # for "historical portability":
101 # use locally defined option list, if debugrevlogopts isn't available,
101 # use locally defined option list, if debugrevlogopts isn't available,
102 # because commands.debugrevlogopts has been available since 3.7 (or
102 # because commands.debugrevlogopts has been available since 3.7 (or
103 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
103 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
104 # since 1.9 (or a79fea6b3e77).
104 # since 1.9 (or a79fea6b3e77).
105 revlogopts = getattr(cmdutil, "debugrevlogopts",
105 revlogopts = getattr(cmdutil, "debugrevlogopts",
106 getattr(commands, "debugrevlogopts", [
106 getattr(commands, "debugrevlogopts", [
107 ('c', 'changelog', False, ('open changelog')),
107 ('c', 'changelog', False, ('open changelog')),
108 ('m', 'manifest', False, ('open manifest')),
108 ('m', 'manifest', False, ('open manifest')),
109 ('', 'dir', False, ('open directory manifest')),
109 ('', 'dir', False, ('open directory manifest')),
110 ]))
110 ]))
111
111
112 cmdtable = {}
112 cmdtable = {}
113
113
114 # for "historical portability":
114 # for "historical portability":
115 # define parsealiases locally, because cmdutil.parsealiases has been
115 # define parsealiases locally, because cmdutil.parsealiases has been
116 # available since 1.5 (or 6252852b4332)
116 # available since 1.5 (or 6252852b4332)
117 def parsealiases(cmd):
117 def parsealiases(cmd):
118 return cmd.lstrip("^").split("|")
118 return cmd.lstrip("^").split("|")
119
119
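For illustration (not part of the diff; the command names are made up), the local parsealiases() simply splits a command declaration into its aliases:

    parsealiases("^perfwalk|perfw")    # -> ['perfwalk', 'perfw']
    parsealiases("perfstatus")         # -> ['perfstatus']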
120 if safehasattr(registrar, 'command'):
120 if safehasattr(registrar, 'command'):
121 command = registrar.command(cmdtable)
121 command = registrar.command(cmdtable)
122 elif safehasattr(cmdutil, 'command'):
122 elif safehasattr(cmdutil, 'command'):
123 command = cmdutil.command(cmdtable)
123 command = cmdutil.command(cmdtable)
124 if 'norepo' not in getargspec(command).args:
124 if 'norepo' not in getargspec(command).args:
125 # for "historical portability":
125 # for "historical portability":
126 # wrap original cmdutil.command, because "norepo" option has
126 # wrap original cmdutil.command, because "norepo" option has
127 # been available since 3.1 (or 75a96326cecb)
127 # been available since 3.1 (or 75a96326cecb)
128 _command = command
128 _command = command
129 def command(name, options=(), synopsis=None, norepo=False):
129 def command(name, options=(), synopsis=None, norepo=False):
130 if norepo:
130 if norepo:
131 commands.norepo += ' %s' % ' '.join(parsealiases(name))
131 commands.norepo += ' %s' % ' '.join(parsealiases(name))
132 return _command(name, list(options), synopsis)
132 return _command(name, list(options), synopsis)
133 else:
133 else:
134 # for "historical portability":
134 # for "historical portability":
135 # define "@command" annotation locally, because cmdutil.command
135 # define "@command" annotation locally, because cmdutil.command
136 # has been available since 1.9 (or 2daa5179e73f)
136 # has been available since 1.9 (or 2daa5179e73f)
137 def command(name, options=(), synopsis=None, norepo=False):
137 def command(name, options=(), synopsis=None, norepo=False):
138 def decorator(func):
138 def decorator(func):
139 if synopsis:
139 if synopsis:
140 cmdtable[name] = func, list(options), synopsis
140 cmdtable[name] = func, list(options), synopsis
141 else:
141 else:
142 cmdtable[name] = func, list(options)
142 cmdtable[name] = func, list(options)
143 if norepo:
143 if norepo:
144 commands.norepo += ' %s' % ' '.join(parsealiases(name))
144 commands.norepo += ' %s' % ' '.join(parsealiases(name))
145 return func
145 return func
146 return decorator
146 return decorator
147
147
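Whichever branch above wins, the resulting @command decorator is used identically by every perf command in this file; a minimal sketch with a made-up command name:

    @command('perfexample', formatteropts)
    def perfexample(ui, repo, **opts):
        timer, fm = gettimer(ui, opts)
        timer(lambda: len(repo))       # benchmark counting changesets
        fm.end()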
148 try:
148 try:
149 import mercurial.registrar
149 import mercurial.registrar
150 import mercurial.configitems
150 import mercurial.configitems
151 configtable = {}
151 configtable = {}
152 configitem = mercurial.registrar.configitem(configtable)
152 configitem = mercurial.registrar.configitem(configtable)
153 configitem('perf', 'presleep',
153 configitem('perf', 'presleep',
154 default=mercurial.configitems.dynamicdefault,
154 default=mercurial.configitems.dynamicdefault,
155 )
155 )
156 configitem('perf', 'stub',
156 configitem('perf', 'stub',
157 default=mercurial.configitems.dynamicdefault,
157 default=mercurial.configitems.dynamicdefault,
158 )
158 )
159 configitem('perf', 'parentscount',
159 configitem('perf', 'parentscount',
160 default=mercurial.configitems.dynamicdefault,
160 default=mercurial.configitems.dynamicdefault,
161 )
161 )
162 except (ImportError, AttributeError):
162 except (ImportError, AttributeError):
163 pass
163 pass
164
164
165 def getlen(ui):
165 def getlen(ui):
166 if ui.configbool("perf", "stub", False):
166 if ui.configbool("perf", "stub", False):
167 return lambda x: 1
167 return lambda x: 1
168 return len
168 return len
169
169
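getlen() exists so that perf.stub mode can pretend every collection has length 1, letting commands finish without doing real work; for illustration:

    length = getlen(ui)
    length([1, 2, 3])                  # 3 normally, 1 when perf.stub is set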
170 def gettimer(ui, opts=None):
170 def gettimer(ui, opts=None):
171 """return a timer function and formatter: (timer, formatter)
171 """return a timer function and formatter: (timer, formatter)
172
172
173 This function exists to gather the creation of formatter in a single
173 This function exists to gather the creation of formatter in a single
174 place instead of duplicating it in all performance commands."""
174 place instead of duplicating it in all performance commands."""
175
175
176 # enforce an idle period before execution to counteract power management
176 # enforce an idle period before execution to counteract power management
177 # experimental config: perf.presleep
177 # experimental config: perf.presleep
178 time.sleep(getint(ui, "perf", "presleep", 1))
178 time.sleep(getint(ui, "perf", "presleep", 1))
179
179
180 if opts is None:
180 if opts is None:
181 opts = {}
181 opts = {}
182 # redirect all to stderr unless buffer api is in use
182 # redirect all to stderr unless buffer api is in use
183 if not ui._buffers:
183 if not ui._buffers:
184 ui = ui.copy()
184 ui = ui.copy()
185 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
185 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
186 if uifout:
186 if uifout:
187 # for "historical portability":
187 # for "historical portability":
188 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
188 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
189 uifout.set(ui.ferr)
189 uifout.set(ui.ferr)
190
190
191 # get a formatter
191 # get a formatter
192 uiformatter = getattr(ui, 'formatter', None)
192 uiformatter = getattr(ui, 'formatter', None)
193 if uiformatter:
193 if uiformatter:
194 fm = uiformatter('perf', opts)
194 fm = uiformatter('perf', opts)
195 else:
195 else:
196 # for "historical portability":
196 # for "historical portability":
197 # define formatter locally, because ui.formatter has been
197 # define formatter locally, because ui.formatter has been
198 # available since 2.2 (or ae5f92e154d3)
198 # available since 2.2 (or ae5f92e154d3)
199 from mercurial import node
199 from mercurial import node
200 class defaultformatter(object):
200 class defaultformatter(object):
201 """Minimized composition of baseformatter and plainformatter
201 """Minimized composition of baseformatter and plainformatter
202 """
202 """
203 def __init__(self, ui, topic, opts):
203 def __init__(self, ui, topic, opts):
204 self._ui = ui
204 self._ui = ui
205 if ui.debugflag:
205 if ui.debugflag:
206 self.hexfunc = node.hex
206 self.hexfunc = node.hex
207 else:
207 else:
208 self.hexfunc = node.short
208 self.hexfunc = node.short
209 def __nonzero__(self):
209 def __nonzero__(self):
210 return False
210 return False
211 __bool__ = __nonzero__
211 __bool__ = __nonzero__
212 def startitem(self):
212 def startitem(self):
213 pass
213 pass
214 def data(self, **data):
214 def data(self, **data):
215 pass
215 pass
216 def write(self, fields, deftext, *fielddata, **opts):
216 def write(self, fields, deftext, *fielddata, **opts):
217 self._ui.write(deftext % fielddata, **opts)
217 self._ui.write(deftext % fielddata, **opts)
218 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
218 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
219 if cond:
219 if cond:
220 self._ui.write(deftext % fielddata, **opts)
220 self._ui.write(deftext % fielddata, **opts)
221 def plain(self, text, **opts):
221 def plain(self, text, **opts):
222 self._ui.write(text, **opts)
222 self._ui.write(text, **opts)
223 def end(self):
223 def end(self):
224 pass
224 pass
225 fm = defaultformatter(ui, 'perf', opts)
225 fm = defaultformatter(ui, 'perf', opts)
226
226
227 # stub function, runs code only once instead of in a loop
227 # stub function, runs code only once instead of in a loop
228 # experimental config: perf.stub
228 # experimental config: perf.stub
229 if ui.configbool("perf", "stub", False):
229 if ui.configbool("perf", "stub", False):
230 return functools.partial(stub_timer, fm), fm
230 return functools.partial(stub_timer, fm), fm
231 return functools.partial(_timer, fm), fm
231 return functools.partial(_timer, fm), fm
232
232
233 def stub_timer(fm, func, title=None):
233 def stub_timer(fm, func, title=None):
234 func()
234 func()
235
235
236 def _timer(fm, func, title=None):
236 def _timer(fm, func, title=None):
237 gc.collect()
237 gc.collect()
238 results = []
238 results = []
239 begin = util.timer()
239 begin = util.timer()
240 count = 0
240 count = 0
241 while True:
241 while True:
242 ostart = os.times()
242 ostart = os.times()
243 cstart = util.timer()
243 cstart = util.timer()
244 r = func()
244 r = func()
245 cstop = util.timer()
245 cstop = util.timer()
246 ostop = os.times()
246 ostop = os.times()
247 count += 1
247 count += 1
248 a, b = ostart, ostop
248 a, b = ostart, ostop
249 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
249 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
250 if cstop - begin > 3 and count >= 100:
250 if cstop - begin > 3 and count >= 100:
251 break
251 break
252 if cstop - begin > 10 and count >= 3:
252 if cstop - begin > 10 and count >= 3:
253 break
253 break
254
254
255 fm.startitem()
255 fm.startitem()
256
256
257 if title:
257 if title:
258 fm.write('title', '! %s\n', title)
258 fm.write('title', '! %s\n', title)
259 if r:
259 if r:
260 fm.write('result', '! result: %s\n', r)
260 fm.write('result', '! result: %s\n', r)
261 m = min(results)
261 m = min(results)
262 fm.plain('!')
262 fm.plain('!')
263 fm.write('wall', ' wall %f', m[0])
263 fm.write('wall', ' wall %f', m[0])
264 fm.write('comb', ' comb %f', m[1] + m[2])
264 fm.write('comb', ' comb %f', m[1] + m[2])
265 fm.write('user', ' user %f', m[1])
265 fm.write('user', ' user %f', m[1])
266 fm.write('sys', ' sys %f', m[2])
266 fm.write('sys', ' sys %f', m[2])
267 fm.write('count', ' (best of %d)', count)
267 fm.write('count', ' (best of %d)', count)
268 fm.plain('\n')
268 fm.plain('\n')
269
269
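The loop in _timer() above re-runs func() until either more than 3 seconds have elapsed and at least 100 runs have finished, or more than 10 seconds have elapsed and at least 3 runs have finished; only the fastest sample is reported. A compact restatement of that stopping rule (same logic, illustrative only):

    def enough_samples(elapsed, count):
        # mirrors the two break conditions in _timer()
        return (elapsed > 3 and count >= 100) or (elapsed > 10 and count >= 3)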
270 # utilities for historical portability
270 # utilities for historical portability
271
271
272 def getint(ui, section, name, default):
272 def getint(ui, section, name, default):
273 # for "historical portability":
273 # for "historical portability":
274 # ui.configint has been available since 1.9 (or fa2b596db182)
274 # ui.configint has been available since 1.9 (or fa2b596db182)
275 v = ui.config(section, name, None)
275 v = ui.config(section, name, None)
276 if v is None:
276 if v is None:
277 return default
277 return default
278 try:
278 try:
279 return int(v)
279 return int(v)
280 except ValueError:
280 except ValueError:
281 raise error.ConfigError(("%s.%s is not an integer ('%s')")
281 raise error.ConfigError(("%s.%s is not an integer ('%s')")
282 % (section, name, v))
282 % (section, name, v))
283
283
284 def safeattrsetter(obj, name, ignoremissing=False):
284 def safeattrsetter(obj, name, ignoremissing=False):
285 """Ensure that 'obj' has 'name' attribute before subsequent setattr
285 """Ensure that 'obj' has 'name' attribute before subsequent setattr
286
286
287 This function aborts if 'obj' doesn't have the 'name' attribute
287 This function aborts if 'obj' doesn't have the 'name' attribute
288 at runtime. This avoids overlooking future removal of an attribute,
288 at runtime. This avoids overlooking future removal of an attribute,
289 which would break assumptions behind the performance measurement.
289 which would break assumptions behind the performance measurement.
290
290
291 This function returns the object to (1) assign a new value, and
291 This function returns the object to (1) assign a new value, and
292 (2) restore an original value to the attribute.
292 (2) restore an original value to the attribute.
293
293
294 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
294 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
295 abortion, and this function returns None. This is useful to
295 abortion, and this function returns None. This is useful to
296 examine an attribute, which isn't ensured in all Mercurial
296 examine an attribute, which isn't ensured in all Mercurial
297 versions.
297 versions.
298 """
298 """
299 if not util.safehasattr(obj, name):
299 if not util.safehasattr(obj, name):
300 if ignoremissing:
300 if ignoremissing:
301 return None
301 return None
302 raise error.Abort(("missing attribute %s of %s might break assumption"
302 raise error.Abort(("missing attribute %s of %s might break assumption"
303 " of performance measurement") % (name, obj))
303 " of performance measurement") % (name, obj))
304
304
305 origvalue = getattr(obj, name)
305 origvalue = getattr(obj, name)
306 class attrutil(object):
306 class attrutil(object):
307 def set(self, newvalue):
307 def set(self, newvalue):
308 setattr(obj, name, newvalue)
308 setattr(obj, name, newvalue)
309 def restore(self):
309 def restore(self):
310 setattr(obj, name, origvalue)
310 setattr(obj, name, origvalue)
311
311
312 return attrutil()
312 return attrutil()
313
313
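A short usage sketch of safeattrsetter(), mirroring how gettimer() redirects ui.fout earlier in this file (run_benchmark is a hypothetical workload):

    uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
    if uifout:
        uifout.set(ui.ferr)            # temporarily point fout at ferr
        try:
            run_benchmark()            # hypothetical workload
        finally:
            uifout.restore()           # put the original stream back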
314 # utilities to examine internal API changes
314 # utilities to examine internal API changes
315
315
316 def getbranchmapsubsettable():
316 def getbranchmapsubsettable():
317 # for "historical portability":
317 # for "historical portability":
318 # subsettable is defined in:
318 # subsettable is defined in:
319 # - branchmap since 2.9 (or 175c6fd8cacc)
319 # - branchmap since 2.9 (or 175c6fd8cacc)
320 # - repoview since 2.5 (or 59a9f18d4587)
320 # - repoview since 2.5 (or 59a9f18d4587)
321 for mod in (branchmap, repoview):
321 for mod in (branchmap, repoview):
322 subsettable = getattr(mod, 'subsettable', None)
322 subsettable = getattr(mod, 'subsettable', None)
323 if subsettable:
323 if subsettable:
324 return subsettable
324 return subsettable
325
325
326 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
326 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
327 # branchmap and repoview modules exist, but subsettable attribute
327 # branchmap and repoview modules exist, but subsettable attribute
328 # doesn't)
328 # doesn't)
329 raise error.Abort(("perfbranchmap not available with this Mercurial"),
329 raise error.Abort(("perfbranchmap not available with this Mercurial"),
330 hint="use 2.5 or later")
330 hint="use 2.5 or later")
331
331
332 def getsvfs(repo):
332 def getsvfs(repo):
333 """Return appropriate object to access files under .hg/store
333 """Return appropriate object to access files under .hg/store
334 """
334 """
335 # for "historical portability":
335 # for "historical portability":
336 # repo.svfs has been available since 2.3 (or 7034365089bf)
336 # repo.svfs has been available since 2.3 (or 7034365089bf)
337 svfs = getattr(repo, 'svfs', None)
337 svfs = getattr(repo, 'svfs', None)
338 if svfs:
338 if svfs:
339 return svfs
339 return svfs
340 else:
340 else:
341 return getattr(repo, 'sopener')
341 return getattr(repo, 'sopener')
342
342
343 def getvfs(repo):
343 def getvfs(repo):
344 """Return appropriate object to access files under .hg
344 """Return appropriate object to access files under .hg
345 """
345 """
346 # for "historical portability":
346 # for "historical portability":
347 # repo.vfs has been available since 2.3 (or 7034365089bf)
347 # repo.vfs has been available since 2.3 (or 7034365089bf)
348 vfs = getattr(repo, 'vfs', None)
348 vfs = getattr(repo, 'vfs', None)
349 if vfs:
349 if vfs:
350 return vfs
350 return vfs
351 else:
351 else:
352 return getattr(repo, 'opener')
352 return getattr(repo, 'opener')
353
353
354 def repocleartagscachefunc(repo):
354 def repocleartagscachefunc(repo):
355 """Return the function to clear tags cache according to repo internal API
355 """Return the function to clear tags cache according to repo internal API
356 """
356 """
357 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
357 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
358 # in this case, setattr(repo, '_tagscache', None) or so isn't
358 # in this case, setattr(repo, '_tagscache', None) or so isn't
359 # correct way to clear tags cache, because existing code paths
359 # correct way to clear tags cache, because existing code paths
360 # expect _tagscache to be a structured object.
360 # expect _tagscache to be a structured object.
361 def clearcache():
361 def clearcache():
362 # _tagscache has been filteredpropertycache since 2.5 (or
362 # _tagscache has been filteredpropertycache since 2.5 (or
363 # 98c867ac1330), and delattr() can't work in such case
363 # 98c867ac1330), and delattr() can't work in such case
364 if '_tagscache' in vars(repo):
364 if '_tagscache' in vars(repo):
365 del repo.__dict__['_tagscache']
365 del repo.__dict__['_tagscache']
366 return clearcache
366 return clearcache
367
367
368 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
368 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
369 if repotags: # since 1.4 (or 5614a628d173)
369 if repotags: # since 1.4 (or 5614a628d173)
370 return lambda : repotags.set(None)
370 return lambda : repotags.set(None)
371
371
372 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
372 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
373 if repotagscache: # since 0.6 (or d7df759d0e97)
373 if repotagscache: # since 0.6 (or d7df759d0e97)
374 return lambda : repotagscache.set(None)
374 return lambda : repotagscache.set(None)
375
375
376 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
376 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
377 # this point, but it isn't so problematic, because:
377 # this point, but it isn't so problematic, because:
378 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
378 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
379 # in perftags() causes failure soon
379 # in perftags() causes failure soon
380 # - perf.py itself has been available since 1.1 (or eb240755386d)
380 # - perf.py itself has been available since 1.1 (or eb240755386d)
381 raise error.Abort(("tags API of this hg command is unknown"))
381 raise error.Abort(("tags API of this hg command is unknown"))
382
382
383 # utilities to clear cache
383 # utilities to clear cache
384
384
385 def clearfilecache(repo, attrname):
385 def clearfilecache(repo, attrname):
386 unfi = repo.unfiltered()
386 unfi = repo.unfiltered()
387 if attrname in vars(unfi):
387 if attrname in vars(unfi):
388 delattr(unfi, attrname)
388 delattr(unfi, attrname)
389 unfi._filecache.pop(attrname, None)
389 unfi._filecache.pop(attrname, None)
390
390
391 # perf commands
391 # perf commands
392
392
393 @command('perfwalk', formatteropts)
393 @command('perfwalk', formatteropts)
394 def perfwalk(ui, repo, *pats, **opts):
394 def perfwalk(ui, repo, *pats, **opts):
395 timer, fm = gettimer(ui, opts)
395 timer, fm = gettimer(ui, opts)
396 m = scmutil.match(repo[None], pats, {})
396 m = scmutil.match(repo[None], pats, {})
397 timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
397 timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
398 ignored=False))))
398 ignored=False))))
399 fm.end()
399 fm.end()
400
400
401 @command('perfannotate', formatteropts)
401 @command('perfannotate', formatteropts)
402 def perfannotate(ui, repo, f, **opts):
402 def perfannotate(ui, repo, f, **opts):
403 timer, fm = gettimer(ui, opts)
403 timer, fm = gettimer(ui, opts)
404 fc = repo['.'][f]
404 fc = repo['.'][f]
405 timer(lambda: len(fc.annotate(True)))
405 timer(lambda: len(fc.annotate(True)))
406 fm.end()
406 fm.end()
407
407
408 @command('perfstatus',
408 @command('perfstatus',
409 [('u', 'unknown', False,
409 [('u', 'unknown', False,
410 'ask status to look for unknown files')] + formatteropts)
410 'ask status to look for unknown files')] + formatteropts)
411 def perfstatus(ui, repo, **opts):
411 def perfstatus(ui, repo, **opts):
412 #m = match.always(repo.root, repo.getcwd())
412 #m = match.always(repo.root, repo.getcwd())
413 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
413 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
414 # False))))
414 # False))))
415 timer, fm = gettimer(ui, opts)
415 timer, fm = gettimer(ui, opts)
416 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
416 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
417 fm.end()
417 fm.end()
418
418
419 @command('perfaddremove', formatteropts)
419 @command('perfaddremove', formatteropts)
420 def perfaddremove(ui, repo, **opts):
420 def perfaddremove(ui, repo, **opts):
421 timer, fm = gettimer(ui, opts)
421 timer, fm = gettimer(ui, opts)
422 try:
422 try:
423 oldquiet = repo.ui.quiet
423 oldquiet = repo.ui.quiet
424 repo.ui.quiet = True
424 repo.ui.quiet = True
425 matcher = scmutil.match(repo[None])
425 matcher = scmutil.match(repo[None])
426 - timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
426 + opts['dry_run'] = True
427 + timer(lambda: scmutil.addremove(repo, matcher, "", opts))
427 finally:
428 finally:
428 repo.ui.quiet = oldquiet
429 repo.ui.quiet = oldquiet
429 fm.end()
430 fm.end()
430
431
431 def clearcaches(cl):
432 def clearcaches(cl):
432 # behave somewhat consistently across internal API changes
433 # behave somewhat consistently across internal API changes
433 if util.safehasattr(cl, 'clearcaches'):
434 if util.safehasattr(cl, 'clearcaches'):
434 cl.clearcaches()
435 cl.clearcaches()
435 elif util.safehasattr(cl, '_nodecache'):
436 elif util.safehasattr(cl, '_nodecache'):
436 from mercurial.node import nullid, nullrev
437 from mercurial.node import nullid, nullrev
437 cl._nodecache = {nullid: nullrev}
438 cl._nodecache = {nullid: nullrev}
438 cl._nodepos = None
439 cl._nodepos = None
439
440
440 @command('perfheads', formatteropts)
441 @command('perfheads', formatteropts)
441 def perfheads(ui, repo, **opts):
442 def perfheads(ui, repo, **opts):
442 timer, fm = gettimer(ui, opts)
443 timer, fm = gettimer(ui, opts)
443 cl = repo.changelog
444 cl = repo.changelog
444 def d():
445 def d():
445 len(cl.headrevs())
446 len(cl.headrevs())
446 clearcaches(cl)
447 clearcaches(cl)
447 timer(d)
448 timer(d)
448 fm.end()
449 fm.end()
449
450
450 @command('perftags', formatteropts)
451 @command('perftags', formatteropts)
451 def perftags(ui, repo, **opts):
452 def perftags(ui, repo, **opts):
452 import mercurial.changelog
453 import mercurial.changelog
453 import mercurial.manifest
454 import mercurial.manifest
454 timer, fm = gettimer(ui, opts)
455 timer, fm = gettimer(ui, opts)
455 svfs = getsvfs(repo)
456 svfs = getsvfs(repo)
456 repocleartagscache = repocleartagscachefunc(repo)
457 repocleartagscache = repocleartagscachefunc(repo)
457 def t():
458 def t():
458 repo.changelog = mercurial.changelog.changelog(svfs)
459 repo.changelog = mercurial.changelog.changelog(svfs)
459 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
460 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
460 repocleartagscache()
461 repocleartagscache()
461 return len(repo.tags())
462 return len(repo.tags())
462 timer(t)
463 timer(t)
463 fm.end()
464 fm.end()
464
465
465 @command('perfancestors', formatteropts)
466 @command('perfancestors', formatteropts)
466 def perfancestors(ui, repo, **opts):
467 def perfancestors(ui, repo, **opts):
467 timer, fm = gettimer(ui, opts)
468 timer, fm = gettimer(ui, opts)
468 heads = repo.changelog.headrevs()
469 heads = repo.changelog.headrevs()
469 def d():
470 def d():
470 for a in repo.changelog.ancestors(heads):
471 for a in repo.changelog.ancestors(heads):
471 pass
472 pass
472 timer(d)
473 timer(d)
473 fm.end()
474 fm.end()
474
475
475 @command('perfancestorset', formatteropts)
476 @command('perfancestorset', formatteropts)
476 def perfancestorset(ui, repo, revset, **opts):
477 def perfancestorset(ui, repo, revset, **opts):
477 timer, fm = gettimer(ui, opts)
478 timer, fm = gettimer(ui, opts)
478 revs = repo.revs(revset)
479 revs = repo.revs(revset)
479 heads = repo.changelog.headrevs()
480 heads = repo.changelog.headrevs()
480 def d():
481 def d():
481 s = repo.changelog.ancestors(heads)
482 s = repo.changelog.ancestors(heads)
482 for rev in revs:
483 for rev in revs:
483 rev in s
484 rev in s
484 timer(d)
485 timer(d)
485 fm.end()
486 fm.end()
486
487
487 @command('perfbookmarks', formatteropts)
488 @command('perfbookmarks', formatteropts)
488 def perfbookmarks(ui, repo, **opts):
489 def perfbookmarks(ui, repo, **opts):
489 """benchmark parsing bookmarks from disk to memory"""
490 """benchmark parsing bookmarks from disk to memory"""
490 timer, fm = gettimer(ui, opts)
491 timer, fm = gettimer(ui, opts)
491 def d():
492 def d():
492 clearfilecache(repo, '_bookmarks')
493 clearfilecache(repo, '_bookmarks')
493 repo._bookmarks
494 repo._bookmarks
494 timer(d)
495 timer(d)
495 fm.end()
496 fm.end()
496
497
497 @command('perfbundleread', formatteropts, 'BUNDLE')
498 @command('perfbundleread', formatteropts, 'BUNDLE')
498 def perfbundleread(ui, repo, bundlepath, **opts):
499 def perfbundleread(ui, repo, bundlepath, **opts):
499 """Benchmark reading of bundle files.
500 """Benchmark reading of bundle files.
500
501
501 This command is meant to isolate the I/O part of bundle reading as
502 This command is meant to isolate the I/O part of bundle reading as
502 much as possible.
503 much as possible.
503 """
504 """
504 from mercurial import (
505 from mercurial import (
505 bundle2,
506 bundle2,
506 exchange,
507 exchange,
507 streamclone,
508 streamclone,
508 )
509 )
509
510
510 def makebench(fn):
511 def makebench(fn):
511 def run():
512 def run():
512 with open(bundlepath, 'rb') as fh:
513 with open(bundlepath, 'rb') as fh:
513 bundle = exchange.readbundle(ui, fh, bundlepath)
514 bundle = exchange.readbundle(ui, fh, bundlepath)
514 fn(bundle)
515 fn(bundle)
515
516
516 return run
517 return run
517
518
518 def makereadnbytes(size):
519 def makereadnbytes(size):
519 def run():
520 def run():
520 with open(bundlepath, 'rb') as fh:
521 with open(bundlepath, 'rb') as fh:
521 bundle = exchange.readbundle(ui, fh, bundlepath)
522 bundle = exchange.readbundle(ui, fh, bundlepath)
522 while bundle.read(size):
523 while bundle.read(size):
523 pass
524 pass
524
525
525 return run
526 return run
526
527
527 def makestdioread(size):
528 def makestdioread(size):
528 def run():
529 def run():
529 with open(bundlepath, 'rb') as fh:
530 with open(bundlepath, 'rb') as fh:
530 while fh.read(size):
531 while fh.read(size):
531 pass
532 pass
532
533
533 return run
534 return run
534
535
535 # bundle1
536 # bundle1
536
537
537 def deltaiter(bundle):
538 def deltaiter(bundle):
538 for delta in bundle.deltaiter():
539 for delta in bundle.deltaiter():
539 pass
540 pass
540
541
541 def iterchunks(bundle):
542 def iterchunks(bundle):
542 for chunk in bundle.getchunks():
543 for chunk in bundle.getchunks():
543 pass
544 pass
544
545
545 # bundle2
546 # bundle2
546
547
547 def forwardchunks(bundle):
548 def forwardchunks(bundle):
548 for chunk in bundle._forwardchunks():
549 for chunk in bundle._forwardchunks():
549 pass
550 pass
550
551
551 def iterparts(bundle):
552 def iterparts(bundle):
552 for part in bundle.iterparts():
553 for part in bundle.iterparts():
553 pass
554 pass
554
555
555 def iterpartsseekable(bundle):
556 def iterpartsseekable(bundle):
556 for part in bundle.iterparts(seekable=True):
557 for part in bundle.iterparts(seekable=True):
557 pass
558 pass
558
559
559 def seek(bundle):
560 def seek(bundle):
560 for part in bundle.iterparts(seekable=True):
561 for part in bundle.iterparts(seekable=True):
561 part.seek(0, os.SEEK_END)
562 part.seek(0, os.SEEK_END)
562
563
563 def makepartreadnbytes(size):
564 def makepartreadnbytes(size):
564 def run():
565 def run():
565 with open(bundlepath, 'rb') as fh:
566 with open(bundlepath, 'rb') as fh:
566 bundle = exchange.readbundle(ui, fh, bundlepath)
567 bundle = exchange.readbundle(ui, fh, bundlepath)
567 for part in bundle.iterparts():
568 for part in bundle.iterparts():
568 while part.read(size):
569 while part.read(size):
569 pass
570 pass
570
571
571 return run
572 return run
572
573
573 benches = [
574 benches = [
574 (makestdioread(8192), 'read(8k)'),
575 (makestdioread(8192), 'read(8k)'),
575 (makestdioread(16384), 'read(16k)'),
576 (makestdioread(16384), 'read(16k)'),
576 (makestdioread(32768), 'read(32k)'),
577 (makestdioread(32768), 'read(32k)'),
577 (makestdioread(131072), 'read(128k)'),
578 (makestdioread(131072), 'read(128k)'),
578 ]
579 ]
579
580
580 with open(bundlepath, 'rb') as fh:
581 with open(bundlepath, 'rb') as fh:
581 bundle = exchange.readbundle(ui, fh, bundlepath)
582 bundle = exchange.readbundle(ui, fh, bundlepath)
582
583
583 if isinstance(bundle, changegroup.cg1unpacker):
584 if isinstance(bundle, changegroup.cg1unpacker):
584 benches.extend([
585 benches.extend([
585 (makebench(deltaiter), 'cg1 deltaiter()'),
586 (makebench(deltaiter), 'cg1 deltaiter()'),
586 (makebench(iterchunks), 'cg1 getchunks()'),
587 (makebench(iterchunks), 'cg1 getchunks()'),
587 (makereadnbytes(8192), 'cg1 read(8k)'),
588 (makereadnbytes(8192), 'cg1 read(8k)'),
588 (makereadnbytes(16384), 'cg1 read(16k)'),
589 (makereadnbytes(16384), 'cg1 read(16k)'),
589 (makereadnbytes(32768), 'cg1 read(32k)'),
590 (makereadnbytes(32768), 'cg1 read(32k)'),
590 (makereadnbytes(131072), 'cg1 read(128k)'),
591 (makereadnbytes(131072), 'cg1 read(128k)'),
591 ])
592 ])
592 elif isinstance(bundle, bundle2.unbundle20):
593 elif isinstance(bundle, bundle2.unbundle20):
593 benches.extend([
594 benches.extend([
594 (makebench(forwardchunks), 'bundle2 forwardchunks()'),
595 (makebench(forwardchunks), 'bundle2 forwardchunks()'),
595 (makebench(iterparts), 'bundle2 iterparts()'),
596 (makebench(iterparts), 'bundle2 iterparts()'),
596 (makebench(iterpartsseekable), 'bundle2 iterparts() seekable'),
597 (makebench(iterpartsseekable), 'bundle2 iterparts() seekable'),
597 (makebench(seek), 'bundle2 part seek()'),
598 (makebench(seek), 'bundle2 part seek()'),
598 (makepartreadnbytes(8192), 'bundle2 part read(8k)'),
599 (makepartreadnbytes(8192), 'bundle2 part read(8k)'),
599 (makepartreadnbytes(16384), 'bundle2 part read(16k)'),
600 (makepartreadnbytes(16384), 'bundle2 part read(16k)'),
600 (makepartreadnbytes(32768), 'bundle2 part read(32k)'),
601 (makepartreadnbytes(32768), 'bundle2 part read(32k)'),
601 (makepartreadnbytes(131072), 'bundle2 part read(128k)'),
602 (makepartreadnbytes(131072), 'bundle2 part read(128k)'),
602 ])
603 ])
603 elif isinstance(bundle, streamclone.streamcloneapplier):
604 elif isinstance(bundle, streamclone.streamcloneapplier):
604 raise error.Abort('stream clone bundles not supported')
605 raise error.Abort('stream clone bundles not supported')
605 else:
606 else:
606 raise error.Abort('unhandled bundle type: %s' % type(bundle))
607 raise error.Abort('unhandled bundle type: %s' % type(bundle))
607
608
608 for fn, title in benches:
609 for fn, title in benches:
609 timer, fm = gettimer(ui, opts)
610 timer, fm = gettimer(ui, opts)
610 timer(fn, title=title)
611 timer(fn, title=title)
611 fm.end()
612 fm.end()
612
613
613 @command('perfchangegroupchangelog', formatteropts +
614 @command('perfchangegroupchangelog', formatteropts +
614 [('', 'version', '02', 'changegroup version'),
615 [('', 'version', '02', 'changegroup version'),
615 ('r', 'rev', '', 'revisions to add to changegroup')])
616 ('r', 'rev', '', 'revisions to add to changegroup')])
616 def perfchangegroupchangelog(ui, repo, version='02', rev=None, **opts):
617 def perfchangegroupchangelog(ui, repo, version='02', rev=None, **opts):
617 """Benchmark producing a changelog group for a changegroup.
618 """Benchmark producing a changelog group for a changegroup.
618
619
619 This measures the time spent processing the changelog during a
620 This measures the time spent processing the changelog during a
620 bundle operation. This occurs during `hg bundle` and on a server
621 bundle operation. This occurs during `hg bundle` and on a server
621 processing a `getbundle` wire protocol request (handles clones
622 processing a `getbundle` wire protocol request (handles clones
622 and pull requests).
623 and pull requests).
623
624
624 By default, all revisions are added to the changegroup.
625 By default, all revisions are added to the changegroup.
625 """
626 """
626 cl = repo.changelog
627 cl = repo.changelog
627 revs = [cl.lookup(r) for r in repo.revs(rev or 'all()')]
628 revs = [cl.lookup(r) for r in repo.revs(rev or 'all()')]
628 bundler = changegroup.getbundler(version, repo)
629 bundler = changegroup.getbundler(version, repo)
629
630
630 def lookup(node):
631 def lookup(node):
631 # The real bundler reads the revision in order to access the
632 # The real bundler reads the revision in order to access the
632 # manifest node and files list. Do that here.
633 # manifest node and files list. Do that here.
633 cl.read(node)
634 cl.read(node)
634 return node
635 return node
635
636
636 def d():
637 def d():
637 for chunk in bundler.group(revs, cl, lookup):
638 for chunk in bundler.group(revs, cl, lookup):
638 pass
639 pass
639
640
640 timer, fm = gettimer(ui, opts)
641 timer, fm = gettimer(ui, opts)
641 timer(d)
642 timer(d)
642 fm.end()
643 fm.end()
643
644
644 @command('perfdirs', formatteropts)
645 @command('perfdirs', formatteropts)
645 def perfdirs(ui, repo, **opts):
646 def perfdirs(ui, repo, **opts):
646 timer, fm = gettimer(ui, opts)
647 timer, fm = gettimer(ui, opts)
647 dirstate = repo.dirstate
648 dirstate = repo.dirstate
648 'a' in dirstate
649 'a' in dirstate
649 def d():
650 def d():
650 dirstate.hasdir('a')
651 dirstate.hasdir('a')
651 del dirstate._map._dirs
652 del dirstate._map._dirs
652 timer(d)
653 timer(d)
653 fm.end()
654 fm.end()
654
655
655 @command('perfdirstate', formatteropts)
656 @command('perfdirstate', formatteropts)
656 def perfdirstate(ui, repo, **opts):
657 def perfdirstate(ui, repo, **opts):
657 timer, fm = gettimer(ui, opts)
658 timer, fm = gettimer(ui, opts)
658 "a" in repo.dirstate
659 "a" in repo.dirstate
659 def d():
660 def d():
660 repo.dirstate.invalidate()
661 repo.dirstate.invalidate()
661 "a" in repo.dirstate
662 "a" in repo.dirstate
662 timer(d)
663 timer(d)
663 fm.end()
664 fm.end()
664
665
665 @command('perfdirstatedirs', formatteropts)
666 @command('perfdirstatedirs', formatteropts)
666 def perfdirstatedirs(ui, repo, **opts):
667 def perfdirstatedirs(ui, repo, **opts):
667 timer, fm = gettimer(ui, opts)
668 timer, fm = gettimer(ui, opts)
668 "a" in repo.dirstate
669 "a" in repo.dirstate
669 def d():
670 def d():
670 repo.dirstate.hasdir("a")
671 repo.dirstate.hasdir("a")
671 del repo.dirstate._map._dirs
672 del repo.dirstate._map._dirs
672 timer(d)
673 timer(d)
673 fm.end()
674 fm.end()
674
675
675 @command('perfdirstatefoldmap', formatteropts)
676 @command('perfdirstatefoldmap', formatteropts)
676 def perfdirstatefoldmap(ui, repo, **opts):
677 def perfdirstatefoldmap(ui, repo, **opts):
677 timer, fm = gettimer(ui, opts)
678 timer, fm = gettimer(ui, opts)
678 dirstate = repo.dirstate
679 dirstate = repo.dirstate
679 'a' in dirstate
680 'a' in dirstate
680 def d():
681 def d():
681 dirstate._map.filefoldmap.get('a')
682 dirstate._map.filefoldmap.get('a')
682 del dirstate._map.filefoldmap
683 del dirstate._map.filefoldmap
683 timer(d)
684 timer(d)
684 fm.end()
685 fm.end()
685
686
686 @command('perfdirfoldmap', formatteropts)
687 @command('perfdirfoldmap', formatteropts)
687 def perfdirfoldmap(ui, repo, **opts):
688 def perfdirfoldmap(ui, repo, **opts):
688 timer, fm = gettimer(ui, opts)
689 timer, fm = gettimer(ui, opts)
689 dirstate = repo.dirstate
690 dirstate = repo.dirstate
690 'a' in dirstate
691 'a' in dirstate
691 def d():
692 def d():
692 dirstate._map.dirfoldmap.get('a')
693 dirstate._map.dirfoldmap.get('a')
693 del dirstate._map.dirfoldmap
694 del dirstate._map.dirfoldmap
694 del dirstate._map._dirs
695 del dirstate._map._dirs
695 timer(d)
696 timer(d)
696 fm.end()
697 fm.end()
697
698
698 @command('perfdirstatewrite', formatteropts)
699 @command('perfdirstatewrite', formatteropts)
699 def perfdirstatewrite(ui, repo, **opts):
700 def perfdirstatewrite(ui, repo, **opts):
700 timer, fm = gettimer(ui, opts)
701 timer, fm = gettimer(ui, opts)
701 ds = repo.dirstate
702 ds = repo.dirstate
702 "a" in ds
703 "a" in ds
703 def d():
704 def d():
704 ds._dirty = True
705 ds._dirty = True
705 ds.write(repo.currenttransaction())
706 ds.write(repo.currenttransaction())
706 timer(d)
707 timer(d)
707 fm.end()
708 fm.end()
708
709
709 @command('perfmergecalculate',
710 @command('perfmergecalculate',
710 [('r', 'rev', '.', 'rev to merge against')] + formatteropts)
711 [('r', 'rev', '.', 'rev to merge against')] + formatteropts)
711 def perfmergecalculate(ui, repo, rev, **opts):
712 def perfmergecalculate(ui, repo, rev, **opts):
712 timer, fm = gettimer(ui, opts)
713 timer, fm = gettimer(ui, opts)
713 wctx = repo[None]
714 wctx = repo[None]
714 rctx = scmutil.revsingle(repo, rev, rev)
715 rctx = scmutil.revsingle(repo, rev, rev)
715 ancestor = wctx.ancestor(rctx)
716 ancestor = wctx.ancestor(rctx)
716 # we don't want working dir files to be stat'd in the benchmark, so prime
717 # we don't want working dir files to be stat'd in the benchmark, so prime
717 # that cache
718 # that cache
718 wctx.dirty()
719 wctx.dirty()
719 def d():
720 def d():
720 # acceptremote is True because we don't want prompts in the middle of
721 # acceptremote is True because we don't want prompts in the middle of
721 # our benchmark
722 # our benchmark
722 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
723 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
723 acceptremote=True, followcopies=True)
724 acceptremote=True, followcopies=True)
724 timer(d)
725 timer(d)
725 fm.end()
726 fm.end()
726
727
727 @command('perfpathcopies', [], "REV REV")
728 @command('perfpathcopies', [], "REV REV")
728 def perfpathcopies(ui, repo, rev1, rev2, **opts):
729 def perfpathcopies(ui, repo, rev1, rev2, **opts):
729 timer, fm = gettimer(ui, opts)
730 timer, fm = gettimer(ui, opts)
730 ctx1 = scmutil.revsingle(repo, rev1, rev1)
731 ctx1 = scmutil.revsingle(repo, rev1, rev1)
731 ctx2 = scmutil.revsingle(repo, rev2, rev2)
732 ctx2 = scmutil.revsingle(repo, rev2, rev2)
732 def d():
733 def d():
733 copies.pathcopies(ctx1, ctx2)
734 copies.pathcopies(ctx1, ctx2)
734 timer(d)
735 timer(d)
735 fm.end()
736 fm.end()
736
737
737 @command('perfphases',
738 @command('perfphases',
738 [('', 'full', False, 'include file reading time too'),
739 [('', 'full', False, 'include file reading time too'),
739 ], "")
740 ], "")
740 def perfphases(ui, repo, **opts):
741 def perfphases(ui, repo, **opts):
741 """benchmark phasesets computation"""
742 """benchmark phasesets computation"""
742 timer, fm = gettimer(ui, opts)
743 timer, fm = gettimer(ui, opts)
743 _phases = repo._phasecache
744 _phases = repo._phasecache
744 full = opts.get('full')
745 full = opts.get('full')
745 def d():
746 def d():
746 phases = _phases
747 phases = _phases
747 if full:
748 if full:
748 clearfilecache(repo, '_phasecache')
749 clearfilecache(repo, '_phasecache')
749 phases = repo._phasecache
750 phases = repo._phasecache
750 phases.invalidate()
751 phases.invalidate()
751 phases.loadphaserevs(repo)
752 phases.loadphaserevs(repo)
752 timer(d)
753 timer(d)
753 fm.end()
754 fm.end()
754
755
755 @command('perfmanifest', [], 'REV')
756 @command('perfmanifest', [], 'REV')
756 def perfmanifest(ui, repo, rev, **opts):
757 def perfmanifest(ui, repo, rev, **opts):
757 timer, fm = gettimer(ui, opts)
758 timer, fm = gettimer(ui, opts)
758 ctx = scmutil.revsingle(repo, rev, rev)
759 ctx = scmutil.revsingle(repo, rev, rev)
759 t = ctx.manifestnode()
760 t = ctx.manifestnode()
760 def d():
761 def d():
761 repo.manifestlog.clearcaches()
762 repo.manifestlog.clearcaches()
762 repo.manifestlog[t].read()
763 repo.manifestlog[t].read()
763 timer(d)
764 timer(d)
764 fm.end()
765 fm.end()
765
766
766 @command('perfchangeset', formatteropts)
767 @command('perfchangeset', formatteropts)
767 def perfchangeset(ui, repo, rev, **opts):
768 def perfchangeset(ui, repo, rev, **opts):
768 timer, fm = gettimer(ui, opts)
769 timer, fm = gettimer(ui, opts)
769 n = repo[rev].node()
770 n = repo[rev].node()
770 def d():
771 def d():
771 repo.changelog.read(n)
772 repo.changelog.read(n)
772 #repo.changelog._cache = None
773 #repo.changelog._cache = None
773 timer(d)
774 timer(d)
774 fm.end()
775 fm.end()
775
776
776 @command('perfindex', formatteropts)
777 @command('perfindex', formatteropts)
777 def perfindex(ui, repo, **opts):
778 def perfindex(ui, repo, **opts):
778 import mercurial.revlog
779 import mercurial.revlog
779 timer, fm = gettimer(ui, opts)
780 timer, fm = gettimer(ui, opts)
780 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
781 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
781 n = repo["tip"].node()
782 n = repo["tip"].node()
782 svfs = getsvfs(repo)
783 svfs = getsvfs(repo)
783 def d():
784 def d():
784 cl = mercurial.revlog.revlog(svfs, "00changelog.i")
785 cl = mercurial.revlog.revlog(svfs, "00changelog.i")
785 cl.rev(n)
786 cl.rev(n)
786 timer(d)
787 timer(d)
787 fm.end()
788 fm.end()
788
789
789 @command('perfstartup', formatteropts)
790 @command('perfstartup', formatteropts)
790 def perfstartup(ui, repo, **opts):
791 def perfstartup(ui, repo, **opts):
791 timer, fm = gettimer(ui, opts)
792 timer, fm = gettimer(ui, opts)
792 cmd = sys.argv[0]
793 cmd = sys.argv[0]
793 def d():
794 def d():
794 if os.name != 'nt':
795 if os.name != 'nt':
795 os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
796 os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
796 else:
797 else:
797 os.environ['HGRCPATH'] = ' '
798 os.environ['HGRCPATH'] = ' '
798 os.system("%s version -q > NUL" % cmd)
799 os.system("%s version -q > NUL" % cmd)
799 timer(d)
800 timer(d)
800 fm.end()
801 fm.end()
801
802
802 @command('perfparents', formatteropts)
803 @command('perfparents', formatteropts)
803 def perfparents(ui, repo, **opts):
804 def perfparents(ui, repo, **opts):
804 timer, fm = gettimer(ui, opts)
805 timer, fm = gettimer(ui, opts)
805 # control the number of commits perfparents iterates over
806 # control the number of commits perfparents iterates over
806 # experimental config: perf.parentscount
807 # experimental config: perf.parentscount
807 count = getint(ui, "perf", "parentscount", 1000)
808 count = getint(ui, "perf", "parentscount", 1000)
808 if len(repo.changelog) < count:
809 if len(repo.changelog) < count:
809 raise error.Abort("repo needs %d commits for this test" % count)
810 raise error.Abort("repo needs %d commits for this test" % count)
810 repo = repo.unfiltered()
811 repo = repo.unfiltered()
811 nl = [repo.changelog.node(i) for i in xrange(count)]
812 nl = [repo.changelog.node(i) for i in xrange(count)]
812 def d():
813 def d():
813 for n in nl:
814 for n in nl:
814 repo.changelog.parents(n)
815 repo.changelog.parents(n)
815 timer(d)
816 timer(d)
816 fm.end()
817 fm.end()
817
818
818 @command('perfctxfiles', formatteropts)
819 @command('perfctxfiles', formatteropts)
819 def perfctxfiles(ui, repo, x, **opts):
820 def perfctxfiles(ui, repo, x, **opts):
820 x = int(x)
821 x = int(x)
821 timer, fm = gettimer(ui, opts)
822 timer, fm = gettimer(ui, opts)
822 def d():
823 def d():
823 len(repo[x].files())
824 len(repo[x].files())
824 timer(d)
825 timer(d)
825 fm.end()
826 fm.end()
826
827
827 @command('perfrawfiles', formatteropts)
828 @command('perfrawfiles', formatteropts)
828 def perfrawfiles(ui, repo, x, **opts):
829 def perfrawfiles(ui, repo, x, **opts):
829 x = int(x)
830 x = int(x)
830 timer, fm = gettimer(ui, opts)
831 timer, fm = gettimer(ui, opts)
831 cl = repo.changelog
832 cl = repo.changelog
832 def d():
833 def d():
833 len(cl.read(x)[3])
834 len(cl.read(x)[3])
834 timer(d)
835 timer(d)
835 fm.end()
836 fm.end()
836
837
837 @command('perflookup', formatteropts)
838 @command('perflookup', formatteropts)
838 def perflookup(ui, repo, rev, **opts):
839 def perflookup(ui, repo, rev, **opts):
839 timer, fm = gettimer(ui, opts)
840 timer, fm = gettimer(ui, opts)
840 timer(lambda: len(repo.lookup(rev)))
841 timer(lambda: len(repo.lookup(rev)))
841 fm.end()
842 fm.end()
842
843
843 @command('perfrevrange', formatteropts)
844 @command('perfrevrange', formatteropts)
844 def perfrevrange(ui, repo, *specs, **opts):
845 def perfrevrange(ui, repo, *specs, **opts):
845 timer, fm = gettimer(ui, opts)
846 timer, fm = gettimer(ui, opts)
846 revrange = scmutil.revrange
847 revrange = scmutil.revrange
847 timer(lambda: len(revrange(repo, specs)))
848 timer(lambda: len(revrange(repo, specs)))
848 fm.end()
849 fm.end()
849
850
850 @command('perfnodelookup', formatteropts)
851 @command('perfnodelookup', formatteropts)
851 def perfnodelookup(ui, repo, rev, **opts):
852 def perfnodelookup(ui, repo, rev, **opts):
852 timer, fm = gettimer(ui, opts)
853 timer, fm = gettimer(ui, opts)
853 import mercurial.revlog
854 import mercurial.revlog
854 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
855 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
855 n = repo[rev].node()
856 n = repo[rev].node()
856 cl = mercurial.revlog.revlog(getsvfs(repo), "00changelog.i")
857 cl = mercurial.revlog.revlog(getsvfs(repo), "00changelog.i")
857 def d():
858 def d():
858 cl.rev(n)
859 cl.rev(n)
859 clearcaches(cl)
860 clearcaches(cl)
860 timer(d)
861 timer(d)
861 fm.end()
862 fm.end()
862
863
863 @command('perflog',
864 @command('perflog',
864 [('', 'rename', False, 'ask log to follow renames')] + formatteropts)
865 [('', 'rename', False, 'ask log to follow renames')] + formatteropts)
865 def perflog(ui, repo, rev=None, **opts):
866 def perflog(ui, repo, rev=None, **opts):
866 if rev is None:
867 if rev is None:
867 rev=[]
868 rev=[]
868 timer, fm = gettimer(ui, opts)
869 timer, fm = gettimer(ui, opts)
869 ui.pushbuffer()
870 ui.pushbuffer()
870 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
871 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
871 copies=opts.get('rename')))
872 copies=opts.get('rename')))
872 ui.popbuffer()
873 ui.popbuffer()
873 fm.end()
874 fm.end()
874
875
875 @command('perfmoonwalk', formatteropts)
876 @command('perfmoonwalk', formatteropts)
876 def perfmoonwalk(ui, repo, **opts):
877 def perfmoonwalk(ui, repo, **opts):
877 """benchmark walking the changelog backwards
878 """benchmark walking the changelog backwards
878
879
879 This also loads the changelog data for each revision in the changelog.
880 This also loads the changelog data for each revision in the changelog.
880 """
881 """
881 timer, fm = gettimer(ui, opts)
882 timer, fm = gettimer(ui, opts)
882 def moonwalk():
883 def moonwalk():
883 for i in xrange(len(repo), -1, -1):
884 for i in xrange(len(repo), -1, -1):
884 ctx = repo[i]
885 ctx = repo[i]
885 ctx.branch() # read changelog data (in addition to the index)
886 ctx.branch() # read changelog data (in addition to the index)
886 timer(moonwalk)
887 timer(moonwalk)
887 fm.end()
888 fm.end()
888
889
889 @command('perftemplating', formatteropts)
890 @command('perftemplating', formatteropts)
890 def perftemplating(ui, repo, rev=None, **opts):
891 def perftemplating(ui, repo, rev=None, **opts):
891 if rev is None:
892 if rev is None:
892 rev=[]
893 rev=[]
893 timer, fm = gettimer(ui, opts)
894 timer, fm = gettimer(ui, opts)
894 ui.pushbuffer()
895 ui.pushbuffer()
895 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
896 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
896 template='{date|shortdate} [{rev}:{node|short}]'
897 template='{date|shortdate} [{rev}:{node|short}]'
897 ' {author|person}: {desc|firstline}\n'))
898 ' {author|person}: {desc|firstline}\n'))
898 ui.popbuffer()
899 ui.popbuffer()
899 fm.end()
900 fm.end()
900
901
901 @command('perfcca', formatteropts)
902 @command('perfcca', formatteropts)
902 def perfcca(ui, repo, **opts):
903 def perfcca(ui, repo, **opts):
903 timer, fm = gettimer(ui, opts)
904 timer, fm = gettimer(ui, opts)
904 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
905 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
905 fm.end()
906 fm.end()
906
907
907 @command('perffncacheload', formatteropts)
908 @command('perffncacheload', formatteropts)
908 def perffncacheload(ui, repo, **opts):
909 def perffncacheload(ui, repo, **opts):
909 timer, fm = gettimer(ui, opts)
910 timer, fm = gettimer(ui, opts)
910 s = repo.store
911 s = repo.store
911 def d():
912 def d():
912 s.fncache._load()
913 s.fncache._load()
913 timer(d)
914 timer(d)
914 fm.end()
915 fm.end()
915
916
916 @command('perffncachewrite', formatteropts)
917 @command('perffncachewrite', formatteropts)
917 def perffncachewrite(ui, repo, **opts):
918 def perffncachewrite(ui, repo, **opts):
918 timer, fm = gettimer(ui, opts)
919 timer, fm = gettimer(ui, opts)
919 s = repo.store
920 s = repo.store
920 s.fncache._load()
921 s.fncache._load()
921 lock = repo.lock()
922 lock = repo.lock()
922 tr = repo.transaction('perffncachewrite')
923 tr = repo.transaction('perffncachewrite')
923 def d():
924 def d():
924 s.fncache._dirty = True
925 s.fncache._dirty = True
925 s.fncache.write(tr)
926 s.fncache.write(tr)
926 timer(d)
927 timer(d)
927 tr.close()
928 tr.close()
928 lock.release()
929 lock.release()
929 fm.end()
930 fm.end()
930
931
931 @command('perffncacheencode', formatteropts)
932 @command('perffncacheencode', formatteropts)
932 def perffncacheencode(ui, repo, **opts):
933 def perffncacheencode(ui, repo, **opts):
933 timer, fm = gettimer(ui, opts)
934 timer, fm = gettimer(ui, opts)
934 s = repo.store
935 s = repo.store
935 s.fncache._load()
936 s.fncache._load()
936 def d():
937 def d():
937 for p in s.fncache.entries:
938 for p in s.fncache.entries:
938 s.encode(p)
939 s.encode(p)
939 timer(d)
940 timer(d)
940 fm.end()
941 fm.end()
941
942
942 def _bdiffworker(q, blocks, xdiff, ready, done):
943 def _bdiffworker(q, blocks, xdiff, ready, done):
943 while not done.is_set():
944 while not done.is_set():
944 pair = q.get()
945 pair = q.get()
945 while pair is not None:
946 while pair is not None:
946 if xdiff:
947 if xdiff:
947 mdiff.bdiff.xdiffblocks(*pair)
948 mdiff.bdiff.xdiffblocks(*pair)
948 elif blocks:
949 elif blocks:
949 mdiff.bdiff.blocks(*pair)
950 mdiff.bdiff.blocks(*pair)
950 else:
951 else:
951 mdiff.textdiff(*pair)
952 mdiff.textdiff(*pair)
952 q.task_done()
953 q.task_done()
953 pair = q.get()
954 pair = q.get()
954 q.task_done() # for the None one
955 q.task_done() # for the None one
955 with ready:
956 with ready:
956 ready.wait()
957 ready.wait()
957
958
958 @command('perfbdiff', revlogopts + formatteropts + [
959 @command('perfbdiff', revlogopts + formatteropts + [
959 ('', 'count', 1, 'number of revisions to test, starting at the specified revision'),
960 ('', 'count', 1, 'number of revisions to test, starting at the specified revision'),
960 ('', 'alldata', False, 'test bdiffs for all associated revisions'),
961 ('', 'alldata', False, 'test bdiffs for all associated revisions'),
961 ('', 'threads', 0, 'number of threads to use (disable with 0)'),
962 ('', 'threads', 0, 'number of threads to use (disable with 0)'),
962 ('', 'blocks', False, 'test computing diffs into blocks'),
963 ('', 'blocks', False, 'test computing diffs into blocks'),
963 ('', 'xdiff', False, 'use xdiff algorithm'),
964 ('', 'xdiff', False, 'use xdiff algorithm'),
964 ],
965 ],
965
966
966 '-c|-m|FILE REV')
967 '-c|-m|FILE REV')
967 def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
968 def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
968 """benchmark a bdiff between revisions
969 """benchmark a bdiff between revisions
969
970
970 By default, benchmark a bdiff between the requested revision and its delta parent.
971 By default, benchmark a bdiff between the requested revision and its delta parent.
971
972
972 With ``--count``, benchmark bdiffs between delta parents and self for N
973 With ``--count``, benchmark bdiffs between delta parents and self for N
973 revisions starting at the specified revision.
974 revisions starting at the specified revision.
974
975
975 With ``--alldata``, assume the requested revision is a changeset and
976 With ``--alldata``, assume the requested revision is a changeset and
976 measure bdiffs for all changes related to that changeset (manifest
977 measure bdiffs for all changes related to that changeset (manifest
977 and filelogs).
978 and filelogs).
978 """
979 """
979 opts = pycompat.byteskwargs(opts)
980 opts = pycompat.byteskwargs(opts)
980
981
981 if opts['xdiff'] and not opts['blocks']:
982 if opts['xdiff'] and not opts['blocks']:
982 raise error.CommandError('perfbdiff', '--xdiff requires --blocks')
983 raise error.CommandError('perfbdiff', '--xdiff requires --blocks')
983
984
984 if opts['alldata']:
985 if opts['alldata']:
985 opts['changelog'] = True
986 opts['changelog'] = True
986
987
987 if opts.get('changelog') or opts.get('manifest'):
988 if opts.get('changelog') or opts.get('manifest'):
988 file_, rev = None, file_
989 file_, rev = None, file_
989 elif rev is None:
990 elif rev is None:
990 raise error.CommandError('perfbdiff', 'invalid arguments')
991 raise error.CommandError('perfbdiff', 'invalid arguments')
991
992
992 blocks = opts['blocks']
993 blocks = opts['blocks']
993 xdiff = opts['xdiff']
994 xdiff = opts['xdiff']
994 textpairs = []
995 textpairs = []
995
996
996 r = cmdutil.openrevlog(repo, 'perfbdiff', file_, opts)
997 r = cmdutil.openrevlog(repo, 'perfbdiff', file_, opts)
997
998
998 startrev = r.rev(r.lookup(rev))
999 startrev = r.rev(r.lookup(rev))
999 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1000 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1000 if opts['alldata']:
1001 if opts['alldata']:
1001 # Load revisions associated with changeset.
1002 # Load revisions associated with changeset.
1002 ctx = repo[rev]
1003 ctx = repo[rev]
1003 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
1004 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
1004 for pctx in ctx.parents():
1005 for pctx in ctx.parents():
1005 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
1006 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
1006 textpairs.append((pman, mtext))
1007 textpairs.append((pman, mtext))
1007
1008
1008 # Load filelog revisions by iterating manifest delta.
1009 # Load filelog revisions by iterating manifest delta.
1009 man = ctx.manifest()
1010 man = ctx.manifest()
1010 pman = ctx.p1().manifest()
1011 pman = ctx.p1().manifest()
1011 for filename, change in pman.diff(man).items():
1012 for filename, change in pman.diff(man).items():
1012 fctx = repo.file(filename)
1013 fctx = repo.file(filename)
1013 f1 = fctx.revision(change[0][0] or -1)
1014 f1 = fctx.revision(change[0][0] or -1)
1014 f2 = fctx.revision(change[1][0] or -1)
1015 f2 = fctx.revision(change[1][0] or -1)
1015 textpairs.append((f1, f2))
1016 textpairs.append((f1, f2))
1016 else:
1017 else:
1017 dp = r.deltaparent(rev)
1018 dp = r.deltaparent(rev)
1018 textpairs.append((r.revision(dp), r.revision(rev)))
1019 textpairs.append((r.revision(dp), r.revision(rev)))
1019
1020
1020 withthreads = threads > 0
1021 withthreads = threads > 0
1021 if not withthreads:
1022 if not withthreads:
1022 def d():
1023 def d():
1023 for pair in textpairs:
1024 for pair in textpairs:
1024 if xdiff:
1025 if xdiff:
1025 mdiff.bdiff.xdiffblocks(*pair)
1026 mdiff.bdiff.xdiffblocks(*pair)
1026 elif blocks:
1027 elif blocks:
1027 mdiff.bdiff.blocks(*pair)
1028 mdiff.bdiff.blocks(*pair)
1028 else:
1029 else:
1029 mdiff.textdiff(*pair)
1030 mdiff.textdiff(*pair)
1030 else:
1031 else:
1031 q = util.queue()
1032 q = util.queue()
1032 for i in xrange(threads):
1033 for i in xrange(threads):
1033 q.put(None)
1034 q.put(None)
1034 ready = threading.Condition()
1035 ready = threading.Condition()
1035 done = threading.Event()
1036 done = threading.Event()
1036 for i in xrange(threads):
1037 for i in xrange(threads):
1037 threading.Thread(target=_bdiffworker,
1038 threading.Thread(target=_bdiffworker,
1038 args=(q, blocks, xdiff, ready, done)).start()
1039 args=(q, blocks, xdiff, ready, done)).start()
1039 q.join()
1040 q.join()
1040 def d():
1041 def d():
1041 for pair in textpairs:
1042 for pair in textpairs:
1042 q.put(pair)
1043 q.put(pair)
1043 for i in xrange(threads):
1044 for i in xrange(threads):
1044 q.put(None)
1045 q.put(None)
1045 with ready:
1046 with ready:
1046 ready.notify_all()
1047 ready.notify_all()
1047 q.join()
1048 q.join()
1048 timer, fm = gettimer(ui, opts)
1049 timer, fm = gettimer(ui, opts)
1049 timer(d)
1050 timer(d)
1050 fm.end()
1051 fm.end()
1051
1052
1052 if withthreads:
1053 if withthreads:
1053 done.set()
1054 done.set()
1054 for i in xrange(threads):
1055 for i in xrange(threads):
1055 q.put(None)
1056 q.put(None)
1056 with ready:
1057 with ready:
1057 ready.notify_all()
1058 ready.notify_all()
1058
1059
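# Self-contained sketch (standard library only, deliberately simplified) of the
# sentinel-terminated work queue that _bdiffworker and perfbdiff use to time one
# batch of diffs; the real code additionally parks idle workers on a
# threading.Condition ("ready") between timed runs and signals shutdown with a
# threading.Event ("done").
import threading
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2

def worker(q):
    pair = q.get()
    while pair is not None:            # one None sentinel per thread ends the batch
        len(pair[0]) + len(pair[1])    # stand-in for mdiff.textdiff(*pair)
        q.task_done()
        pair = q.get()
    q.task_done()                      # account for the sentinel itself

q = queue.Queue()
nthreads = 2
for _ in range(nthreads):
    t = threading.Thread(target=worker, args=(q,))
    t.daemon = True                    # let the process exit without joining
    t.start()

for pair in [(b'old text', b'new text')] * 10:   # the timed batch
    q.put(pair)
for _ in range(nthreads):
    q.put(None)
q.join()     # returns once every pair and every sentinel has been processed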
1059 @command('perfunidiff', revlogopts + formatteropts + [
1060 @command('perfunidiff', revlogopts + formatteropts + [
1060 ('', 'count', 1, 'number of revisions to test, starting at the specified revision'),
1061 ('', 'count', 1, 'number of revisions to test, starting at the specified revision'),
1061 ('', 'alldata', False, 'test unidiffs for all associated revisions'),
1062 ('', 'alldata', False, 'test unidiffs for all associated revisions'),
1062 ], '-c|-m|FILE REV')
1063 ], '-c|-m|FILE REV')
1063 def perfunidiff(ui, repo, file_, rev=None, count=None, **opts):
1064 def perfunidiff(ui, repo, file_, rev=None, count=None, **opts):
1064 """benchmark a unified diff between revisions
1065 """benchmark a unified diff between revisions
1065
1066
1066 This doesn't include any copy tracing - it's just a unified diff
1067 This doesn't include any copy tracing - it's just a unified diff
1067 of the texts.
1068 of the texts.
1068
1069
1069 By default, benchmark a diff between the requested revision and its delta parent.
1070 By default, benchmark a diff between the requested revision and its delta parent.
1070
1071
1071 With ``--count``, benchmark diffs between delta parents and self for N
1072 With ``--count``, benchmark diffs between delta parents and self for N
1072 revisions starting at the specified revision.
1073 revisions starting at the specified revision.
1073
1074
1074 With ``--alldata``, assume the requested revision is a changeset and
1075 With ``--alldata``, assume the requested revision is a changeset and
1075 measure diffs for all changes related to that changeset (manifest
1076 measure diffs for all changes related to that changeset (manifest
1076 and filelogs).
1077 and filelogs).
1077 """
1078 """
1078 if opts['alldata']:
1079 if opts['alldata']:
1079 opts['changelog'] = True
1080 opts['changelog'] = True
1080
1081
1081 if opts.get('changelog') or opts.get('manifest'):
1082 if opts.get('changelog') or opts.get('manifest'):
1082 file_, rev = None, file_
1083 file_, rev = None, file_
1083 elif rev is None:
1084 elif rev is None:
1084 raise error.CommandError('perfunidiff', 'invalid arguments')
1085 raise error.CommandError('perfunidiff', 'invalid arguments')
1085
1086
1086 textpairs = []
1087 textpairs = []
1087
1088
1088 r = cmdutil.openrevlog(repo, 'perfunidiff', file_, opts)
1089 r = cmdutil.openrevlog(repo, 'perfunidiff', file_, opts)
1089
1090
1090 startrev = r.rev(r.lookup(rev))
1091 startrev = r.rev(r.lookup(rev))
1091 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1092 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1092 if opts['alldata']:
1093 if opts['alldata']:
1093 # Load revisions associated with changeset.
1094 # Load revisions associated with changeset.
1094 ctx = repo[rev]
1095 ctx = repo[rev]
1095 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
1096 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
1096 for pctx in ctx.parents():
1097 for pctx in ctx.parents():
1097 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
1098 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
1098 textpairs.append((pman, mtext))
1099 textpairs.append((pman, mtext))
1099
1100
1100 # Load filelog revisions by iterating manifest delta.
1101 # Load filelog revisions by iterating manifest delta.
1101 man = ctx.manifest()
1102 man = ctx.manifest()
1102 pman = ctx.p1().manifest()
1103 pman = ctx.p1().manifest()
1103 for filename, change in pman.diff(man).items():
1104 for filename, change in pman.diff(man).items():
1104 fctx = repo.file(filename)
1105 fctx = repo.file(filename)
1105 f1 = fctx.revision(change[0][0] or -1)
1106 f1 = fctx.revision(change[0][0] or -1)
1106 f2 = fctx.revision(change[1][0] or -1)
1107 f2 = fctx.revision(change[1][0] or -1)
1107 textpairs.append((f1, f2))
1108 textpairs.append((f1, f2))
1108 else:
1109 else:
1109 dp = r.deltaparent(rev)
1110 dp = r.deltaparent(rev)
1110 textpairs.append((r.revision(dp), r.revision(rev)))
1111 textpairs.append((r.revision(dp), r.revision(rev)))
1111
1112
1112 def d():
1113 def d():
1113 for left, right in textpairs:
1114 for left, right in textpairs:
1114 # The date strings don't matter, so we pass empty strings.
1115 # The date strings don't matter, so we pass empty strings.
1115 headerlines, hunks = mdiff.unidiff(
1116 headerlines, hunks = mdiff.unidiff(
1116 left, '', right, '', 'left', 'right', binary=False)
1117 left, '', right, '', 'left', 'right', binary=False)
1117 # consume iterators in roughly the way patch.py does
1118 # consume iterators in roughly the way patch.py does
1118 b'\n'.join(headerlines)
1119 b'\n'.join(headerlines)
1119 b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
1120 b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
1120 timer, fm = gettimer(ui, opts)
1121 timer, fm = gettimer(ui, opts)
1121 timer(d)
1122 timer(d)
1122 fm.end()
1123 fm.end()
1123
1124
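# A rough stdlib analogue (difflib here, not mdiff.unidiff) of what the
# perfunidiff benchmark body consumes: a couple of header lines plus the hunk
# lines, joined roughly the way patch.py reads them.
import difflib

left = 'a\nb\nc\n'.splitlines(True)
right = 'a\nB\nc\nd\n'.splitlines(True)
lines = list(difflib.unified_diff(left, right, 'left', 'right'))
headerlines, hunklines = lines[:2], lines[2:]
print(''.join(headerlines) + ''.join(hunklines))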
1124 @command('perfdiffwd', formatteropts)
1125 @command('perfdiffwd', formatteropts)
1125 def perfdiffwd(ui, repo, **opts):
1126 def perfdiffwd(ui, repo, **opts):
1126 """Profile diff of working directory changes"""
1127 """Profile diff of working directory changes"""
1127 timer, fm = gettimer(ui, opts)
1128 timer, fm = gettimer(ui, opts)
1128 options = {
1129 options = {
1129 'w': 'ignore_all_space',
1130 'w': 'ignore_all_space',
1130 'b': 'ignore_space_change',
1131 'b': 'ignore_space_change',
1131 'B': 'ignore_blank_lines',
1132 'B': 'ignore_blank_lines',
1132 }
1133 }
1133
1134
1134 for diffopt in ('', 'w', 'b', 'B', 'wB'):
1135 for diffopt in ('', 'w', 'b', 'B', 'wB'):
1135 opts = dict((options[c], '1') for c in diffopt)
1136 opts = dict((options[c], '1') for c in diffopt)
1136 def d():
1137 def d():
1137 ui.pushbuffer()
1138 ui.pushbuffer()
1138 commands.diff(ui, repo, **opts)
1139 commands.diff(ui, repo, **opts)
1139 ui.popbuffer()
1140 ui.popbuffer()
1140 title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
1141 title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
1141 timer(d, title)
1142 timer(d, title)
1142 fm.end()
1143 fm.end()
1143
1144
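# Quick illustration of how perfdiffwd expands its one-letter diffopt strings
# into keyword arguments for commands.diff (the mapping is copied from the code
# above; only the driver loop is repeated here).
options = {
    'w': 'ignore_all_space',
    'b': 'ignore_space_change',
    'B': 'ignore_blank_lines',
}
for diffopt in ('', 'w', 'b', 'B', 'wB'):
    kwargs = dict((options[c], '1') for c in diffopt)
    title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
    print('%s %s' % (title, sorted(kwargs)))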
1144 @command('perfrevlogindex', revlogopts + formatteropts,
1145 @command('perfrevlogindex', revlogopts + formatteropts,
1145 '-c|-m|FILE')
1146 '-c|-m|FILE')
1146 def perfrevlogindex(ui, repo, file_=None, **opts):
1147 def perfrevlogindex(ui, repo, file_=None, **opts):
1147 """Benchmark operations against a revlog index.
1148 """Benchmark operations against a revlog index.
1148
1149
1149 This tests constructing a revlog instance, reading index data,
1150 This tests constructing a revlog instance, reading index data,
1150 parsing index data, and performing various operations related to
1151 parsing index data, and performing various operations related to
1151 index data.
1152 index data.
1152 """
1153 """
1153
1154
1154 rl = cmdutil.openrevlog(repo, 'perfrevlogindex', file_, opts)
1155 rl = cmdutil.openrevlog(repo, 'perfrevlogindex', file_, opts)
1155
1156
1156 opener = getattr(rl, 'opener') # trick linter
1157 opener = getattr(rl, 'opener') # trick linter
1157 indexfile = rl.indexfile
1158 indexfile = rl.indexfile
1158 data = opener.read(indexfile)
1159 data = opener.read(indexfile)
1159
1160
1160 header = struct.unpack('>I', data[0:4])[0]
1161 header = struct.unpack('>I', data[0:4])[0]
1161 version = header & 0xFFFF
1162 version = header & 0xFFFF
1162 if version == 1:
1163 if version == 1:
1163 revlogio = revlog.revlogio()
1164 revlogio = revlog.revlogio()
1164 inline = header & (1 << 16)
1165 inline = header & (1 << 16)
1165 else:
1166 else:
1166 raise error.Abort(('unsupported revlog version: %d') % version)
1167 raise error.Abort(('unsupported revlog version: %d') % version)
1167
1168
1168 rllen = len(rl)
1169 rllen = len(rl)
1169
1170
1170 node0 = rl.node(0)
1171 node0 = rl.node(0)
1171 node25 = rl.node(rllen // 4)
1172 node25 = rl.node(rllen // 4)
1172 node50 = rl.node(rllen // 2)
1173 node50 = rl.node(rllen // 2)
1173 node75 = rl.node(rllen // 4 * 3)
1174 node75 = rl.node(rllen // 4 * 3)
1174 node100 = rl.node(rllen - 1)
1175 node100 = rl.node(rllen - 1)
1175
1176
1176 allrevs = range(rllen)
1177 allrevs = range(rllen)
1177 allrevsrev = list(reversed(allrevs))
1178 allrevsrev = list(reversed(allrevs))
1178 allnodes = [rl.node(rev) for rev in range(rllen)]
1179 allnodes = [rl.node(rev) for rev in range(rllen)]
1179 allnodesrev = list(reversed(allnodes))
1180 allnodesrev = list(reversed(allnodes))
1180
1181
1181 def constructor():
1182 def constructor():
1182 revlog.revlog(opener, indexfile)
1183 revlog.revlog(opener, indexfile)
1183
1184
1184 def read():
1185 def read():
1185 with opener(indexfile) as fh:
1186 with opener(indexfile) as fh:
1186 fh.read()
1187 fh.read()
1187
1188
1188 def parseindex():
1189 def parseindex():
1189 revlogio.parseindex(data, inline)
1190 revlogio.parseindex(data, inline)
1190
1191
1191 def getentry(revornode):
1192 def getentry(revornode):
1192 index = revlogio.parseindex(data, inline)[0]
1193 index = revlogio.parseindex(data, inline)[0]
1193 index[revornode]
1194 index[revornode]
1194
1195
1195 def getentries(revs, count=1):
1196 def getentries(revs, count=1):
1196 index = revlogio.parseindex(data, inline)[0]
1197 index = revlogio.parseindex(data, inline)[0]
1197
1198
1198 for i in range(count):
1199 for i in range(count):
1199 for rev in revs:
1200 for rev in revs:
1200 index[rev]
1201 index[rev]
1201
1202
1202 def resolvenode(node):
1203 def resolvenode(node):
1203 nodemap = revlogio.parseindex(data, inline)[1]
1204 nodemap = revlogio.parseindex(data, inline)[1]
1204 # This only works for the C code.
1205 # This only works for the C code.
1205 if nodemap is None:
1206 if nodemap is None:
1206 return
1207 return
1207
1208
1208 try:
1209 try:
1209 nodemap[node]
1210 nodemap[node]
1210 except error.RevlogError:
1211 except error.RevlogError:
1211 pass
1212 pass
1212
1213
1213 def resolvenodes(nodes, count=1):
1214 def resolvenodes(nodes, count=1):
1214 nodemap = revlogio.parseindex(data, inline)[1]
1215 nodemap = revlogio.parseindex(data, inline)[1]
1215 if nodemap is None:
1216 if nodemap is None:
1216 return
1217 return
1217
1218
1218 for i in range(count):
1219 for i in range(count):
1219 for node in nodes:
1220 for node in nodes:
1220 try:
1221 try:
1221 nodemap[node]
1222 nodemap[node]
1222 except error.RevlogError:
1223 except error.RevlogError:
1223 pass
1224 pass
1224
1225
1225 benches = [
1226 benches = [
1226 (constructor, 'revlog constructor'),
1227 (constructor, 'revlog constructor'),
1227 (read, 'read'),
1228 (read, 'read'),
1228 (parseindex, 'create index object'),
1229 (parseindex, 'create index object'),
1229 (lambda: getentry(0), 'retrieve index entry for rev 0'),
1230 (lambda: getentry(0), 'retrieve index entry for rev 0'),
1230 (lambda: resolvenode('a' * 20), 'look up missing node'),
1231 (lambda: resolvenode('a' * 20), 'look up missing node'),
1231 (lambda: resolvenode(node0), 'look up node at rev 0'),
1232 (lambda: resolvenode(node0), 'look up node at rev 0'),
1232 (lambda: resolvenode(node25), 'look up node at 1/4 len'),
1233 (lambda: resolvenode(node25), 'look up node at 1/4 len'),
1233 (lambda: resolvenode(node50), 'look up node at 1/2 len'),
1234 (lambda: resolvenode(node50), 'look up node at 1/2 len'),
1234 (lambda: resolvenode(node75), 'look up node at 3/4 len'),
1235 (lambda: resolvenode(node75), 'look up node at 3/4 len'),
1235 (lambda: resolvenode(node100), 'look up node at tip'),
1236 (lambda: resolvenode(node100), 'look up node at tip'),
1236 # 2x variation is to measure caching impact.
1237 # 2x variation is to measure caching impact.
1237 (lambda: resolvenodes(allnodes),
1238 (lambda: resolvenodes(allnodes),
1238 'look up all nodes (forward)'),
1239 'look up all nodes (forward)'),
1239 (lambda: resolvenodes(allnodes, 2),
1240 (lambda: resolvenodes(allnodes, 2),
1240 'look up all nodes 2x (forward)'),
1241 'look up all nodes 2x (forward)'),
1241 (lambda: resolvenodes(allnodesrev),
1242 (lambda: resolvenodes(allnodesrev),
1242 'look up all nodes (reverse)'),
1243 'look up all nodes (reverse)'),
1243 (lambda: resolvenodes(allnodesrev, 2),
1244 (lambda: resolvenodes(allnodesrev, 2),
1244 'look up all nodes 2x (reverse)'),
1245 'look up all nodes 2x (reverse)'),
1245 (lambda: getentries(allrevs),
1246 (lambda: getentries(allrevs),
1246 'retrieve all index entries (forward)'),
1247 'retrieve all index entries (forward)'),
1247 (lambda: getentries(allrevs, 2),
1248 (lambda: getentries(allrevs, 2),
1248 'retrieve all index entries 2x (forward)'),
1249 'retrieve all index entries 2x (forward)'),
1249 (lambda: getentries(allrevsrev),
1250 (lambda: getentries(allrevsrev),
1250 'retrieve all index entries (reverse)'),
1251 'retrieve all index entries (reverse)'),
1251 (lambda: getentries(allrevsrev, 2),
1252 (lambda: getentries(allrevsrev, 2),
1252 'retrieve all index entries 2x (reverse)'),
1253 'retrieve all index entries 2x (reverse)'),
1253 ]
1254 ]
1254
1255
1255 for fn, title in benches:
1256 for fn, title in benches:
1256 timer, fm = gettimer(ui, opts)
1257 timer, fm = gettimer(ui, opts)
1257 timer(fn, title=title)
1258 timer(fn, title=title)
1258 fm.end()
1259 fm.end()
1259
1260
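# Minimal sketch of the header decoding perfrevlogindex performs on the raw
# index data: the first four bytes are a big-endian word whose low 16 bits hold
# the revlog version and whose bit 16 is the "inline data" flag (layout taken
# from the code above, not from the wider revlog format documentation).
import struct

def parseheader(data):
    header = struct.unpack('>I', data[0:4])[0]
    version = header & 0xFFFF
    inline = bool(header & (1 << 16))
    return version, inline

# a fabricated four-byte header: version 1 with the inline flag set
print(parseheader(struct.pack('>I', (1 << 16) | 1)))    # -> (1, True)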
1260 @command('perfrevlogrevisions', revlogopts + formatteropts +
1261 @command('perfrevlogrevisions', revlogopts + formatteropts +
1261 [('d', 'dist', 100, 'distance between the revisions'),
1262 [('d', 'dist', 100, 'distance between the revisions'),
1262 ('s', 'startrev', 0, 'revision to start reading at'),
1263 ('s', 'startrev', 0, 'revision to start reading at'),
1263 ('', 'reverse', False, 'read in reverse')],
1264 ('', 'reverse', False, 'read in reverse')],
1264 '-c|-m|FILE')
1265 '-c|-m|FILE')
1265 def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
1266 def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
1266 **opts):
1267 **opts):
1267 """Benchmark reading a series of revisions from a revlog.
1268 """Benchmark reading a series of revisions from a revlog.
1268
1269
1269 By default, we read every ``-d/--dist`` revision from 0 to tip of
1270 By default, we read every ``-d/--dist`` revision from 0 to tip of
1270 the specified revlog.
1271 the specified revlog.
1271
1272
1272 The start revision can be defined via ``-s/--startrev``.
1273 The start revision can be defined via ``-s/--startrev``.
1273 """
1274 """
1274 rl = cmdutil.openrevlog(repo, 'perfrevlogrevisions', file_, opts)
1275 rl = cmdutil.openrevlog(repo, 'perfrevlogrevisions', file_, opts)
1275 rllen = getlen(ui)(rl)
1276 rllen = getlen(ui)(rl)
1276
1277
1277 def d():
1278 def d():
1278 rl.clearcaches()
1279 rl.clearcaches()
1279
1280
1280 beginrev = startrev
1281 beginrev = startrev
1281 endrev = rllen
1282 endrev = rllen
1282 dist = opts['dist']
1283 dist = opts['dist']
1283
1284
1284 if reverse:
1285 if reverse:
1285 beginrev, endrev = endrev, beginrev
1286 beginrev, endrev = endrev, beginrev
1286 dist = -1 * dist
1287 dist = -1 * dist
1287
1288
1288 for x in xrange(beginrev, endrev, dist):
1289 for x in xrange(beginrev, endrev, dist):
1289 # Old revisions don't support passing int.
1290 # Old revisions don't support passing int.
1290 n = rl.node(x)
1291 n = rl.node(x)
1291 rl.revision(n)
1292 rl.revision(n)
1292
1293
1293 timer, fm = gettimer(ui, opts)
1294 timer, fm = gettimer(ui, opts)
1294 timer(d)
1295 timer(d)
1295 fm.end()
1296 fm.end()
1296
1297
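# The stride/reverse arithmetic perfrevlogrevisions applies, shown on plain
# integers instead of revlog revisions (the 450-revision length is invented).
def revstotouch(rllen, startrev=0, dist=100, reverse=False):
    beginrev, endrev = startrev, rllen
    if reverse:
        beginrev, endrev = endrev, beginrev
        dist = -1 * dist
    return list(range(beginrev, endrev, dist))

print(revstotouch(450))                 # -> [0, 100, 200, 300, 400]
print(revstotouch(450, reverse=True))   # -> [450, 350, 250, 150, 50]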
1297 @command('perfrevlogchunks', revlogopts + formatteropts +
1298 @command('perfrevlogchunks', revlogopts + formatteropts +
1298 [('e', 'engines', '', 'compression engines to use'),
1299 [('e', 'engines', '', 'compression engines to use'),
1299 ('s', 'startrev', 0, 'revision to start at')],
1300 ('s', 'startrev', 0, 'revision to start at')],
1300 '-c|-m|FILE')
1301 '-c|-m|FILE')
1301 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
1302 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
1302 """Benchmark operations on revlog chunks.
1303 """Benchmark operations on revlog chunks.
1303
1304
1304 Logically, each revlog is a collection of fulltext revisions. However,
1305 Logically, each revlog is a collection of fulltext revisions. However,
1305 stored within each revlog are "chunks" of possibly compressed data. This
1306 stored within each revlog are "chunks" of possibly compressed data. This
1306 data needs to be read and decompressed or compressed and written.
1307 data needs to be read and decompressed or compressed and written.
1307
1308
1308 This command measures the time it takes to read+decompress and recompress
1309 This command measures the time it takes to read+decompress and recompress
1309 chunks in a revlog. It effectively isolates I/O and compression performance.
1310 chunks in a revlog. It effectively isolates I/O and compression performance.
1310 For measurements of higher-level operations like resolving revisions,
1311 For measurements of higher-level operations like resolving revisions,
1311 see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
1312 see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
1312 """
1313 """
1313 rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
1314 rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
1314
1315
1315 # _chunkraw was renamed to _getsegmentforrevs.
1316 # _chunkraw was renamed to _getsegmentforrevs.
1316 try:
1317 try:
1317 segmentforrevs = rl._getsegmentforrevs
1318 segmentforrevs = rl._getsegmentforrevs
1318 except AttributeError:
1319 except AttributeError:
1319 segmentforrevs = rl._chunkraw
1320 segmentforrevs = rl._chunkraw
1320
1321
1321 # Verify engines argument.
1322 # Verify engines argument.
1322 if engines:
1323 if engines:
1323 engines = set(e.strip() for e in engines.split(','))
1324 engines = set(e.strip() for e in engines.split(','))
1324 for engine in engines:
1325 for engine in engines:
1325 try:
1326 try:
1326 util.compressionengines[engine]
1327 util.compressionengines[engine]
1327 except KeyError:
1328 except KeyError:
1328 raise error.Abort('unknown compression engine: %s' % engine)
1329 raise error.Abort('unknown compression engine: %s' % engine)
1329 else:
1330 else:
1330 engines = []
1331 engines = []
1331 for e in util.compengines:
1332 for e in util.compengines:
1332 engine = util.compengines[e]
1333 engine = util.compengines[e]
1333 try:
1334 try:
1334 if engine.available():
1335 if engine.available():
1335 engine.revlogcompressor().compress('dummy')
1336 engine.revlogcompressor().compress('dummy')
1336 engines.append(e)
1337 engines.append(e)
1337 except NotImplementedError:
1338 except NotImplementedError:
1338 pass
1339 pass
1339
1340
1340 revs = list(rl.revs(startrev, len(rl) - 1))
1341 revs = list(rl.revs(startrev, len(rl) - 1))
1341
1342
1342 def rlfh(rl):
1343 def rlfh(rl):
1343 if rl._inline:
1344 if rl._inline:
1344 return getsvfs(repo)(rl.indexfile)
1345 return getsvfs(repo)(rl.indexfile)
1345 else:
1346 else:
1346 return getsvfs(repo)(rl.datafile)
1347 return getsvfs(repo)(rl.datafile)
1347
1348
1348 def doread():
1349 def doread():
1349 rl.clearcaches()
1350 rl.clearcaches()
1350 for rev in revs:
1351 for rev in revs:
1351 segmentforrevs(rev, rev)
1352 segmentforrevs(rev, rev)
1352
1353
1353 def doreadcachedfh():
1354 def doreadcachedfh():
1354 rl.clearcaches()
1355 rl.clearcaches()
1355 fh = rlfh(rl)
1356 fh = rlfh(rl)
1356 for rev in revs:
1357 for rev in revs:
1357 segmentforrevs(rev, rev, df=fh)
1358 segmentforrevs(rev, rev, df=fh)
1358
1359
1359 def doreadbatch():
1360 def doreadbatch():
1360 rl.clearcaches()
1361 rl.clearcaches()
1361 segmentforrevs(revs[0], revs[-1])
1362 segmentforrevs(revs[0], revs[-1])
1362
1363
1363 def doreadbatchcachedfh():
1364 def doreadbatchcachedfh():
1364 rl.clearcaches()
1365 rl.clearcaches()
1365 fh = rlfh(rl)
1366 fh = rlfh(rl)
1366 segmentforrevs(revs[0], revs[-1], df=fh)
1367 segmentforrevs(revs[0], revs[-1], df=fh)
1367
1368
1368 def dochunk():
1369 def dochunk():
1369 rl.clearcaches()
1370 rl.clearcaches()
1370 fh = rlfh(rl)
1371 fh = rlfh(rl)
1371 for rev in revs:
1372 for rev in revs:
1372 rl._chunk(rev, df=fh)
1373 rl._chunk(rev, df=fh)
1373
1374
1374 chunks = [None]
1375 chunks = [None]
1375
1376
1376 def dochunkbatch():
1377 def dochunkbatch():
1377 rl.clearcaches()
1378 rl.clearcaches()
1378 fh = rlfh(rl)
1379 fh = rlfh(rl)
1379 # Save chunks as a side-effect.
1380 # Save chunks as a side-effect.
1380 chunks[0] = rl._chunks(revs, df=fh)
1381 chunks[0] = rl._chunks(revs, df=fh)
1381
1382
1382 def docompress(compressor):
1383 def docompress(compressor):
1383 rl.clearcaches()
1384 rl.clearcaches()
1384
1385
1385 try:
1386 try:
1386 # Swap in the requested compression engine.
1387 # Swap in the requested compression engine.
1387 oldcompressor = rl._compressor
1388 oldcompressor = rl._compressor
1388 rl._compressor = compressor
1389 rl._compressor = compressor
1389 for chunk in chunks[0]:
1390 for chunk in chunks[0]:
1390 rl.compress(chunk)
1391 rl.compress(chunk)
1391 finally:
1392 finally:
1392 rl._compressor = oldcompressor
1393 rl._compressor = oldcompressor
1393
1394
1394 benches = [
1395 benches = [
1395 (lambda: doread(), 'read'),
1396 (lambda: doread(), 'read'),
1396 (lambda: doreadcachedfh(), 'read w/ reused fd'),
1397 (lambda: doreadcachedfh(), 'read w/ reused fd'),
1397 (lambda: doreadbatch(), 'read batch'),
1398 (lambda: doreadbatch(), 'read batch'),
1398 (lambda: doreadbatchcachedfh(), 'read batch w/ reused fd'),
1399 (lambda: doreadbatchcachedfh(), 'read batch w/ reused fd'),
1399 (lambda: dochunk(), 'chunk'),
1400 (lambda: dochunk(), 'chunk'),
1400 (lambda: dochunkbatch(), 'chunk batch'),
1401 (lambda: dochunkbatch(), 'chunk batch'),
1401 ]
1402 ]
1402
1403
1403 for engine in sorted(engines):
1404 for engine in sorted(engines):
1404 compressor = util.compengines[engine].revlogcompressor()
1405 compressor = util.compengines[engine].revlogcompressor()
1405 benches.append((functools.partial(docompress, compressor),
1406 benches.append((functools.partial(docompress, compressor),
1406 'compress w/ %s' % engine))
1407 'compress w/ %s' % engine))
1407
1408
1408 for fn, title in benches:
1409 for fn, title in benches:
1409 timer, fm = gettimer(ui, opts)
1410 timer, fm = gettimer(ui, opts)
1410 timer(fn, title=title)
1411 timer(fn, title=title)
1411 fm.end()
1412 fm.end()
1412
1413
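# Stand-alone analogue of the "compress the same chunks with each engine" loop
# at the end of perfrevlogchunks, using stdlib codecs instead of Mercurial's
# compression engines (zlib and bz2 are arbitrary stand-ins for the demo).
import bz2
import time
import zlib

chunks = [('chunk %d ' % i).encode('ascii') * 100 for i in range(200)]
engines = {
    'zlib': zlib.compress,
    'bz2': bz2.compress,
}
for name in sorted(engines):
    compress = engines[name]
    begin = time.time()
    for chunk in chunks:
        compress(chunk)
    print('compress w/ %s: %.6f s' % (name, time.time() - begin))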
1413 @command('perfrevlogrevision', revlogopts + formatteropts +
1414 @command('perfrevlogrevision', revlogopts + formatteropts +
1414 [('', 'cache', False, 'use caches instead of clearing')],
1415 [('', 'cache', False, 'use caches instead of clearing')],
1415 '-c|-m|FILE REV')
1416 '-c|-m|FILE REV')
1416 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1417 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1417 """Benchmark obtaining a revlog revision.
1418 """Benchmark obtaining a revlog revision.
1418
1419
1419 Obtaining a revlog revision consists of roughly the following steps:
1420 Obtaining a revlog revision consists of roughly the following steps:
1420
1421
1421 1. Compute the delta chain
1422 1. Compute the delta chain
1422 2. Obtain the raw chunks for that delta chain
1423 2. Obtain the raw chunks for that delta chain
1423 3. Decompress each raw chunk
1424 3. Decompress each raw chunk
1424 4. Apply binary patches to obtain fulltext
1425 4. Apply binary patches to obtain fulltext
1425 5. Verify hash of fulltext
1426 5. Verify hash of fulltext
1426
1427
1427 This command measures the time spent in each of these phases.
1428 This command measures the time spent in each of these phases.
1428 """
1429 """
1429 if opts.get('changelog') or opts.get('manifest'):
1430 if opts.get('changelog') or opts.get('manifest'):
1430 file_, rev = None, file_
1431 file_, rev = None, file_
1431 elif rev is None:
1432 elif rev is None:
1432 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1433 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1433
1434
1434 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1435 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1435
1436
1436 # _chunkraw was renamed to _getsegmentforrevs.
1437 # _chunkraw was renamed to _getsegmentforrevs.
1437 try:
1438 try:
1438 segmentforrevs = r._getsegmentforrevs
1439 segmentforrevs = r._getsegmentforrevs
1439 except AttributeError:
1440 except AttributeError:
1440 segmentforrevs = r._chunkraw
1441 segmentforrevs = r._chunkraw
1441
1442
1442 node = r.lookup(rev)
1443 node = r.lookup(rev)
1443 rev = r.rev(node)
1444 rev = r.rev(node)
1444
1445
1445 def getrawchunks(data, chain):
1446 def getrawchunks(data, chain):
1446 start = r.start
1447 start = r.start
1447 length = r.length
1448 length = r.length
1448 inline = r._inline
1449 inline = r._inline
1449 iosize = r._io.size
1450 iosize = r._io.size
1450 buffer = util.buffer
1451 buffer = util.buffer
1451 offset = start(chain[0])
1452 offset = start(chain[0])
1452
1453
1453 chunks = []
1454 chunks = []
1454 ladd = chunks.append
1455 ladd = chunks.append
1455
1456
1456 for rev in chain:
1457 for rev in chain:
1457 chunkstart = start(rev)
1458 chunkstart = start(rev)
1458 if inline:
1459 if inline:
1459 chunkstart += (rev + 1) * iosize
1460 chunkstart += (rev + 1) * iosize
1460 chunklength = length(rev)
1461 chunklength = length(rev)
1461 ladd(buffer(data, chunkstart - offset, chunklength))
1462 ladd(buffer(data, chunkstart - offset, chunklength))
1462
1463
1463 return chunks
1464 return chunks
1464
1465
1465 def dodeltachain(rev):
1466 def dodeltachain(rev):
1466 if not cache:
1467 if not cache:
1467 r.clearcaches()
1468 r.clearcaches()
1468 r._deltachain(rev)
1469 r._deltachain(rev)
1469
1470
1470 def doread(chain):
1471 def doread(chain):
1471 if not cache:
1472 if not cache:
1472 r.clearcaches()
1473 r.clearcaches()
1473 segmentforrevs(chain[0], chain[-1])
1474 segmentforrevs(chain[0], chain[-1])
1474
1475
1475 def dorawchunks(data, chain):
1476 def dorawchunks(data, chain):
1476 if not cache:
1477 if not cache:
1477 r.clearcaches()
1478 r.clearcaches()
1478 getrawchunks(data, chain)
1479 getrawchunks(data, chain)
1479
1480
1480 def dodecompress(chunks):
1481 def dodecompress(chunks):
1481 decomp = r.decompress
1482 decomp = r.decompress
1482 for chunk in chunks:
1483 for chunk in chunks:
1483 decomp(chunk)
1484 decomp(chunk)
1484
1485
1485 def dopatch(text, bins):
1486 def dopatch(text, bins):
1486 if not cache:
1487 if not cache:
1487 r.clearcaches()
1488 r.clearcaches()
1488 mdiff.patches(text, bins)
1489 mdiff.patches(text, bins)
1489
1490
1490 def dohash(text):
1491 def dohash(text):
1491 if not cache:
1492 if not cache:
1492 r.clearcaches()
1493 r.clearcaches()
1493 r.checkhash(text, node, rev=rev)
1494 r.checkhash(text, node, rev=rev)
1494
1495
1495 def dorevision():
1496 def dorevision():
1496 if not cache:
1497 if not cache:
1497 r.clearcaches()
1498 r.clearcaches()
1498 r.revision(node)
1499 r.revision(node)
1499
1500
1500 chain = r._deltachain(rev)[0]
1501 chain = r._deltachain(rev)[0]
1501 data = segmentforrevs(chain[0], chain[-1])[1]
1502 data = segmentforrevs(chain[0], chain[-1])[1]
1502 rawchunks = getrawchunks(data, chain)
1503 rawchunks = getrawchunks(data, chain)
1503 bins = r._chunks(chain)
1504 bins = r._chunks(chain)
1504 text = str(bins[0])
1505 text = str(bins[0])
1505 bins = bins[1:]
1506 bins = bins[1:]
1506 text = mdiff.patches(text, bins)
1507 text = mdiff.patches(text, bins)
1507
1508
1508 benches = [
1509 benches = [
1509 (lambda: dorevision(), 'full'),
1510 (lambda: dorevision(), 'full'),
1510 (lambda: dodeltachain(rev), 'deltachain'),
1511 (lambda: dodeltachain(rev), 'deltachain'),
1511 (lambda: doread(chain), 'read'),
1512 (lambda: doread(chain), 'read'),
1512 (lambda: dorawchunks(data, chain), 'rawchunks'),
1513 (lambda: dorawchunks(data, chain), 'rawchunks'),
1513 (lambda: dodecompress(rawchunks), 'decompress'),
1514 (lambda: dodecompress(rawchunks), 'decompress'),
1514 (lambda: dopatch(text, bins), 'patch'),
1515 (lambda: dopatch(text, bins), 'patch'),
1515 (lambda: dohash(text), 'hash'),
1516 (lambda: dohash(text), 'hash'),
1516 ]
1517 ]
1517
1518
1518 for fn, title in benches:
1519 for fn, title in benches:
1519 timer, fm = gettimer(ui, opts)
1520 timer, fm = gettimer(ui, opts)
1520 timer(fn, title=title)
1521 timer(fn, title=title)
1521 fm.end()
1522 fm.end()
1522
1523
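# Sketch of the slicing arithmetic getrawchunks() uses above: every chunk is cut
# out of one contiguous segment relative to the start of the delta chain, and
# for inline revlogs each chunk start is shifted by (rev + 1) * iosize because
# index entries and data are interleaved in a single file. The toy start/length
# functions and sizes below are invented for the demonstration.
def chunkslices(chain, start, length, iosize, inline):
    offset = start(chain[0])
    slices = []
    for rev in chain:
        chunkstart = start(rev)
        if inline:
            chunkstart += (rev + 1) * iosize
        slices.append((chunkstart - offset, chunkstart - offset + length(rev)))
    return slices

# revision n starts at byte 100 * n and is 40 bytes long; non-inline revlog
print(chunkslices([0, 1, 2], lambda r: 100 * r, lambda r: 40, 64, False))
# -> [(0, 40), (100, 140), (200, 240)]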
1523 @command('perfrevset',
1524 @command('perfrevset',
1524 [('C', 'clear', False, 'clear volatile cache between each call.'),
1525 [('C', 'clear', False, 'clear volatile cache between each call.'),
1525 ('', 'contexts', False, 'obtain changectx for each revision')]
1526 ('', 'contexts', False, 'obtain changectx for each revision')]
1526 + formatteropts, "REVSET")
1527 + formatteropts, "REVSET")
1527 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1528 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1528 """benchmark the execution time of a revset
1529 """benchmark the execution time of a revset
1529
1530
1530 Use the --clean option if need to evaluate the impact of build volatile
1531 Use the --clean option if need to evaluate the impact of build volatile
1531 revisions set cache on the revset execution. Volatile cache hold filtered
1532 revisions set cache on the revset execution. Volatile cache hold filtered
1532 and obsolete related cache."""
1533 and obsolete related cache."""
1533 timer, fm = gettimer(ui, opts)
1534 timer, fm = gettimer(ui, opts)
1534 def d():
1535 def d():
1535 if clear:
1536 if clear:
1536 repo.invalidatevolatilesets()
1537 repo.invalidatevolatilesets()
1537 if contexts:
1538 if contexts:
1538 for ctx in repo.set(expr): pass
1539 for ctx in repo.set(expr): pass
1539 else:
1540 else:
1540 for r in repo.revs(expr): pass
1541 for r in repo.revs(expr): pass
1541 timer(d)
1542 timer(d)
1542 fm.end()
1543 fm.end()
1543
1544
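# Hypothetical helper (assumes a Mercurial repo object is already in hand; not
# part of the patch) contrasting the two iteration modes perfrevset times:
# repo.revs() yields integer revision numbers while repo.set() yields changectx
# objects, which is exactly what the --contexts flag toggles above.
def countrevset(repo, expr, contexts=False):
    n = 0
    if contexts:
        for ctx in repo.set(expr):     # full change contexts
            n += 1
    else:
        for r in repo.revs(expr):      # plain revision numbers
            n += 1
    return n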
1544 @command('perfvolatilesets',
1545 @command('perfvolatilesets',
1545 [('', 'clear-obsstore', False, 'drop obsstore between each call.'),
1546 [('', 'clear-obsstore', False, 'drop obsstore between each call.'),
1546 ] + formatteropts)
1547 ] + formatteropts)
1547 def perfvolatilesets(ui, repo, *names, **opts):
1548 def perfvolatilesets(ui, repo, *names, **opts):
1548 """benchmark the computation of various volatile set
1549 """benchmark the computation of various volatile set
1549
1550
1550 Volatile set computes element related to filtering and obsolescence."""
1551 Volatile set computes element related to filtering and obsolescence."""
1551 timer, fm = gettimer(ui, opts)
1552 timer, fm = gettimer(ui, opts)
1552 repo = repo.unfiltered()
1553 repo = repo.unfiltered()
1553
1554
1554 def getobs(name):
1555 def getobs(name):
1555 def d():
1556 def d():
1556 repo.invalidatevolatilesets()
1557 repo.invalidatevolatilesets()
1557 if opts['clear_obsstore']:
1558 if opts['clear_obsstore']:
1558 clearfilecache(repo, 'obsstore')
1559 clearfilecache(repo, 'obsstore')
1559 obsolete.getrevs(repo, name)
1560 obsolete.getrevs(repo, name)
1560 return d
1561 return d
1561
1562
1562 allobs = sorted(obsolete.cachefuncs)
1563 allobs = sorted(obsolete.cachefuncs)
1563 if names:
1564 if names:
1564 allobs = [n for n in allobs if n in names]
1565 allobs = [n for n in allobs if n in names]
1565
1566
1566 for name in allobs:
1567 for name in allobs:
1567 timer(getobs(name), title=name)
1568 timer(getobs(name), title=name)
1568
1569
1569 def getfiltered(name):
1570 def getfiltered(name):
1570 def d():
1571 def d():
1571 repo.invalidatevolatilesets()
1572 repo.invalidatevolatilesets()
1572 if opts['clear_obsstore']:
1573 if opts['clear_obsstore']:
1573 clearfilecache(repo, 'obsstore')
1574 clearfilecache(repo, 'obsstore')
1574 repoview.filterrevs(repo, name)
1575 repoview.filterrevs(repo, name)
1575 return d
1576 return d
1576
1577
1577 allfilter = sorted(repoview.filtertable)
1578 allfilter = sorted(repoview.filtertable)
1578 if names:
1579 if names:
1579 allfilter = [n for n in allfilter if n in names]
1580 allfilter = [n for n in allfilter if n in names]
1580
1581
1581 for name in allfilter:
1582 for name in allfilter:
1582 timer(getfiltered(name), title=name)
1583 timer(getfiltered(name), title=name)
1583 fm.end()
1584 fm.end()
1584
1585
1585 @command('perfbranchmap',
1586 @command('perfbranchmap',
1586 [('f', 'full', False,
1587 [('f', 'full', False,
1587 'Includes build time of subset'),
1588 'Includes build time of subset'),
1588 ('', 'clear-revbranch', False,
1589 ('', 'clear-revbranch', False,
1589 'purge the revbranch cache between computations'),
1590 'purge the revbranch cache between computations'),
1590 ] + formatteropts)
1591 ] + formatteropts)
1591 def perfbranchmap(ui, repo, *filternames, **opts):
1592 def perfbranchmap(ui, repo, *filternames, **opts):
1592 """benchmark the update of a branchmap
1593 """benchmark the update of a branchmap
1593
1594
1594 This benchmarks the full repo.branchmap() call with on-disk cache reads and writes disabled.
1595 This benchmarks the full repo.branchmap() call with on-disk cache reads and writes disabled.
1595 """
1596 """
1596 full = opts.get("full", False)
1597 full = opts.get("full", False)
1597 clear_revbranch = opts.get("clear_revbranch", False)
1598 clear_revbranch = opts.get("clear_revbranch", False)
1598 timer, fm = gettimer(ui, opts)
1599 timer, fm = gettimer(ui, opts)
1599 def getbranchmap(filtername):
1600 def getbranchmap(filtername):
1600 """generate a benchmark function for the filtername"""
1601 """generate a benchmark function for the filtername"""
1601 if filtername is None:
1602 if filtername is None:
1602 view = repo
1603 view = repo
1603 else:
1604 else:
1604 view = repo.filtered(filtername)
1605 view = repo.filtered(filtername)
1605 def d():
1606 def d():
1606 if clear_revbranch:
1607 if clear_revbranch:
1607 repo.revbranchcache()._clear()
1608 repo.revbranchcache()._clear()
1608 if full:
1609 if full:
1609 view._branchcaches.clear()
1610 view._branchcaches.clear()
1610 else:
1611 else:
1611 view._branchcaches.pop(filtername, None)
1612 view._branchcaches.pop(filtername, None)
1612 view.branchmap()
1613 view.branchmap()
1613 return d
1614 return d
1614 # add filter in smaller subset to bigger subset
1615 # add filter in smaller subset to bigger subset
1615 possiblefilters = set(repoview.filtertable)
1616 possiblefilters = set(repoview.filtertable)
1616 if filternames:
1617 if filternames:
1617 possiblefilters &= set(filternames)
1618 possiblefilters &= set(filternames)
1618 subsettable = getbranchmapsubsettable()
1619 subsettable = getbranchmapsubsettable()
1619 allfilters = []
1620 allfilters = []
1620 while possiblefilters:
1621 while possiblefilters:
1621 for name in possiblefilters:
1622 for name in possiblefilters:
1622 subset = subsettable.get(name)
1623 subset = subsettable.get(name)
1623 if subset not in possiblefilters:
1624 if subset not in possiblefilters:
1624 break
1625 break
1625 else:
1626 else:
1626 assert False, 'subset cycle %s!' % possiblefilters
1627 assert False, 'subset cycle %s!' % possiblefilters
1627 allfilters.append(name)
1628 allfilters.append(name)
1628 possiblefilters.remove(name)
1629 possiblefilters.remove(name)
1629
1630
1630 # warm the cache
1631 # warm the cache
1631 if not full:
1632 if not full:
1632 for name in allfilters:
1633 for name in allfilters:
1633 repo.filtered(name).branchmap()
1634 repo.filtered(name).branchmap()
1634 if not filternames or 'unfiltered' in filternames:
1635 if not filternames or 'unfiltered' in filternames:
1635 # add unfiltered
1636 # add unfiltered
1636 allfilters.append(None)
1637 allfilters.append(None)
1637
1638
1638 branchcacheread = safeattrsetter(branchmap, 'read')
1639 branchcacheread = safeattrsetter(branchmap, 'read')
1639 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1640 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1640 branchcacheread.set(lambda repo: None)
1641 branchcacheread.set(lambda repo: None)
1641 branchcachewrite.set(lambda bc, repo: None)
1642 branchcachewrite.set(lambda bc, repo: None)
1642 try:
1643 try:
1643 for name in allfilters:
1644 for name in allfilters:
1644 printname = name
1645 printname = name
1645 if name is None:
1646 if name is None:
1646 printname = 'unfiltered'
1647 printname = 'unfiltered'
1647 timer(getbranchmap(name), title=str(printname))
1648 timer(getbranchmap(name), title=str(printname))
1648 finally:
1649 finally:
1649 branchcacheread.restore()
1650 branchcacheread.restore()
1650 branchcachewrite.restore()
1651 branchcachewrite.restore()
1651 fm.end()
1652 fm.end()
1652
1653
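# Stand-alone sketch of the ordering loop in perfbranchmap: repeatedly pick a
# filter whose "subset" is no longer pending, so smaller subsets are handled
# (and their caches warmed) before the filters built on top of them; the
# for/else turns an exhausted scan into a cycle check. The subsettable below is
# invented for the demo, not taken from repoview.
subsettable = {'visible': 'served', 'served': 'immutable', 'immutable': 'base'}
possiblefilters = set(subsettable)
allfilters = []
while possiblefilters:
    for name in possiblefilters:
        if subsettable.get(name) not in possiblefilters:
            break
    else:
        raise AssertionError('subset cycle %s!' % possiblefilters)
    allfilters.append(name)
    possiblefilters.remove(name)
print(allfilters)   # -> ['immutable', 'served', 'visible']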
1653 @command('perfloadmarkers')
1654 @command('perfloadmarkers')
1654 def perfloadmarkers(ui, repo):
1655 def perfloadmarkers(ui, repo):
1655 """benchmark the time to parse the on-disk markers for a repo
1656 """benchmark the time to parse the on-disk markers for a repo
1656
1657
1657 Result is the number of markers in the repo."""
1658 Result is the number of markers in the repo."""
1658 timer, fm = gettimer(ui)
1659 timer, fm = gettimer(ui)
1659 svfs = getsvfs(repo)
1660 svfs = getsvfs(repo)
1660 timer(lambda: len(obsolete.obsstore(svfs)))
1661 timer(lambda: len(obsolete.obsstore(svfs)))
1661 fm.end()
1662 fm.end()
1662
1663
1663 @command('perflrucachedict', formatteropts +
1664 @command('perflrucachedict', formatteropts +
1664 [('', 'size', 4, 'size of cache'),
1665 [('', 'size', 4, 'size of cache'),
1665 ('', 'gets', 10000, 'number of key lookups'),
1666 ('', 'gets', 10000, 'number of key lookups'),
1666 ('', 'sets', 10000, 'number of key sets'),
1667 ('', 'sets', 10000, 'number of key sets'),
1667 ('', 'mixed', 10000, 'number of mixed mode operations'),
1668 ('', 'mixed', 10000, 'number of mixed mode operations'),
1668 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1669 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1669 norepo=True)
1670 norepo=True)
1670 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1671 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1671 mixedgetfreq=50, **opts):
1672 mixedgetfreq=50, **opts):
1672 def doinit():
1673 def doinit():
1673 for i in xrange(10000):
1674 for i in xrange(10000):
1674 util.lrucachedict(size)
1675 util.lrucachedict(size)
1675
1676
1676 values = []
1677 values = []
1677 for i in xrange(size):
1678 for i in xrange(size):
1678 values.append(random.randint(0, sys.maxint))
1679 values.append(random.randint(0, sys.maxint))
1679
1680
1680 # Get mode fills the cache and tests raw lookup performance with no
1681 # Get mode fills the cache and tests raw lookup performance with no
1681 # eviction.
1682 # eviction.
1682 getseq = []
1683 getseq = []
1683 for i in xrange(gets):
1684 for i in xrange(gets):
1684 getseq.append(random.choice(values))
1685 getseq.append(random.choice(values))
1685
1686
1686 def dogets():
1687 def dogets():
1687 d = util.lrucachedict(size)
1688 d = util.lrucachedict(size)
1688 for v in values:
1689 for v in values:
1689 d[v] = v
1690 d[v] = v
1690 for key in getseq:
1691 for key in getseq:
1691 value = d[key]
1692 value = d[key]
1692 value # silence pyflakes warning
1693 value # silence pyflakes warning
1693
1694
1694 # Set mode tests insertion speed with cache eviction.
1695 # Set mode tests insertion speed with cache eviction.
1695 setseq = []
1696 setseq = []
1696 for i in xrange(sets):
1697 for i in xrange(sets):
1697 setseq.append(random.randint(0, sys.maxint))
1698 setseq.append(random.randint(0, sys.maxint))
1698
1699
1699 def dosets():
1700 def dosets():
1700 d = util.lrucachedict(size)
1701 d = util.lrucachedict(size)
1701 for v in setseq:
1702 for v in setseq:
1702 d[v] = v
1703 d[v] = v
1703
1704
1704 # Mixed mode randomly performs gets and sets with eviction.
1705 # Mixed mode randomly performs gets and sets with eviction.
1705 mixedops = []
1706 mixedops = []
1706 for i in xrange(mixed):
1707 for i in xrange(mixed):
1707 r = random.randint(0, 100)
1708 r = random.randint(0, 100)
1708 if r < mixedgetfreq:
1709 if r < mixedgetfreq:
1709 op = 0
1710 op = 0
1710 else:
1711 else:
1711 op = 1
1712 op = 1
1712
1713
1713 mixedops.append((op, random.randint(0, size * 2)))
1714 mixedops.append((op, random.randint(0, size * 2)))
1714
1715
1715 def domixed():
1716 def domixed():
1716 d = util.lrucachedict(size)
1717 d = util.lrucachedict(size)
1717
1718
1718 for op, v in mixedops:
1719 for op, v in mixedops:
1719 if op == 0:
1720 if op == 0:
1720 try:
1721 try:
1721 d[v]
1722 d[v]
1722 except KeyError:
1723 except KeyError:
1723 pass
1724 pass
1724 else:
1725 else:
1725 d[v] = v
1726 d[v] = v
1726
1727
1727 benches = [
1728 benches = [
1728 (doinit, 'init'),
1729 (doinit, 'init'),
1729 (dogets, 'gets'),
1730 (dogets, 'gets'),
1730 (dosets, 'sets'),
1731 (dosets, 'sets'),
1731 (domixed, 'mixed')
1732 (domixed, 'mixed')
1732 ]
1733 ]
1733
1734
1734 for fn, title in benches:
1735 for fn, title in benches:
1735 timer, fm = gettimer(ui, opts)
1736 timer, fm = gettimer(ui, opts)
1736 timer(fn, title=title)
1737 timer(fn, title=title)
1737 fm.end()
1738 fm.end()
1738
1739
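# Stand-alone sketch (collections.OrderedDict, not Mercurial's util.lrucachedict)
# of the behaviour the gets/sets/mixed benchmarks exercise: inserting past `size`
# evicts the least recently used key, and a successful lookup refreshes recency.
import collections

class tinylru(object):
    def __init__(self, size):
        self.size = size
        self._d = collections.OrderedDict()
    def __getitem__(self, key):
        value = self._d.pop(key)          # raises KeyError on a miss
        self._d[key] = value              # re-insert to mark as most recently used
        return value
    def __setitem__(self, key, value):
        if key in self._d:
            self._d.pop(key)
        elif len(self._d) >= self.size:
            self._d.popitem(last=False)   # evict the least recently used entry
        self._d[key] = value

d = tinylru(4)
for v in range(6):
    d[v] = v
print(sorted(d._d))   # -> [2, 3, 4, 5]; keys 0 and 1 were evicted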
1739 @command('perfwrite', formatteropts)
1740 @command('perfwrite', formatteropts)
1740 def perfwrite(ui, repo, **opts):
1741 def perfwrite(ui, repo, **opts):
1741 """microbenchmark ui.write
1742 """microbenchmark ui.write
1742 """
1743 """
1743 timer, fm = gettimer(ui, opts)
1744 timer, fm = gettimer(ui, opts)
1744 def write():
1745 def write():
1745 for i in range(100000):
1746 for i in range(100000):
1746 ui.write(('Testing write performance\n'))
1747 ui.write(('Testing write performance\n'))
1747 timer(write)
1748 timer(write)
1748 fm.end()
1749 fm.end()
1749
1750
1750 def uisetup(ui):
1751 def uisetup(ui):
1751 if (util.safehasattr(cmdutil, 'openrevlog') and
1752 if (util.safehasattr(cmdutil, 'openrevlog') and
1752 not util.safehasattr(commands, 'debugrevlogopts')):
1753 not util.safehasattr(commands, 'debugrevlogopts')):
1753 # for "historical portability":
1754 # for "historical portability":
1754 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1755 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1755 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
1756 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
1756 # openrevlog() should cause failure, because it has been
1757 # openrevlog() should cause failure, because it has been
1757 # available since 3.5 (or 49c583ca48c4).
1758 # available since 3.5 (or 49c583ca48c4).
1758 def openrevlog(orig, repo, cmd, file_, opts):
1759 def openrevlog(orig, repo, cmd, file_, opts):
1759 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1760 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1760 raise error.Abort("This version doesn't support --dir option",
1761 raise error.Abort("This version doesn't support --dir option",
1761 hint="use 3.5 or later")
1762 hint="use 3.5 or later")
1762 return orig(repo, cmd, file_, opts)
1763 return orig(repo, cmd, file_, opts)
1763 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
1764 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
@@ -1,1496 +1,1495 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import copy
12 import copy
13 import os
13 import os
14
14
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16
16
17 from mercurial import (
17 from mercurial import (
18 archival,
18 archival,
19 cmdutil,
19 cmdutil,
20 error,
20 error,
21 hg,
21 hg,
22 logcmdutil,
22 logcmdutil,
23 match as matchmod,
23 match as matchmod,
24 pathutil,
24 pathutil,
25 pycompat,
25 pycompat,
26 registrar,
26 registrar,
27 scmutil,
27 scmutil,
28 smartset,
28 smartset,
29 util,
29 util,
30 )
30 )
31
31
32 from . import (
32 from . import (
33 lfcommands,
33 lfcommands,
34 lfutil,
34 lfutil,
35 storefactory,
35 storefactory,
36 )
36 )
37
37
38 # -- Utility functions: commonly/repeatedly needed functionality ---------------
38 # -- Utility functions: commonly/repeatedly needed functionality ---------------
39
39
40 def composelargefilematcher(match, manifest):
40 def composelargefilematcher(match, manifest):
41 '''create a matcher that matches only the largefiles in the original
41 '''create a matcher that matches only the largefiles in the original
42 matcher'''
42 matcher'''
43 m = copy.copy(match)
43 m = copy.copy(match)
44 lfile = lambda f: lfutil.standin(f) in manifest
44 lfile = lambda f: lfutil.standin(f) in manifest
45 m._files = [lf for lf in m._files if lfile(lf)]
45 m._files = [lf for lf in m._files if lfile(lf)]
46 m._fileset = set(m._files)
46 m._fileset = set(m._files)
47 m.always = lambda: False
47 m.always = lambda: False
48 origmatchfn = m.matchfn
48 origmatchfn = m.matchfn
49 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
49 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
50 return m
50 return m
51
51
52 def composenormalfilematcher(match, manifest, exclude=None):
52 def composenormalfilematcher(match, manifest, exclude=None):
53 excluded = set()
53 excluded = set()
54 if exclude is not None:
54 if exclude is not None:
55 excluded.update(exclude)
55 excluded.update(exclude)
56
56
57 m = copy.copy(match)
57 m = copy.copy(match)
58 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
58 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
59 manifest or f in excluded)
59 manifest or f in excluded)
60 m._files = [lf for lf in m._files if notlfile(lf)]
60 m._files = [lf for lf in m._files if notlfile(lf)]
61 m._fileset = set(m._files)
61 m._fileset = set(m._files)
62 m.always = lambda: False
62 m.always = lambda: False
63 origmatchfn = m.matchfn
63 origmatchfn = m.matchfn
64 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
64 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
65 return m
65 return m
66
66
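# Toy illustration of the matcher-narrowing trick the two compose*matcher
# helpers use: copy the original matcher, trim its file list, and AND its
# matchfn with an extra predicate. The SimpleMatcher class below is invented for
# the demo; real Mercurial matchers carry much more state.
import copy as copymod

class SimpleMatcher(object):
    def __init__(self, files):
        self._files = list(files)
        self.matchfn = lambda f: f in files

def composeonly(match, keep):
    m = copymod.copy(match)
    m._files = [f for f in m._files if keep(f)]
    origmatchfn = m.matchfn
    m.matchfn = lambda f: keep(f) and origmatchfn(f)
    return m

m = SimpleMatcher(['big.bin', 'small.txt'])
onlybin = composeonly(m, lambda f: f.endswith('.bin'))
print('%s %s' % (onlybin.matchfn('big.bin'), onlybin.matchfn('small.txt')))
# -> True False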
67 def installnormalfilesmatchfn(manifest):
67 def installnormalfilesmatchfn(manifest):
68 '''installmatchfn with a matchfn that ignores all largefiles'''
68 '''installmatchfn with a matchfn that ignores all largefiles'''
69 def overridematch(ctx, pats=(), opts=None, globbed=False,
69 def overridematch(ctx, pats=(), opts=None, globbed=False,
70 default='relpath', badfn=None):
70 default='relpath', badfn=None):
71 if opts is None:
71 if opts is None:
72 opts = {}
72 opts = {}
73 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
73 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
74 return composenormalfilematcher(match, manifest)
74 return composenormalfilematcher(match, manifest)
75 oldmatch = installmatchfn(overridematch)
75 oldmatch = installmatchfn(overridematch)
76
76
77 def installmatchfn(f):
77 def installmatchfn(f):
78 '''monkey patch the scmutil module with a custom match function.
78 '''monkey patch the scmutil module with a custom match function.
79 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
79 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
80 oldmatch = scmutil.match
80 oldmatch = scmutil.match
81 setattr(f, 'oldmatch', oldmatch)
81 setattr(f, 'oldmatch', oldmatch)
82 scmutil.match = f
82 scmutil.match = f
83 return oldmatch
83 return oldmatch
84
84
85 def restorematchfn():
85 def restorematchfn():
86 '''restores scmutil.match to what it was before installmatchfn
86 '''restores scmutil.match to what it was before installmatchfn
87 was called. no-op if scmutil.match is its original function.
87 was called. no-op if scmutil.match is its original function.
88
88
89 Note that n calls to installmatchfn will require n calls to
89 Note that n calls to installmatchfn will require n calls to
90 restore the original matchfn.'''
90 restore the original matchfn.'''
91 scmutil.match = getattr(scmutil.match, 'oldmatch')
91 scmutil.match = getattr(scmutil.match, 'oldmatch')
92
92
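# Stand-alone sketch of the "stash the previous function on its replacement"
# protocol that installmatchfn()/restorematchfn() implement; `scm` is a stand-in
# namespace object here, not the real scmutil module.
class _namespace(object):
    pass

scm = _namespace()
scm.match = lambda pats: 'original match for %r' % pats

def installmatchfn(f):
    oldmatch = scm.match
    setattr(f, 'oldmatch', oldmatch)   # remember what was replaced
    scm.match = f
    return oldmatch

def restorematchfn():
    # undo exactly one install: n installs need n restores
    scm.match = getattr(scm.match, 'oldmatch')

def overridematch(pats):
    return 'override for %r' % pats

installmatchfn(overridematch)
print(scm.match('*.py'))   # -> override for '*.py'
restorematchfn()
print(scm.match('*.py'))   # -> original match for '*.py'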
93 def installmatchandpatsfn(f):
93 def installmatchandpatsfn(f):
94 oldmatchandpats = scmutil.matchandpats
94 oldmatchandpats = scmutil.matchandpats
95 setattr(f, 'oldmatchandpats', oldmatchandpats)
95 setattr(f, 'oldmatchandpats', oldmatchandpats)
96 scmutil.matchandpats = f
96 scmutil.matchandpats = f
97 return oldmatchandpats
97 return oldmatchandpats
98
98
99 def restorematchandpatsfn():
99 def restorematchandpatsfn():
100 '''restores scmutil.matchandpats to what it was before
100 '''restores scmutil.matchandpats to what it was before
101 installmatchandpatsfn was called. No-op if scmutil.matchandpats
101 installmatchandpatsfn was called. No-op if scmutil.matchandpats
102 is its original function.
102 is its original function.
103
103
104 Note that n calls to installmatchandpatsfn will require n calls
104 Note that n calls to installmatchandpatsfn will require n calls
105 to restore the original matchfn.'''
105 to restore the original matchfn.'''
106 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
106 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
107 scmutil.matchandpats)
107 scmutil.matchandpats)
108
108
109 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
109 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
110 large = opts.get(r'large')
110 large = opts.get(r'large')
111 lfsize = lfutil.getminsize(
111 lfsize = lfutil.getminsize(
112 ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
112 ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
113
113
114 lfmatcher = None
114 lfmatcher = None
115 if lfutil.islfilesrepo(repo):
115 if lfutil.islfilesrepo(repo):
116 lfpats = ui.configlist(lfutil.longname, 'patterns')
116 lfpats = ui.configlist(lfutil.longname, 'patterns')
117 if lfpats:
117 if lfpats:
118 lfmatcher = matchmod.match(repo.root, '', list(lfpats))
118 lfmatcher = matchmod.match(repo.root, '', list(lfpats))
119
119
120 lfnames = []
120 lfnames = []
121 m = matcher
121 m = matcher
122
122
123 wctx = repo[None]
123 wctx = repo[None]
124 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
124 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
125 exact = m.exact(f)
125 exact = m.exact(f)
126 lfile = lfutil.standin(f) in wctx
126 lfile = lfutil.standin(f) in wctx
127 nfile = f in wctx
127 nfile = f in wctx
128 exists = lfile or nfile
128 exists = lfile or nfile
129
129
130 # addremove in core gets fancy with the name, add doesn't
130 # addremove in core gets fancy with the name, add doesn't
131 if isaddremove:
131 if isaddremove:
132 name = m.uipath(f)
132 name = m.uipath(f)
133 else:
133 else:
134 name = m.rel(f)
134 name = m.rel(f)
135
135
136 # Don't warn the user when they attempt to add a normal tracked file.
136 # Don't warn the user when they attempt to add a normal tracked file.
137 # The normal add code will do that for us.
137 # The normal add code will do that for us.
138 if exact and exists:
138 if exact and exists:
139 if lfile:
139 if lfile:
140 ui.warn(_('%s already a largefile\n') % name)
140 ui.warn(_('%s already a largefile\n') % name)
141 continue
141 continue
142
142
143 if (exact or not exists) and not lfutil.isstandin(f):
143 if (exact or not exists) and not lfutil.isstandin(f):
144 # In case the file was removed previously, but not committed
144 # In case the file was removed previously, but not committed
145 # (issue3507)
145 # (issue3507)
146 if not repo.wvfs.exists(f):
146 if not repo.wvfs.exists(f):
147 continue
147 continue
148
148
149 abovemin = (lfsize and
149 abovemin = (lfsize and
150 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
150 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
151 if large or abovemin or (lfmatcher and lfmatcher(f)):
151 if large or abovemin or (lfmatcher and lfmatcher(f)):
152 lfnames.append(f)
152 lfnames.append(f)
153 if ui.verbose or not exact:
153 if ui.verbose or not exact:
154 ui.status(_('adding %s as a largefile\n') % name)
154 ui.status(_('adding %s as a largefile\n') % name)
155
155
156 bad = []
156 bad = []
157
157
158 # Need to lock, otherwise there could be a race between when standins
158 # Need to lock, otherwise there could be a race between when standins
159 # are created and when they are added to the repo.
159 # are created and when they are added to the repo.
160 with repo.wlock():
160 with repo.wlock():
161 if not opts.get(r'dry_run'):
161 if not opts.get(r'dry_run'):
162 standins = []
162 standins = []
163 lfdirstate = lfutil.openlfdirstate(ui, repo)
163 lfdirstate = lfutil.openlfdirstate(ui, repo)
164 for f in lfnames:
164 for f in lfnames:
165 standinname = lfutil.standin(f)
165 standinname = lfutil.standin(f)
166 lfutil.writestandin(repo, standinname, hash='',
166 lfutil.writestandin(repo, standinname, hash='',
167 executable=lfutil.getexecutable(repo.wjoin(f)))
167 executable=lfutil.getexecutable(repo.wjoin(f)))
168 standins.append(standinname)
168 standins.append(standinname)
169 if lfdirstate[f] == 'r':
169 if lfdirstate[f] == 'r':
170 lfdirstate.normallookup(f)
170 lfdirstate.normallookup(f)
171 else:
171 else:
172 lfdirstate.add(f)
172 lfdirstate.add(f)
173 lfdirstate.write()
173 lfdirstate.write()
174 bad += [lfutil.splitstandin(f)
174 bad += [lfutil.splitstandin(f)
175 for f in repo[None].add(standins)
175 for f in repo[None].add(standins)
176 if f in m.files()]
176 if f in m.files()]
177
177
178 added = [f for f in lfnames if f not in bad]
178 added = [f for f in lfnames if f not in bad]
179 return added, bad
179 return added, bad
180
180
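# Editor's note -- illustrative sketch only, not part of overrides.py: the
# 'lfsize' threshold and 'patterns' list consulted in addlargefiles() come from
# the [largefiles] section of hgrc; minsize is given in megabytes, which is why
# the code above multiplies by 1024 * 1024. The values below are hypothetical:
#
#   [largefiles]
#   minsize = 2
#   patterns = *.iso re:.*\.bin$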
181 def removelargefiles(ui, repo, isaddremove, matcher, dryrun, **opts):
181 def removelargefiles(ui, repo, isaddremove, matcher, dryrun, **opts):
182 after = opts.get(r'after')
182 after = opts.get(r'after')
183 m = composelargefilematcher(matcher, repo[None].manifest())
183 m = composelargefilematcher(matcher, repo[None].manifest())
184 try:
184 try:
185 repo.lfstatus = True
185 repo.lfstatus = True
186 s = repo.status(match=m, clean=not isaddremove)
186 s = repo.status(match=m, clean=not isaddremove)
187 finally:
187 finally:
188 repo.lfstatus = False
188 repo.lfstatus = False
189 manifest = repo[None].manifest()
189 manifest = repo[None].manifest()
190 modified, added, deleted, clean = [[f for f in list
190 modified, added, deleted, clean = [[f for f in list
191 if lfutil.standin(f) in manifest]
191 if lfutil.standin(f) in manifest]
192 for list in (s.modified, s.added,
192 for list in (s.modified, s.added,
193 s.deleted, s.clean)]
193 s.deleted, s.clean)]
194
194
195 def warn(files, msg):
195 def warn(files, msg):
196 for f in files:
196 for f in files:
197 ui.warn(msg % m.rel(f))
197 ui.warn(msg % m.rel(f))
198 return int(len(files) > 0)
198 return int(len(files) > 0)
199
199
200 result = 0
200 result = 0
201
201
202 if after:
202 if after:
203 remove = deleted
203 remove = deleted
204 result = warn(modified + added + clean,
204 result = warn(modified + added + clean,
205 _('not removing %s: file still exists\n'))
205 _('not removing %s: file still exists\n'))
206 else:
206 else:
207 remove = deleted + clean
207 remove = deleted + clean
208 result = warn(modified, _('not removing %s: file is modified (use -f'
208 result = warn(modified, _('not removing %s: file is modified (use -f'
209 ' to force removal)\n'))
209 ' to force removal)\n'))
210 result = warn(added, _('not removing %s: file has been marked for add'
210 result = warn(added, _('not removing %s: file has been marked for add'
211 ' (use forget to undo)\n')) or result
211 ' (use forget to undo)\n')) or result
212
212
213 # Need to lock because standin files are deleted then removed from the
213 # Need to lock because standin files are deleted then removed from the
214 # repository and we could race in-between.
214 # repository and we could race in-between.
215 with repo.wlock():
215 with repo.wlock():
216 lfdirstate = lfutil.openlfdirstate(ui, repo)
216 lfdirstate = lfutil.openlfdirstate(ui, repo)
217 for f in sorted(remove):
217 for f in sorted(remove):
218 if ui.verbose or not m.exact(f):
218 if ui.verbose or not m.exact(f):
219 # addremove in core gets fancy with the name, remove doesn't
219 # addremove in core gets fancy with the name, remove doesn't
220 if isaddremove:
220 if isaddremove:
221 name = m.uipath(f)
221 name = m.uipath(f)
222 else:
222 else:
223 name = m.rel(f)
223 name = m.rel(f)
224 ui.status(_('removing %s\n') % name)
224 ui.status(_('removing %s\n') % name)
225
225
226 if not dryrun:
226 if not dryrun:
227 if not after:
227 if not after:
228 repo.wvfs.unlinkpath(f, ignoremissing=True)
228 repo.wvfs.unlinkpath(f, ignoremissing=True)
229
229
230 if dryrun:
230 if dryrun:
231 return result
231 return result
232
232
233 remove = [lfutil.standin(f) for f in remove]
233 remove = [lfutil.standin(f) for f in remove]
234 # If this is being called by addremove, let the original addremove
234 # If this is being called by addremove, let the original addremove
235 # function handle this.
235 # function handle this.
236 if not isaddremove:
236 if not isaddremove:
237 for f in remove:
237 for f in remove:
238 repo.wvfs.unlinkpath(f, ignoremissing=True)
238 repo.wvfs.unlinkpath(f, ignoremissing=True)
239 repo[None].forget(remove)
239 repo[None].forget(remove)
240
240
241 for f in remove:
241 for f in remove:
242 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
242 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
243 False)
243 False)
244
244
245 lfdirstate.write()
245 lfdirstate.write()
246
246
247 return result
247 return result
248
248
249 # For overriding mercurial.hgweb.webcommands so that largefiles will
249 # For overriding mercurial.hgweb.webcommands so that largefiles will
250 # appear at their right place in the manifests.
250 # appear at their right place in the manifests.
251 def decodepath(orig, path):
251 def decodepath(orig, path):
252 return lfutil.splitstandin(path) or path
252 return lfutil.splitstandin(path) or path
253
253
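# Editor's note -- illustrative sketch only, not part of overrides.py: the
# wrappers below lean heavily on the standin naming convention, which, to the
# best of the editor's knowledge, behaves as follows ('.hglf' is
# lfutil.shortname):
#
#   lfutil.standin('sub/big.bin')            -> '.hglf/sub/big.bin'
#   lfutil.splitstandin('.hglf/sub/big.bin') -> 'sub/big.bin'
#   lfutil.splitstandin('sub/big.bin')       -> None   (not a standin)
#   lfutil.isstandin('.hglf/sub/big.bin')    -> True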
254 # -- Wrappers: modify existing commands --------------------------------
254 # -- Wrappers: modify existing commands --------------------------------
255
255
256 def overrideadd(orig, ui, repo, *pats, **opts):
256 def overrideadd(orig, ui, repo, *pats, **opts):
257 if opts.get(r'normal') and opts.get(r'large'):
257 if opts.get(r'normal') and opts.get(r'large'):
258 raise error.Abort(_('--normal cannot be used with --large'))
258 raise error.Abort(_('--normal cannot be used with --large'))
259 return orig(ui, repo, *pats, **opts)
259 return orig(ui, repo, *pats, **opts)
260
260
261 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
261 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
262 # The --normal flag short circuits this override
262 # The --normal flag short circuits this override
263 if opts.get(r'normal'):
263 if opts.get(r'normal'):
264 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
264 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
265
265
266 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
266 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
267 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
267 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
268 ladded)
268 ladded)
269 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
269 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
270
270
271 bad.extend(f for f in lbad)
271 bad.extend(f for f in lbad)
272 return bad
272 return bad
273
273
274 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos,
274 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos,
275 dryrun):
275 dryrun):
276 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
276 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
277 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos,
277 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos,
278 dryrun)
278 dryrun)
279 return removelargefiles(ui, repo, False, matcher, dryrun, after=after,
279 return removelargefiles(ui, repo, False, matcher, dryrun, after=after,
280 force=force) or result
280 force=force) or result
281
281
282 def overridestatusfn(orig, repo, rev2, **opts):
282 def overridestatusfn(orig, repo, rev2, **opts):
283 try:
283 try:
284 repo._repo.lfstatus = True
284 repo._repo.lfstatus = True
285 return orig(repo, rev2, **opts)
285 return orig(repo, rev2, **opts)
286 finally:
286 finally:
287 repo._repo.lfstatus = False
287 repo._repo.lfstatus = False
288
288
289 def overridestatus(orig, ui, repo, *pats, **opts):
289 def overridestatus(orig, ui, repo, *pats, **opts):
290 try:
290 try:
291 repo.lfstatus = True
291 repo.lfstatus = True
292 return orig(ui, repo, *pats, **opts)
292 return orig(ui, repo, *pats, **opts)
293 finally:
293 finally:
294 repo.lfstatus = False
294 repo.lfstatus = False
295
295
296 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
296 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
297 try:
297 try:
298 repo._repo.lfstatus = True
298 repo._repo.lfstatus = True
299 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
299 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
300 finally:
300 finally:
301 repo._repo.lfstatus = False
301 repo._repo.lfstatus = False
302
302
303 def overridelog(orig, ui, repo, *pats, **opts):
303 def overridelog(orig, ui, repo, *pats, **opts):
304 def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
304 def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
305 default='relpath', badfn=None):
305 default='relpath', badfn=None):
306 """Matcher that merges root directory with .hglf, suitable for log.
306 """Matcher that merges root directory with .hglf, suitable for log.
307 It is still possible to match .hglf directly.
307 It is still possible to match .hglf directly.
308 For any listed file, run log on its standin too.
308 For any listed file, run log on its standin too.
309 matchfn tries both the given filename and the filename with .hglf stripped.
309 matchfn tries both the given filename and the filename with .hglf stripped.
310 """
310 """
311 if opts is None:
311 if opts is None:
312 opts = {}
312 opts = {}
313 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
313 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
314 badfn=badfn)
314 badfn=badfn)
315 m, p = copy.copy(matchandpats)
315 m, p = copy.copy(matchandpats)
316
316
317 if m.always():
317 if m.always():
318 # We want to match everything anyway, so there's no benefit trying
318 # We want to match everything anyway, so there's no benefit trying
319 # to add standins.
319 # to add standins.
320 return matchandpats
320 return matchandpats
321
321
322 pats = set(p)
322 pats = set(p)
323
323
324 def fixpats(pat, tostandin=lfutil.standin):
324 def fixpats(pat, tostandin=lfutil.standin):
325 if pat.startswith('set:'):
325 if pat.startswith('set:'):
326 return pat
326 return pat
327
327
328 kindpat = matchmod._patsplit(pat, None)
328 kindpat = matchmod._patsplit(pat, None)
329
329
330 if kindpat[0] is not None:
330 if kindpat[0] is not None:
331 return kindpat[0] + ':' + tostandin(kindpat[1])
331 return kindpat[0] + ':' + tostandin(kindpat[1])
332 return tostandin(kindpat[1])
332 return tostandin(kindpat[1])
333
333
334 if m._cwd:
334 if m._cwd:
335 hglf = lfutil.shortname
335 hglf = lfutil.shortname
336 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
336 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
337
337
338 def tostandin(f):
338 def tostandin(f):
339 # The file may already be a standin, so truncate the back
339 # The file may already be a standin, so truncate the back
340 # prefix and test before mangling it. This avoids turning
340 # prefix and test before mangling it. This avoids turning
341 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
341 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
342 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
342 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
343 return f
343 return f
344
344
345 # An absolute path is from outside the repo, so truncate the
345 # An absolute path is from outside the repo, so truncate the
346 # path to the root before building the standin. Otherwise cwd
346 # path to the root before building the standin. Otherwise cwd
347 # is somewhere in the repo, relative to root, and needs to be
347 # is somewhere in the repo, relative to root, and needs to be
348 # prepended before building the standin.
348 # prepended before building the standin.
349 if os.path.isabs(m._cwd):
349 if os.path.isabs(m._cwd):
350 f = f[len(back):]
350 f = f[len(back):]
351 else:
351 else:
352 f = m._cwd + '/' + f
352 f = m._cwd + '/' + f
353 return back + lfutil.standin(f)
353 return back + lfutil.standin(f)
354 else:
354 else:
355 def tostandin(f):
355 def tostandin(f):
356 if lfutil.isstandin(f):
356 if lfutil.isstandin(f):
357 return f
357 return f
358 return lfutil.standin(f)
358 return lfutil.standin(f)
359 pats.update(fixpats(f, tostandin) for f in p)
359 pats.update(fixpats(f, tostandin) for f in p)
360
360
361 for i in range(0, len(m._files)):
361 for i in range(0, len(m._files)):
362 # Don't add '.hglf' to m.files, since that is already covered by '.'
362 # Don't add '.hglf' to m.files, since that is already covered by '.'
363 if m._files[i] == '.':
363 if m._files[i] == '.':
364 continue
364 continue
365 standin = lfutil.standin(m._files[i])
365 standin = lfutil.standin(m._files[i])
366 # If the "standin" is a directory, append instead of replace to
366 # If the "standin" is a directory, append instead of replace to
367 # support naming a directory on the command line with only
367 # support naming a directory on the command line with only
368 # largefiles. The original directory is kept to support normal
368 # largefiles. The original directory is kept to support normal
369 # files.
369 # files.
370 if standin in ctx:
370 if standin in ctx:
371 m._files[i] = standin
371 m._files[i] = standin
372 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
372 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
373 m._files.append(standin)
373 m._files.append(standin)
374
374
375 m._fileset = set(m._files)
375 m._fileset = set(m._files)
376 m.always = lambda: False
376 m.always = lambda: False
377 origmatchfn = m.matchfn
377 origmatchfn = m.matchfn
378 def lfmatchfn(f):
378 def lfmatchfn(f):
379 lf = lfutil.splitstandin(f)
379 lf = lfutil.splitstandin(f)
380 if lf is not None and origmatchfn(lf):
380 if lf is not None and origmatchfn(lf):
381 return True
381 return True
382 r = origmatchfn(f)
382 r = origmatchfn(f)
383 return r
383 return r
384 m.matchfn = lfmatchfn
384 m.matchfn = lfmatchfn
385
385
386 ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
386 ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
387 return m, pats
387 return m, pats
388
388
389 # For hg log --patch, the match object is used in two different senses:
389 # For hg log --patch, the match object is used in two different senses:
390 # (1) to determine what revisions should be printed out, and
390 # (1) to determine what revisions should be printed out, and
391 # (2) to determine what files to print out diffs for.
391 # (2) to determine what files to print out diffs for.
392 # The magic matchandpats override should be used for case (1) but not for
392 # The magic matchandpats override should be used for case (1) but not for
393 # case (2).
393 # case (2).
394 def overridemakefilematcher(repo, pats, opts, badfn=None):
394 def overridemakefilematcher(repo, pats, opts, badfn=None):
395 wctx = repo[None]
395 wctx = repo[None]
396 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
396 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
397 return lambda ctx: match
397 return lambda ctx: match
398
398
399 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
399 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
400 oldmakefilematcher = logcmdutil._makenofollowfilematcher
400 oldmakefilematcher = logcmdutil._makenofollowfilematcher
401 setattr(logcmdutil, '_makenofollowfilematcher', overridemakefilematcher)
401 setattr(logcmdutil, '_makenofollowfilematcher', overridemakefilematcher)
402
402
403 try:
403 try:
404 return orig(ui, repo, *pats, **opts)
404 return orig(ui, repo, *pats, **opts)
405 finally:
405 finally:
406 restorematchandpatsfn()
406 restorematchandpatsfn()
407 setattr(logcmdutil, '_makenofollowfilematcher', oldmakefilematcher)
407 setattr(logcmdutil, '_makenofollowfilematcher', oldmakefilematcher)
408
408
409 def overrideverify(orig, ui, repo, *pats, **opts):
409 def overrideverify(orig, ui, repo, *pats, **opts):
410 large = opts.pop(r'large', False)
410 large = opts.pop(r'large', False)
411 all = opts.pop(r'lfa', False)
411 all = opts.pop(r'lfa', False)
412 contents = opts.pop(r'lfc', False)
412 contents = opts.pop(r'lfc', False)
413
413
414 result = orig(ui, repo, *pats, **opts)
414 result = orig(ui, repo, *pats, **opts)
415 if large or all or contents:
415 if large or all or contents:
416 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
416 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
417 return result
417 return result
418
418
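# Editor's note -- illustrative sketch only, not part of overrides.py: the
# extra options popped in overrideverify() correspond to the verify flags that
# largefiles adds; to the best of the editor's knowledge they are used as:
#
#   hg verify --large          # also verify largefiles in the current revision
#   hg verify --large --lfa    # ... in all revisions
#   hg verify --large --lfc    # ... and verify file contents, not just existence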
419 def overridedebugstate(orig, ui, repo, *pats, **opts):
419 def overridedebugstate(orig, ui, repo, *pats, **opts):
420 large = opts.pop(r'large', False)
420 large = opts.pop(r'large', False)
421 if large:
421 if large:
422 class fakerepo(object):
422 class fakerepo(object):
423 dirstate = lfutil.openlfdirstate(ui, repo)
423 dirstate = lfutil.openlfdirstate(ui, repo)
424 orig(ui, fakerepo, *pats, **opts)
424 orig(ui, fakerepo, *pats, **opts)
425 else:
425 else:
426 orig(ui, repo, *pats, **opts)
426 orig(ui, repo, *pats, **opts)
427
427
428 # Before starting the manifest merge, merge.updates will call
428 # Before starting the manifest merge, merge.updates will call
429 # _checkunknownfile to check if there are any files in the merged-in
429 # _checkunknownfile to check if there are any files in the merged-in
430 # changeset that collide with unknown files in the working copy.
430 # changeset that collide with unknown files in the working copy.
431 #
431 #
432 # The largefiles are seen as unknown, so this prevents us from merging
432 # The largefiles are seen as unknown, so this prevents us from merging
433 # in a file 'foo' if we already have a largefile with the same name.
433 # in a file 'foo' if we already have a largefile with the same name.
434 #
434 #
435 # The overridden function filters the unknown files by removing any
435 # The overridden function filters the unknown files by removing any
436 # largefiles. This makes the merge proceed and we can then handle this
436 # largefiles. This makes the merge proceed and we can then handle this
437 # case further in the overridden calculateupdates function below.
437 # case further in the overridden calculateupdates function below.
438 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
438 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
439 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
439 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
440 return False
440 return False
441 return origfn(repo, wctx, mctx, f, f2)
441 return origfn(repo, wctx, mctx, f, f2)
442
442
443 # The manifest merge handles conflicts on the manifest level. We want
443 # The manifest merge handles conflicts on the manifest level. We want
444 # to handle changes in largefile-ness of files at this level too.
444 # to handle changes in largefile-ness of files at this level too.
445 #
445 #
446 # The strategy is to run the original calculateupdates and then process
446 # The strategy is to run the original calculateupdates and then process
447 # the action list it outputs. There are two cases we need to deal with:
447 # the action list it outputs. There are two cases we need to deal with:
448 #
448 #
449 # 1. Normal file in p1, largefile in p2. Here the largefile is
449 # 1. Normal file in p1, largefile in p2. Here the largefile is
450 # detected via its standin file, which will enter the working copy
450 # detected via its standin file, which will enter the working copy
451 # with a "get" action. It is not "merge" since the standin is all
451 # with a "get" action. It is not "merge" since the standin is all
452 # Mercurial is concerned with at this level -- the link to the
452 # Mercurial is concerned with at this level -- the link to the
453 # existing normal file is not relevant here.
453 # existing normal file is not relevant here.
454 #
454 #
455 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
455 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
456 # since the largefile will be present in the working copy and
456 # since the largefile will be present in the working copy and
457 # different from the normal file in p2. Mercurial therefore
457 # different from the normal file in p2. Mercurial therefore
458 # triggers a merge action.
458 # triggers a merge action.
459 #
459 #
460 # In both cases, we prompt the user and emit new actions to either
460 # In both cases, we prompt the user and emit new actions to either
461 # remove the standin (if the normal file was kept) or to remove the
461 # remove the standin (if the normal file was kept) or to remove the
462 # normal file and get the standin (if the largefile was kept). The
462 # normal file and get the standin (if the largefile was kept). The
463 # default prompt answer is to use the largefile version since it was
463 # default prompt answer is to use the largefile version since it was
464 # presumably changed on purpose.
464 # presumably changed on purpose.
465 #
465 #
466 # Finally, the merge.applyupdates function will then take care of
466 # Finally, the merge.applyupdates function will then take care of
467 # writing the files into the working copy and lfcommands.updatelfiles
467 # writing the files into the working copy and lfcommands.updatelfiles
468 # will update the largefiles.
468 # will update the largefiles.
469 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
469 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
470 acceptremote, *args, **kwargs):
470 acceptremote, *args, **kwargs):
471 overwrite = force and not branchmerge
471 overwrite = force and not branchmerge
472 actions, diverge, renamedelete = origfn(
472 actions, diverge, renamedelete = origfn(
473 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs)
473 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs)
474
474
475 if overwrite:
475 if overwrite:
476 return actions, diverge, renamedelete
476 return actions, diverge, renamedelete
477
477
478 # Convert to dictionary with filename as key and action as value.
478 # Convert to dictionary with filename as key and action as value.
479 lfiles = set()
479 lfiles = set()
480 for f in actions:
480 for f in actions:
481 splitstandin = lfutil.splitstandin(f)
481 splitstandin = lfutil.splitstandin(f)
482 if splitstandin in p1:
482 if splitstandin in p1:
483 lfiles.add(splitstandin)
483 lfiles.add(splitstandin)
484 elif lfutil.standin(f) in p1:
484 elif lfutil.standin(f) in p1:
485 lfiles.add(f)
485 lfiles.add(f)
486
486
487 for lfile in sorted(lfiles):
487 for lfile in sorted(lfiles):
488 standin = lfutil.standin(lfile)
488 standin = lfutil.standin(lfile)
489 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
489 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
490 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
490 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
491 if sm in ('g', 'dc') and lm != 'r':
491 if sm in ('g', 'dc') and lm != 'r':
492 if sm == 'dc':
492 if sm == 'dc':
493 f1, f2, fa, move, anc = sargs
493 f1, f2, fa, move, anc = sargs
494 sargs = (p2[f2].flags(), False)
494 sargs = (p2[f2].flags(), False)
495 # Case 1: normal file in the working copy, largefile in
495 # Case 1: normal file in the working copy, largefile in
496 # the second parent
496 # the second parent
497 usermsg = _('remote turned local normal file %s into a largefile\n'
497 usermsg = _('remote turned local normal file %s into a largefile\n'
498 'use (l)argefile or keep (n)ormal file?'
498 'use (l)argefile or keep (n)ormal file?'
499 '$$ &Largefile $$ &Normal file') % lfile
499 '$$ &Largefile $$ &Normal file') % lfile
500 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
500 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
501 actions[lfile] = ('r', None, 'replaced by standin')
501 actions[lfile] = ('r', None, 'replaced by standin')
502 actions[standin] = ('g', sargs, 'replaces standin')
502 actions[standin] = ('g', sargs, 'replaces standin')
503 else: # keep local normal file
503 else: # keep local normal file
504 actions[lfile] = ('k', None, 'replaces standin')
504 actions[lfile] = ('k', None, 'replaces standin')
505 if branchmerge:
505 if branchmerge:
506 actions[standin] = ('k', None, 'replaced by non-standin')
506 actions[standin] = ('k', None, 'replaced by non-standin')
507 else:
507 else:
508 actions[standin] = ('r', None, 'replaced by non-standin')
508 actions[standin] = ('r', None, 'replaced by non-standin')
509 elif lm in ('g', 'dc') and sm != 'r':
509 elif lm in ('g', 'dc') and sm != 'r':
510 if lm == 'dc':
510 if lm == 'dc':
511 f1, f2, fa, move, anc = largs
511 f1, f2, fa, move, anc = largs
512 largs = (p2[f2].flags(), False)
512 largs = (p2[f2].flags(), False)
513 # Case 2: largefile in the working copy, normal file in
513 # Case 2: largefile in the working copy, normal file in
514 # the second parent
514 # the second parent
515 usermsg = _('remote turned local largefile %s into a normal file\n'
515 usermsg = _('remote turned local largefile %s into a normal file\n'
516 'keep (l)argefile or use (n)ormal file?'
516 'keep (l)argefile or use (n)ormal file?'
517 '$$ &Largefile $$ &Normal file') % lfile
517 '$$ &Largefile $$ &Normal file') % lfile
518 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
518 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
519 if branchmerge:
519 if branchmerge:
520 # largefile can be restored from standin safely
520 # largefile can be restored from standin safely
521 actions[lfile] = ('k', None, 'replaced by standin')
521 actions[lfile] = ('k', None, 'replaced by standin')
522 actions[standin] = ('k', None, 'replaces standin')
522 actions[standin] = ('k', None, 'replaces standin')
523 else:
523 else:
524 # "lfile" should be marked as "removed" without
524 # "lfile" should be marked as "removed" without
525 # removal of itself
525 # removal of itself
526 actions[lfile] = ('lfmr', None,
526 actions[lfile] = ('lfmr', None,
527 'forget non-standin largefile')
527 'forget non-standin largefile')
528
528
529 # linear-merge should treat this largefile as 're-added'
529 # linear-merge should treat this largefile as 're-added'
530 actions[standin] = ('a', None, 'keep standin')
530 actions[standin] = ('a', None, 'keep standin')
531 else: # pick remote normal file
531 else: # pick remote normal file
532 actions[lfile] = ('g', largs, 'replaces standin')
532 actions[lfile] = ('g', largs, 'replaces standin')
533 actions[standin] = ('r', None, 'replaced by non-standin')
533 actions[standin] = ('r', None, 'replaced by non-standin')
534
534
535 return actions, diverge, renamedelete
535 return actions, diverge, renamedelete
536
536
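# Editor's note -- illustrative sketch only, not part of overrides.py: for
# Case 1 above, with a hypothetical file 'foo' (standin '.hglf/foo'), choosing
# the remote largefile rewrites the action dict roughly like this:
#
#   before:  actions['.hglf/foo'] is a 'g' (or 'dc') action   # standin incoming
#   after:   actions['foo']       == ('r', None, 'replaced by standin')
#            actions['.hglf/foo'] == ('g', sargs, 'replaces standin')
#
# so merge.applyupdates drops the normal file and fetches the standin, and
# lfcommands.updatelfiles later materializes the largefile from it.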
537 def mergerecordupdates(orig, repo, actions, branchmerge):
537 def mergerecordupdates(orig, repo, actions, branchmerge):
538 if 'lfmr' in actions:
538 if 'lfmr' in actions:
539 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
539 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
540 for lfile, args, msg in actions['lfmr']:
540 for lfile, args, msg in actions['lfmr']:
541 # this should be executed before 'orig', to execute 'remove'
541 # this should be executed before 'orig', to execute 'remove'
542 # before all other actions
542 # before all other actions
543 repo.dirstate.remove(lfile)
543 repo.dirstate.remove(lfile)
544 # make sure lfile doesn't get synclfdirstate'd as normal
544 # make sure lfile doesn't get synclfdirstate'd as normal
545 lfdirstate.add(lfile)
545 lfdirstate.add(lfile)
546 lfdirstate.write()
546 lfdirstate.write()
547
547
548 return orig(repo, actions, branchmerge)
548 return orig(repo, actions, branchmerge)
549
549
550 # Override filemerge to prompt the user about how they wish to merge
550 # Override filemerge to prompt the user about how they wish to merge
551 # largefiles. This will handle identical edits without prompting the user.
551 # largefiles. This will handle identical edits without prompting the user.
552 def overridefilemerge(origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca,
552 def overridefilemerge(origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca,
553 labels=None):
553 labels=None):
554 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
554 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
555 return origfn(premerge, repo, wctx, mynode, orig, fcd, fco, fca,
555 return origfn(premerge, repo, wctx, mynode, orig, fcd, fco, fca,
556 labels=labels)
556 labels=labels)
557
557
558 ahash = lfutil.readasstandin(fca).lower()
558 ahash = lfutil.readasstandin(fca).lower()
559 dhash = lfutil.readasstandin(fcd).lower()
559 dhash = lfutil.readasstandin(fcd).lower()
560 ohash = lfutil.readasstandin(fco).lower()
560 ohash = lfutil.readasstandin(fco).lower()
561 if (ohash != ahash and
561 if (ohash != ahash and
562 ohash != dhash and
562 ohash != dhash and
563 (dhash == ahash or
563 (dhash == ahash or
564 repo.ui.promptchoice(
564 repo.ui.promptchoice(
565 _('largefile %s has a merge conflict\nancestor was %s\n'
565 _('largefile %s has a merge conflict\nancestor was %s\n'
566 'keep (l)ocal %s or\ntake (o)ther %s?'
566 'keep (l)ocal %s or\ntake (o)ther %s?'
567 '$$ &Local $$ &Other') %
567 '$$ &Local $$ &Other') %
568 (lfutil.splitstandin(orig), ahash, dhash, ohash),
568 (lfutil.splitstandin(orig), ahash, dhash, ohash),
569 0) == 1)):
569 0) == 1)):
570 repo.wwrite(fcd.path(), fco.data(), fco.flags())
570 repo.wwrite(fcd.path(), fco.data(), fco.flags())
571 return True, 0, False
571 return True, 0, False
572
572
573 def copiespathcopies(orig, ctx1, ctx2, match=None):
573 def copiespathcopies(orig, ctx1, ctx2, match=None):
574 copies = orig(ctx1, ctx2, match=match)
574 copies = orig(ctx1, ctx2, match=match)
575 updated = {}
575 updated = {}
576
576
577 for k, v in copies.iteritems():
577 for k, v in copies.iteritems():
578 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
578 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
579
579
580 return updated
580 return updated
581
581
582 # Copy first changes the matchers to match standins instead of
582 # Copy first changes the matchers to match standins instead of
583 # largefiles. Then it overrides util.copyfile; in that override it
583 # largefiles. Then it overrides util.copyfile; in that override it
584 # checks whether the destination largefile already exists. It also keeps
584 # checks whether the destination largefile already exists. It also keeps
585 # a list of copied files so that the largefiles can be copied and the
585 # a list of copied files so that the largefiles can be copied and the
586 # dirstate updated.
586 # dirstate updated.
587 def overridecopy(orig, ui, repo, pats, opts, rename=False):
587 def overridecopy(orig, ui, repo, pats, opts, rename=False):
588 # doesn't remove largefile on rename
588 # doesn't remove largefile on rename
589 if len(pats) < 2:
589 if len(pats) < 2:
590 # this isn't legal, let the original function deal with it
590 # this isn't legal, let the original function deal with it
591 return orig(ui, repo, pats, opts, rename)
591 return orig(ui, repo, pats, opts, rename)
592
592
593 # This could copy both lfiles and normal files in one command,
593 # This could copy both lfiles and normal files in one command,
594 # but we don't want to do that. First replace their matcher to
594 # but we don't want to do that. First replace their matcher to
595 # only match normal files and run it, then replace it to just
595 # only match normal files and run it, then replace it to just
596 # match largefiles and run it again.
596 # match largefiles and run it again.
597 nonormalfiles = False
597 nonormalfiles = False
598 nolfiles = False
598 nolfiles = False
599 installnormalfilesmatchfn(repo[None].manifest())
599 installnormalfilesmatchfn(repo[None].manifest())
600 try:
600 try:
601 result = orig(ui, repo, pats, opts, rename)
601 result = orig(ui, repo, pats, opts, rename)
602 except error.Abort as e:
602 except error.Abort as e:
603 if pycompat.bytestr(e) != _('no files to copy'):
603 if pycompat.bytestr(e) != _('no files to copy'):
604 raise e
604 raise e
605 else:
605 else:
606 nonormalfiles = True
606 nonormalfiles = True
607 result = 0
607 result = 0
608 finally:
608 finally:
609 restorematchfn()
609 restorematchfn()
610
610
611 # The first rename can cause our current working directory to be removed.
611 # The first rename can cause our current working directory to be removed.
612 # In that case there is nothing left to copy/rename so just quit.
612 # In that case there is nothing left to copy/rename so just quit.
613 try:
613 try:
614 repo.getcwd()
614 repo.getcwd()
615 except OSError:
615 except OSError:
616 return result
616 return result
617
617
618 def makestandin(relpath):
618 def makestandin(relpath):
619 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
619 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
620 return repo.wvfs.join(lfutil.standin(path))
620 return repo.wvfs.join(lfutil.standin(path))
621
621
622 fullpats = scmutil.expandpats(pats)
622 fullpats = scmutil.expandpats(pats)
623 dest = fullpats[-1]
623 dest = fullpats[-1]
624
624
625 if os.path.isdir(dest):
625 if os.path.isdir(dest):
626 if not os.path.isdir(makestandin(dest)):
626 if not os.path.isdir(makestandin(dest)):
627 os.makedirs(makestandin(dest))
627 os.makedirs(makestandin(dest))
628
628
629 try:
629 try:
630 # When we call orig below it creates the standins, but we don't add
630 # When we call orig below it creates the standins, but we don't add
631 # them to the dirstate until later, so lock during that time.
631 # them to the dirstate until later, so lock during that time.
632 wlock = repo.wlock()
632 wlock = repo.wlock()
633
633
634 manifest = repo[None].manifest()
634 manifest = repo[None].manifest()
635 def overridematch(ctx, pats=(), opts=None, globbed=False,
635 def overridematch(ctx, pats=(), opts=None, globbed=False,
636 default='relpath', badfn=None):
636 default='relpath', badfn=None):
637 if opts is None:
637 if opts is None:
638 opts = {}
638 opts = {}
639 newpats = []
639 newpats = []
640 # The patterns were previously mangled to add the standin
640 # The patterns were previously mangled to add the standin
641 # directory; we need to remove that now
641 # directory; we need to remove that now
642 for pat in pats:
642 for pat in pats:
643 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
643 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
644 newpats.append(pat.replace(lfutil.shortname, ''))
644 newpats.append(pat.replace(lfutil.shortname, ''))
645 else:
645 else:
646 newpats.append(pat)
646 newpats.append(pat)
647 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
647 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
648 m = copy.copy(match)
648 m = copy.copy(match)
649 lfile = lambda f: lfutil.standin(f) in manifest
649 lfile = lambda f: lfutil.standin(f) in manifest
650 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
650 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
651 m._fileset = set(m._files)
651 m._fileset = set(m._files)
652 origmatchfn = m.matchfn
652 origmatchfn = m.matchfn
653 def matchfn(f):
653 def matchfn(f):
654 lfile = lfutil.splitstandin(f)
654 lfile = lfutil.splitstandin(f)
655 return (lfile is not None and
655 return (lfile is not None and
656 (f in manifest) and
656 (f in manifest) and
657 origmatchfn(lfile) or
657 origmatchfn(lfile) or
658 None)
658 None)
659 m.matchfn = matchfn
659 m.matchfn = matchfn
660 return m
660 return m
661 oldmatch = installmatchfn(overridematch)
661 oldmatch = installmatchfn(overridematch)
662 listpats = []
662 listpats = []
663 for pat in pats:
663 for pat in pats:
664 if matchmod.patkind(pat) is not None:
664 if matchmod.patkind(pat) is not None:
665 listpats.append(pat)
665 listpats.append(pat)
666 else:
666 else:
667 listpats.append(makestandin(pat))
667 listpats.append(makestandin(pat))
668
668
669 try:
669 try:
670 origcopyfile = util.copyfile
670 origcopyfile = util.copyfile
671 copiedfiles = []
671 copiedfiles = []
672 def overridecopyfile(src, dest, *args, **kwargs):
672 def overridecopyfile(src, dest, *args, **kwargs):
673 if (lfutil.shortname in src and
673 if (lfutil.shortname in src and
674 dest.startswith(repo.wjoin(lfutil.shortname))):
674 dest.startswith(repo.wjoin(lfutil.shortname))):
675 destlfile = dest.replace(lfutil.shortname, '')
675 destlfile = dest.replace(lfutil.shortname, '')
676 if not opts['force'] and os.path.exists(destlfile):
676 if not opts['force'] and os.path.exists(destlfile):
677 raise IOError('',
677 raise IOError('',
678 _('destination largefile already exists'))
678 _('destination largefile already exists'))
679 copiedfiles.append((src, dest))
679 copiedfiles.append((src, dest))
680 origcopyfile(src, dest, *args, **kwargs)
680 origcopyfile(src, dest, *args, **kwargs)
681
681
682 util.copyfile = overridecopyfile
682 util.copyfile = overridecopyfile
683 result += orig(ui, repo, listpats, opts, rename)
683 result += orig(ui, repo, listpats, opts, rename)
684 finally:
684 finally:
685 util.copyfile = origcopyfile
685 util.copyfile = origcopyfile
686
686
687 lfdirstate = lfutil.openlfdirstate(ui, repo)
687 lfdirstate = lfutil.openlfdirstate(ui, repo)
688 for (src, dest) in copiedfiles:
688 for (src, dest) in copiedfiles:
689 if (lfutil.shortname in src and
689 if (lfutil.shortname in src and
690 dest.startswith(repo.wjoin(lfutil.shortname))):
690 dest.startswith(repo.wjoin(lfutil.shortname))):
691 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
691 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
692 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
692 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
693 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
693 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
694 if not os.path.isdir(destlfiledir):
694 if not os.path.isdir(destlfiledir):
695 os.makedirs(destlfiledir)
695 os.makedirs(destlfiledir)
696 if rename:
696 if rename:
697 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
697 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
698
698
699 # The file is gone, but this deletes any empty parent
699 # The file is gone, but this deletes any empty parent
700 # directories as a side-effect.
700 # directories as a side-effect.
701 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
701 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
702 lfdirstate.remove(srclfile)
702 lfdirstate.remove(srclfile)
703 else:
703 else:
704 util.copyfile(repo.wjoin(srclfile),
704 util.copyfile(repo.wjoin(srclfile),
705 repo.wjoin(destlfile))
705 repo.wjoin(destlfile))
706
706
707 lfdirstate.add(destlfile)
707 lfdirstate.add(destlfile)
708 lfdirstate.write()
708 lfdirstate.write()
709 except error.Abort as e:
709 except error.Abort as e:
710 if pycompat.bytestr(e) != _('no files to copy'):
710 if pycompat.bytestr(e) != _('no files to copy'):
711 raise e
711 raise e
712 else:
712 else:
713 nolfiles = True
713 nolfiles = True
714 finally:
714 finally:
715 restorematchfn()
715 restorematchfn()
716 wlock.release()
716 wlock.release()
717
717
718 if nolfiles and nonormalfiles:
718 if nolfiles and nonormalfiles:
719 raise error.Abort(_('no files to copy'))
719 raise error.Abort(_('no files to copy'))
720
720
721 return result
721 return result
722
722
723 # When the user calls revert, we have to be careful to not revert any
723 # When the user calls revert, we have to be careful to not revert any
724 # changes to other largefiles accidentally. This means we have to keep
724 # changes to other largefiles accidentally. This means we have to keep
725 # track of the largefiles that are being reverted so we only pull down
725 # track of the largefiles that are being reverted so we only pull down
726 # the necessary largefiles.
726 # the necessary largefiles.
727 #
727 #
728 # Standins are only updated (to match the hash of largefiles) before
728 # Standins are only updated (to match the hash of largefiles) before
729 # commits. Update the standins, then run the original revert, changing
729 # commits. Update the standins, then run the original revert, changing
730 # the matcher to hit standins instead of largefiles. Based on the
730 # the matcher to hit standins instead of largefiles. Based on the
731 # resulting standins, update the largefiles.
731 # resulting standins, update the largefiles.
732 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
732 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
733 # Because we put the standins in a bad state (by updating them)
733 # Because we put the standins in a bad state (by updating them)
734 # and then return them to a correct state, we need to lock to
734 # and then return them to a correct state, we need to lock to
735 # prevent others from changing them in their incorrect state.
735 # prevent others from changing them in their incorrect state.
736 with repo.wlock():
736 with repo.wlock():
737 lfdirstate = lfutil.openlfdirstate(ui, repo)
737 lfdirstate = lfutil.openlfdirstate(ui, repo)
738 s = lfutil.lfdirstatestatus(lfdirstate, repo)
738 s = lfutil.lfdirstatestatus(lfdirstate, repo)
739 lfdirstate.write()
739 lfdirstate.write()
740 for lfile in s.modified:
740 for lfile in s.modified:
741 lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
741 lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
742 for lfile in s.deleted:
742 for lfile in s.deleted:
743 fstandin = lfutil.standin(lfile)
743 fstandin = lfutil.standin(lfile)
744 if (repo.wvfs.exists(fstandin)):
744 if (repo.wvfs.exists(fstandin)):
745 repo.wvfs.unlink(fstandin)
745 repo.wvfs.unlink(fstandin)
746
746
747 oldstandins = lfutil.getstandinsstate(repo)
747 oldstandins = lfutil.getstandinsstate(repo)
748
748
749 def overridematch(mctx, pats=(), opts=None, globbed=False,
749 def overridematch(mctx, pats=(), opts=None, globbed=False,
750 default='relpath', badfn=None):
750 default='relpath', badfn=None):
751 if opts is None:
751 if opts is None:
752 opts = {}
752 opts = {}
753 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
753 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
754 m = copy.copy(match)
754 m = copy.copy(match)
755
755
756 # revert supports recursing into subrepos, and though largefiles
756 # revert supports recursing into subrepos, and though largefiles
757 # currently doesn't work correctly in that case, this match is
757 # currently doesn't work correctly in that case, this match is
758 # called, so the lfdirstate above may not be the correct one for
758 # called, so the lfdirstate above may not be the correct one for
759 # this invocation of match.
759 # this invocation of match.
760 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
760 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
761 False)
761 False)
762
762
763 wctx = repo[None]
763 wctx = repo[None]
764 matchfiles = []
764 matchfiles = []
765 for f in m._files:
765 for f in m._files:
766 standin = lfutil.standin(f)
766 standin = lfutil.standin(f)
767 if standin in ctx or standin in mctx:
767 if standin in ctx or standin in mctx:
768 matchfiles.append(standin)
768 matchfiles.append(standin)
769 elif standin in wctx or lfdirstate[f] == 'r':
769 elif standin in wctx or lfdirstate[f] == 'r':
770 continue
770 continue
771 else:
771 else:
772 matchfiles.append(f)
772 matchfiles.append(f)
773 m._files = matchfiles
773 m._files = matchfiles
774 m._fileset = set(m._files)
774 m._fileset = set(m._files)
775 origmatchfn = m.matchfn
775 origmatchfn = m.matchfn
776 def matchfn(f):
776 def matchfn(f):
777 lfile = lfutil.splitstandin(f)
777 lfile = lfutil.splitstandin(f)
778 if lfile is not None:
778 if lfile is not None:
779 return (origmatchfn(lfile) and
779 return (origmatchfn(lfile) and
780 (f in ctx or f in mctx))
780 (f in ctx or f in mctx))
781 return origmatchfn(f)
781 return origmatchfn(f)
782 m.matchfn = matchfn
782 m.matchfn = matchfn
783 return m
783 return m
784 oldmatch = installmatchfn(overridematch)
784 oldmatch = installmatchfn(overridematch)
785 try:
785 try:
786 orig(ui, repo, ctx, parents, *pats, **opts)
786 orig(ui, repo, ctx, parents, *pats, **opts)
787 finally:
787 finally:
788 restorematchfn()
788 restorematchfn()
789
789
790 newstandins = lfutil.getstandinsstate(repo)
790 newstandins = lfutil.getstandinsstate(repo)
791 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
791 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
792 # lfdirstate should be 'normallookup'-ed for updated files,
792 # lfdirstate should be 'normallookup'-ed for updated files,
793 # because reverting doesn't touch dirstate for 'normal' files
793 # because reverting doesn't touch dirstate for 'normal' files
794 # when a target revision is explicitly specified: in that case,
794 # when a target revision is explicitly specified: in that case,
795 # an 'n' state and a valid timestamp in the dirstate don't ensure
795 # an 'n' state and a valid timestamp in the dirstate don't ensure
796 # that the target (standin) file is 'clean'.
796 # that the target (standin) file is 'clean'.
797 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
797 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
798 normallookup=True)
798 normallookup=True)
799
799
800 # after pulling changesets, we need to take some extra care to get
800 # after pulling changesets, we need to take some extra care to get
801 # largefiles updated remotely
801 # largefiles updated remotely
802 def overridepull(orig, ui, repo, source=None, **opts):
802 def overridepull(orig, ui, repo, source=None, **opts):
803 revsprepull = len(repo)
803 revsprepull = len(repo)
804 if not source:
804 if not source:
805 source = 'default'
805 source = 'default'
806 repo.lfpullsource = source
806 repo.lfpullsource = source
807 result = orig(ui, repo, source, **opts)
807 result = orig(ui, repo, source, **opts)
808 revspostpull = len(repo)
808 revspostpull = len(repo)
809 lfrevs = opts.get(r'lfrev', [])
809 lfrevs = opts.get(r'lfrev', [])
810 if opts.get(r'all_largefiles'):
810 if opts.get(r'all_largefiles'):
811 lfrevs.append('pulled()')
811 lfrevs.append('pulled()')
812 if lfrevs and revspostpull > revsprepull:
812 if lfrevs and revspostpull > revsprepull:
813 numcached = 0
813 numcached = 0
814 repo.firstpulled = revsprepull # for pulled() revset expression
814 repo.firstpulled = revsprepull # for pulled() revset expression
815 try:
815 try:
816 for rev in scmutil.revrange(repo, lfrevs):
816 for rev in scmutil.revrange(repo, lfrevs):
817 ui.note(_('pulling largefiles for revision %d\n') % rev)
817 ui.note(_('pulling largefiles for revision %d\n') % rev)
818 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
818 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
819 numcached += len(cached)
819 numcached += len(cached)
820 finally:
820 finally:
821 del repo.firstpulled
821 del repo.firstpulled
822 ui.status(_("%d largefiles cached\n") % numcached)
822 ui.status(_("%d largefiles cached\n") % numcached)
823 return result
823 return result
824
824
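# Editor's note -- illustrative sketch only, not part of overrides.py: typical
# invocations handled by the pull override above (the flags and the pulled()
# revset are real; the revsets shown are just examples):
#
#   hg pull --all-largefiles               # cache largefiles for every new cset
#   hg pull --lfrev "head(pulled())"       # only for the new heads
#   hg pull --lfrev "pulled() and branch(default)"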
825 def overridepush(orig, ui, repo, *args, **kwargs):
825 def overridepush(orig, ui, repo, *args, **kwargs):
826 """Override push command and store --lfrev parameters in opargs"""
826 """Override push command and store --lfrev parameters in opargs"""
827 lfrevs = kwargs.pop(r'lfrev', None)
827 lfrevs = kwargs.pop(r'lfrev', None)
828 if lfrevs:
828 if lfrevs:
829 opargs = kwargs.setdefault(r'opargs', {})
829 opargs = kwargs.setdefault(r'opargs', {})
830 opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
830 opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
831 return orig(ui, repo, *args, **kwargs)
831 return orig(ui, repo, *args, **kwargs)
832
832
833 def exchangepushoperation(orig, *args, **kwargs):
833 def exchangepushoperation(orig, *args, **kwargs):
834 """Override pushoperation constructor and store lfrevs parameter"""
834 """Override pushoperation constructor and store lfrevs parameter"""
835 lfrevs = kwargs.pop(r'lfrevs', None)
835 lfrevs = kwargs.pop(r'lfrevs', None)
836 pushop = orig(*args, **kwargs)
836 pushop = orig(*args, **kwargs)
837 pushop.lfrevs = lfrevs
837 pushop.lfrevs = lfrevs
838 return pushop
838 return pushop
839
839
840 revsetpredicate = registrar.revsetpredicate()
840 revsetpredicate = registrar.revsetpredicate()
841
841
842 @revsetpredicate('pulled()')
842 @revsetpredicate('pulled()')
843 def pulledrevsetsymbol(repo, subset, x):
843 def pulledrevsetsymbol(repo, subset, x):
844 """Changesets that just has been pulled.
844 """Changesets that just has been pulled.
845
845
846 Only available with largefiles from pull --lfrev expressions.
846 Only available with largefiles from pull --lfrev expressions.
847
847
848 .. container:: verbose
848 .. container:: verbose
849
849
850 Some examples:
850 Some examples:
851
851
852 - pull largefiles for all new changesets::
852 - pull largefiles for all new changesets::
853
853
854 hg pull --lfrev "pulled()"
854 hg pull --lfrev "pulled()"
855
855
856 - pull largefiles for all new branch heads::
856 - pull largefiles for all new branch heads::
857
857
858 hg pull --lfrev "head(pulled()) and not closed()"
858 hg pull --lfrev "head(pulled()) and not closed()"
859
859
860 """
860 """
861
861
862 try:
862 try:
863 firstpulled = repo.firstpulled
863 firstpulled = repo.firstpulled
864 except AttributeError:
864 except AttributeError:
865 raise error.Abort(_("pulled() only available in --lfrev"))
865 raise error.Abort(_("pulled() only available in --lfrev"))
866 return smartset.baseset([r for r in subset if r >= firstpulled])
866 return smartset.baseset([r for r in subset if r >= firstpulled])
867
867
868 def overrideclone(orig, ui, source, dest=None, **opts):
868 def overrideclone(orig, ui, source, dest=None, **opts):
869 d = dest
869 d = dest
870 if d is None:
870 if d is None:
871 d = hg.defaultdest(source)
871 d = hg.defaultdest(source)
872 if opts.get(r'all_largefiles') and not hg.islocal(d):
872 if opts.get(r'all_largefiles') and not hg.islocal(d):
873 raise error.Abort(_(
873 raise error.Abort(_(
874 '--all-largefiles is incompatible with non-local destination %s') %
874 '--all-largefiles is incompatible with non-local destination %s') %
875 d)
875 d)
876
876
877 return orig(ui, source, dest, **opts)
877 return orig(ui, source, dest, **opts)
878
878
879 def hgclone(orig, ui, opts, *args, **kwargs):
879 def hgclone(orig, ui, opts, *args, **kwargs):
880 result = orig(ui, opts, *args, **kwargs)
880 result = orig(ui, opts, *args, **kwargs)
881
881
882 if result is not None:
882 if result is not None:
883 sourcerepo, destrepo = result
883 sourcerepo, destrepo = result
884 repo = destrepo.local()
884 repo = destrepo.local()
885
885
886 # When cloning to a remote repo (like through SSH), no repo is available
886 # When cloning to a remote repo (like through SSH), no repo is available
887 # from the peer. Therefore the largefiles can't be downloaded and the
887 # from the peer. Therefore the largefiles can't be downloaded and the
888 # hgrc can't be updated.
888 # hgrc can't be updated.
889 if not repo:
889 if not repo:
890 return result
890 return result
891
891
892 # If largefiles is required for this repo, permanently enable it locally
892 # If largefiles is required for this repo, permanently enable it locally
893 if 'largefiles' in repo.requirements:
893 if 'largefiles' in repo.requirements:
894 repo.vfs.append('hgrc',
894 repo.vfs.append('hgrc',
895 util.tonativeeol('\n[extensions]\nlargefiles=\n'))
895 util.tonativeeol('\n[extensions]\nlargefiles=\n'))
896
896
897 # Caching is implicitly limited to the 'rev' option, since the dest repo was
897 # Caching is implicitly limited to the 'rev' option, since the dest repo was
898 # truncated at that point. The user may expect a download count with
898 # truncated at that point. The user may expect a download count with
899 # this option, so attempt it whether or not this is a largefile repo.
899 # this option, so attempt it whether or not this is a largefile repo.
900 if opts.get(r'all_largefiles'):
900 if opts.get(r'all_largefiles'):
901 success, missing = lfcommands.downloadlfiles(ui, repo, None)
901 success, missing = lfcommands.downloadlfiles(ui, repo, None)
902
902
903 if missing != 0:
903 if missing != 0:
904 return None
904 return None
905
905
906 return result
906 return result
907
907
908 def hgpostshare(orig, sourcerepo, destrepo, bookmarks=True, defaultpath=None):
908 def hgpostshare(orig, sourcerepo, destrepo, bookmarks=True, defaultpath=None):
909 orig(sourcerepo, destrepo, bookmarks, defaultpath)
909 orig(sourcerepo, destrepo, bookmarks, defaultpath)
910
910
911 # If largefiles is required for this repo, permanently enable it locally
911 # If largefiles is required for this repo, permanently enable it locally
912 if 'largefiles' in destrepo.requirements:
912 if 'largefiles' in destrepo.requirements:
913 destrepo.vfs.append('hgrc',
913 destrepo.vfs.append('hgrc',
914 util.tonativeeol('\n[extensions]\nlargefiles=\n'))
914 util.tonativeeol('\n[extensions]\nlargefiles=\n'))
915
915
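# For illustration (not part of this patch): after a clone or share of a
# repository that carries the 'largefiles' requirement, the two wrappers
# above leave the destination's .hg/hgrc ending with
#
#     [extensions]
#     largefiles=
#
# so the extension stays enabled for that repository only, without touching
# the user's global configuration.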
916 def overriderebase(orig, ui, repo, **opts):
916 def overriderebase(orig, ui, repo, **opts):
917 if not util.safehasattr(repo, '_largefilesenabled'):
917 if not util.safehasattr(repo, '_largefilesenabled'):
918 return orig(ui, repo, **opts)
918 return orig(ui, repo, **opts)
919
919
920 resuming = opts.get(r'continue')
920 resuming = opts.get(r'continue')
921 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
921 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
922 repo._lfstatuswriters.append(lambda *msg, **opts: None)
922 repo._lfstatuswriters.append(lambda *msg, **opts: None)
923 try:
923 try:
924 return orig(ui, repo, **opts)
924 return orig(ui, repo, **opts)
925 finally:
925 finally:
926 repo._lfstatuswriters.pop()
926 repo._lfstatuswriters.pop()
927 repo._lfcommithooks.pop()
927 repo._lfcommithooks.pop()
928
928
929 def overridearchivecmd(orig, ui, repo, dest, **opts):
929 def overridearchivecmd(orig, ui, repo, dest, **opts):
930 repo.unfiltered().lfstatus = True
930 repo.unfiltered().lfstatus = True
931
931
932 try:
932 try:
933 return orig(ui, repo.unfiltered(), dest, **opts)
933 return orig(ui, repo.unfiltered(), dest, **opts)
934 finally:
934 finally:
935 repo.unfiltered().lfstatus = False
935 repo.unfiltered().lfstatus = False
936
936
937 def hgwebarchive(orig, web):
937 def hgwebarchive(orig, web):
938 web.repo.lfstatus = True
938 web.repo.lfstatus = True
939
939
940 try:
940 try:
941 return orig(web)
941 return orig(web)
942 finally:
942 finally:
943 web.repo.lfstatus = False
943 web.repo.lfstatus = False
944
944
945 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
945 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
946 prefix='', mtime=None, subrepos=None):
946 prefix='', mtime=None, subrepos=None):
947 # For some reason setting repo.lfstatus in hgwebarchive only changes the
947 # For some reason setting repo.lfstatus in hgwebarchive only changes the
948 # unfiltered repo's attr, so check that as well.
948 # unfiltered repo's attr, so check that as well.
949 if not repo.lfstatus and not repo.unfiltered().lfstatus:
949 if not repo.lfstatus and not repo.unfiltered().lfstatus:
950 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
950 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
951 subrepos)
951 subrepos)
952
952
953 # No need to lock because we are only reading history and
953 # No need to lock because we are only reading history and
954 # largefile caches, neither of which are modified.
954 # largefile caches, neither of which are modified.
955 if node is not None:
955 if node is not None:
956 lfcommands.cachelfiles(repo.ui, repo, node)
956 lfcommands.cachelfiles(repo.ui, repo, node)
957
957
958 if kind not in archival.archivers:
958 if kind not in archival.archivers:
959 raise error.Abort(_("unknown archive type '%s'") % kind)
959 raise error.Abort(_("unknown archive type '%s'") % kind)
960
960
961 ctx = repo[node]
961 ctx = repo[node]
962
962
963 if kind == 'files':
963 if kind == 'files':
964 if prefix:
964 if prefix:
965 raise error.Abort(
965 raise error.Abort(
966 _('cannot give prefix when archiving to files'))
966 _('cannot give prefix when archiving to files'))
967 else:
967 else:
968 prefix = archival.tidyprefix(dest, kind, prefix)
968 prefix = archival.tidyprefix(dest, kind, prefix)
969
969
970 def write(name, mode, islink, getdata):
970 def write(name, mode, islink, getdata):
971 if matchfn and not matchfn(name):
971 if matchfn and not matchfn(name):
972 return
972 return
973 data = getdata()
973 data = getdata()
974 if decode:
974 if decode:
975 data = repo.wwritedata(name, data)
975 data = repo.wwritedata(name, data)
976 archiver.addfile(prefix + name, mode, islink, data)
976 archiver.addfile(prefix + name, mode, islink, data)
977
977
978 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
978 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
979
979
980 if repo.ui.configbool("ui", "archivemeta"):
980 if repo.ui.configbool("ui", "archivemeta"):
981 write('.hg_archival.txt', 0o644, False,
981 write('.hg_archival.txt', 0o644, False,
982 lambda: archival.buildmetadata(ctx))
982 lambda: archival.buildmetadata(ctx))
983
983
984 for f in ctx:
984 for f in ctx:
985 ff = ctx.flags(f)
985 ff = ctx.flags(f)
986 getdata = ctx[f].data
986 getdata = ctx[f].data
987 lfile = lfutil.splitstandin(f)
987 lfile = lfutil.splitstandin(f)
988 if lfile is not None:
988 if lfile is not None:
989 if node is not None:
989 if node is not None:
990 path = lfutil.findfile(repo, getdata().strip())
990 path = lfutil.findfile(repo, getdata().strip())
991
991
992 if path is None:
992 if path is None:
993 raise error.Abort(
993 raise error.Abort(
994 _('largefile %s not found in repo store or system cache')
994 _('largefile %s not found in repo store or system cache')
995 % lfile)
995 % lfile)
996 else:
996 else:
997 path = lfile
997 path = lfile
998
998
999 f = lfile
999 f = lfile
1000
1000
1001 getdata = lambda: util.readfile(path)
1001 getdata = lambda: util.readfile(path)
1002 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1002 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1003
1003
1004 if subrepos:
1004 if subrepos:
1005 for subpath in sorted(ctx.substate):
1005 for subpath in sorted(ctx.substate):
1006 sub = ctx.workingsub(subpath)
1006 sub = ctx.workingsub(subpath)
1007 submatch = matchmod.subdirmatcher(subpath, matchfn)
1007 submatch = matchmod.subdirmatcher(subpath, matchfn)
1008 sub._repo.lfstatus = True
1008 sub._repo.lfstatus = True
1009 sub.archive(archiver, prefix, submatch)
1009 sub.archive(archiver, prefix, submatch)
1010
1010
1011 archiver.done()
1011 archiver.done()
1012
1012
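# For illustration, the standin -> largefile mapping used above and in
# hgsubrepoarchive() below: a tracked entry such as '.hglf/big.bin', whose
# content is the sha1 of the real file, is written to the archive as
# 'big.bin', with the data read from the repo store or the user cache
# (lfutil.splitstandin('.hglf/big.bin') -> 'big.bin';
#  lfutil.findfile(repo, <sha1>) -> local path to the blob, or None).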
1013 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1013 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1014 lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
1014 lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
1015 if not lfenabled or not repo._repo.lfstatus:
1015 if not lfenabled or not repo._repo.lfstatus:
1016 return orig(repo, archiver, prefix, match, decode)
1016 return orig(repo, archiver, prefix, match, decode)
1017
1017
1018 repo._get(repo._state + ('hg',))
1018 repo._get(repo._state + ('hg',))
1019 rev = repo._state[1]
1019 rev = repo._state[1]
1020 ctx = repo._repo[rev]
1020 ctx = repo._repo[rev]
1021
1021
1022 if ctx.node() is not None:
1022 if ctx.node() is not None:
1023 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1023 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1024
1024
1025 def write(name, mode, islink, getdata):
1025 def write(name, mode, islink, getdata):
1026 # At this point, the standin has been replaced with the largefile name,
1026 # At this point, the standin has been replaced with the largefile name,
1027 # so the normal matcher works here without the lfutil variants.
1027 # so the normal matcher works here without the lfutil variants.
1028 if match and not match(f):
1028 if match and not match(f):
1029 return
1029 return
1030 data = getdata()
1030 data = getdata()
1031 if decode:
1031 if decode:
1032 data = repo._repo.wwritedata(name, data)
1032 data = repo._repo.wwritedata(name, data)
1033
1033
1034 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1034 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1035
1035
1036 for f in ctx:
1036 for f in ctx:
1037 ff = ctx.flags(f)
1037 ff = ctx.flags(f)
1038 getdata = ctx[f].data
1038 getdata = ctx[f].data
1039 lfile = lfutil.splitstandin(f)
1039 lfile = lfutil.splitstandin(f)
1040 if lfile is not None:
1040 if lfile is not None:
1041 if ctx.node() is not None:
1041 if ctx.node() is not None:
1042 path = lfutil.findfile(repo._repo, getdata().strip())
1042 path = lfutil.findfile(repo._repo, getdata().strip())
1043
1043
1044 if path is None:
1044 if path is None:
1045 raise error.Abort(
1045 raise error.Abort(
1046 _('largefile %s not found in repo store or system cache')
1046 _('largefile %s not found in repo store or system cache')
1047 % lfile)
1047 % lfile)
1048 else:
1048 else:
1049 path = lfile
1049 path = lfile
1050
1050
1051 f = lfile
1051 f = lfile
1052
1052
1053 getdata = lambda: util.readfile(os.path.join(prefix, path))
1053 getdata = lambda: util.readfile(os.path.join(prefix, path))
1054
1054
1055 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1055 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1056
1056
1057 for subpath in sorted(ctx.substate):
1057 for subpath in sorted(ctx.substate):
1058 sub = ctx.workingsub(subpath)
1058 sub = ctx.workingsub(subpath)
1059 submatch = matchmod.subdirmatcher(subpath, match)
1059 submatch = matchmod.subdirmatcher(subpath, match)
1060 sub._repo.lfstatus = True
1060 sub._repo.lfstatus = True
1061 sub.archive(archiver, prefix + repo._path + '/', submatch, decode)
1061 sub.archive(archiver, prefix + repo._path + '/', submatch, decode)
1062
1062
1063 # If a largefile is modified, the change is not reflected in its
1063 # If a largefile is modified, the change is not reflected in its
1064 # standin until a commit. cmdutil.bailifchanged() raises an exception
1064 # standin until a commit. cmdutil.bailifchanged() raises an exception
1065 # if the repo has uncommitted changes. Wrap it to also check if
1065 # if the repo has uncommitted changes. Wrap it to also check if
1066 # largefiles were changed. This is used by bisect, backout and fetch.
1066 # largefiles were changed. This is used by bisect, backout and fetch.
1067 def overridebailifchanged(orig, repo, *args, **kwargs):
1067 def overridebailifchanged(orig, repo, *args, **kwargs):
1068 orig(repo, *args, **kwargs)
1068 orig(repo, *args, **kwargs)
1069 repo.lfstatus = True
1069 repo.lfstatus = True
1070 s = repo.status()
1070 s = repo.status()
1071 repo.lfstatus = False
1071 repo.lfstatus = False
1072 if s.modified or s.added or s.removed or s.deleted:
1072 if s.modified or s.added or s.removed or s.deleted:
1073 raise error.Abort(_('uncommitted changes'))
1073 raise error.Abort(_('uncommitted changes'))
1074
1074
1075 def postcommitstatus(orig, repo, *args, **kwargs):
1075 def postcommitstatus(orig, repo, *args, **kwargs):
1076 repo.lfstatus = True
1076 repo.lfstatus = True
1077 try:
1077 try:
1078 return orig(repo, *args, **kwargs)
1078 return orig(repo, *args, **kwargs)
1079 finally:
1079 finally:
1080 repo.lfstatus = False
1080 repo.lfstatus = False
1081
1081
1082 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly, dryrun):
1082 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly, dryrun):
1083 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1083 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1084 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly, dryrun)
1084 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly, dryrun)
1085 m = composelargefilematcher(match, repo[None].manifest())
1085 m = composelargefilematcher(match, repo[None].manifest())
1086
1086
1087 try:
1087 try:
1088 repo.lfstatus = True
1088 repo.lfstatus = True
1089 s = repo.status(match=m, clean=True)
1089 s = repo.status(match=m, clean=True)
1090 finally:
1090 finally:
1091 repo.lfstatus = False
1091 repo.lfstatus = False
1092 manifest = repo[None].manifest()
1092 manifest = repo[None].manifest()
1093 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1093 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1094 forget = [f for f in forget if lfutil.standin(f) in manifest]
1094 forget = [f for f in forget if lfutil.standin(f) in manifest]
1095
1095
1096 for f in forget:
1096 for f in forget:
1097 fstandin = lfutil.standin(f)
1097 fstandin = lfutil.standin(f)
1098 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1098 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1099 ui.warn(_('not removing %s: file is already untracked\n')
1099 ui.warn(_('not removing %s: file is already untracked\n')
1100 % m.rel(f))
1100 % m.rel(f))
1101 bad.append(f)
1101 bad.append(f)
1102
1102
1103 for f in forget:
1103 for f in forget:
1104 if ui.verbose or not m.exact(f):
1104 if ui.verbose or not m.exact(f):
1105 ui.status(_('removing %s\n') % m.rel(f))
1105 ui.status(_('removing %s\n') % m.rel(f))
1106
1106
1107 # Need to lock because standin files are deleted then removed from the
1107 # Need to lock because standin files are deleted then removed from the
1108 # repository and we could race in-between.
1108 # repository and we could race in-between.
1109 with repo.wlock():
1109 with repo.wlock():
1110 lfdirstate = lfutil.openlfdirstate(ui, repo)
1110 lfdirstate = lfutil.openlfdirstate(ui, repo)
1111 for f in forget:
1111 for f in forget:
1112 if lfdirstate[f] == 'a':
1112 if lfdirstate[f] == 'a':
1113 lfdirstate.drop(f)
1113 lfdirstate.drop(f)
1114 else:
1114 else:
1115 lfdirstate.remove(f)
1115 lfdirstate.remove(f)
1116 lfdirstate.write()
1116 lfdirstate.write()
1117 standins = [lfutil.standin(f) for f in forget]
1117 standins = [lfutil.standin(f) for f in forget]
1118 for f in standins:
1118 for f in standins:
1119 repo.wvfs.unlinkpath(f, ignoremissing=True)
1119 repo.wvfs.unlinkpath(f, ignoremissing=True)
1120 rejected = repo[None].forget(standins)
1120 rejected = repo[None].forget(standins)
1121
1121
1122 bad.extend(f for f in rejected if f in m.files())
1122 bad.extend(f for f in rejected if f in m.files())
1123 forgot.extend(f for f in forget if f not in rejected)
1123 forgot.extend(f for f in forget if f not in rejected)
1124 return bad, forgot
1124 return bad, forgot
1125
1125
1126 def _getoutgoings(repo, other, missing, addfunc):
1126 def _getoutgoings(repo, other, missing, addfunc):
1127 """get pairs of filename and largefile hash in outgoing revisions
1127 """get pairs of filename and largefile hash in outgoing revisions
1128 in 'missing'.
1128 in 'missing'.
1129
1129
1130 largefiles already existing on the 'other' repository are ignored.
1130 largefiles already existing on the 'other' repository are ignored.
1131
1131
1132 'addfunc' is invoked with each unique pair of filename and
1132 'addfunc' is invoked with each unique pair of filename and
1133 largefile hash value.
1133 largefile hash value.
1134 """
1134 """
1135 knowns = set()
1135 knowns = set()
1136 lfhashes = set()
1136 lfhashes = set()
1137 def dedup(fn, lfhash):
1137 def dedup(fn, lfhash):
1138 k = (fn, lfhash)
1138 k = (fn, lfhash)
1139 if k not in knowns:
1139 if k not in knowns:
1140 knowns.add(k)
1140 knowns.add(k)
1141 lfhashes.add(lfhash)
1141 lfhashes.add(lfhash)
1142 lfutil.getlfilestoupload(repo, missing, dedup)
1142 lfutil.getlfilestoupload(repo, missing, dedup)
1143 if lfhashes:
1143 if lfhashes:
1144 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1144 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1145 for fn, lfhash in knowns:
1145 for fn, lfhash in knowns:
1146 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1146 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1147 addfunc(fn, lfhash)
1147 addfunc(fn, lfhash)
1148
1148
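# Minimal usage sketch (hypothetical helper, mirroring what outgoinghook and
# summaryremotehook below do): collect the unique (filename, hash) pairs that
# still need to be uploaded to 'other' for the outgoing changesets 'missing'.
def _collectoutgoinglfiles(repo, other, missing):
    toupload = {}
    def addfunc(fn, lfhash):
        # invoked once per unique pair that does not yet exist on 'other'
        toupload.setdefault(fn, []).append(lfhash)
    _getoutgoings(repo, other, missing, addfunc)
    return toupload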
1149 def outgoinghook(ui, repo, other, opts, missing):
1149 def outgoinghook(ui, repo, other, opts, missing):
1150 if opts.pop('large', None):
1150 if opts.pop('large', None):
1151 lfhashes = set()
1151 lfhashes = set()
1152 if ui.debugflag:
1152 if ui.debugflag:
1153 toupload = {}
1153 toupload = {}
1154 def addfunc(fn, lfhash):
1154 def addfunc(fn, lfhash):
1155 if fn not in toupload:
1155 if fn not in toupload:
1156 toupload[fn] = []
1156 toupload[fn] = []
1157 toupload[fn].append(lfhash)
1157 toupload[fn].append(lfhash)
1158 lfhashes.add(lfhash)
1158 lfhashes.add(lfhash)
1159 def showhashes(fn):
1159 def showhashes(fn):
1160 for lfhash in sorted(toupload[fn]):
1160 for lfhash in sorted(toupload[fn]):
1161 ui.debug(' %s\n' % (lfhash))
1161 ui.debug(' %s\n' % (lfhash))
1162 else:
1162 else:
1163 toupload = set()
1163 toupload = set()
1164 def addfunc(fn, lfhash):
1164 def addfunc(fn, lfhash):
1165 toupload.add(fn)
1165 toupload.add(fn)
1166 lfhashes.add(lfhash)
1166 lfhashes.add(lfhash)
1167 def showhashes(fn):
1167 def showhashes(fn):
1168 pass
1168 pass
1169 _getoutgoings(repo, other, missing, addfunc)
1169 _getoutgoings(repo, other, missing, addfunc)
1170
1170
1171 if not toupload:
1171 if not toupload:
1172 ui.status(_('largefiles: no files to upload\n'))
1172 ui.status(_('largefiles: no files to upload\n'))
1173 else:
1173 else:
1174 ui.status(_('largefiles to upload (%d entities):\n')
1174 ui.status(_('largefiles to upload (%d entities):\n')
1175 % (len(lfhashes)))
1175 % (len(lfhashes)))
1176 for file in sorted(toupload):
1176 for file in sorted(toupload):
1177 ui.status(lfutil.splitstandin(file) + '\n')
1177 ui.status(lfutil.splitstandin(file) + '\n')
1178 showhashes(file)
1178 showhashes(file)
1179 ui.status('\n')
1179 ui.status('\n')
1180
1180
1181 def summaryremotehook(ui, repo, opts, changes):
1181 def summaryremotehook(ui, repo, opts, changes):
1182 largeopt = opts.get('large', False)
1182 largeopt = opts.get('large', False)
1183 if changes is None:
1183 if changes is None:
1184 if largeopt:
1184 if largeopt:
1185 return (False, True) # only outgoing check is needed
1185 return (False, True) # only outgoing check is needed
1186 else:
1186 else:
1187 return (False, False)
1187 return (False, False)
1188 elif largeopt:
1188 elif largeopt:
1189 url, branch, peer, outgoing = changes[1]
1189 url, branch, peer, outgoing = changes[1]
1190 if peer is None:
1190 if peer is None:
1191 # i18n: column positioning for "hg summary"
1191 # i18n: column positioning for "hg summary"
1192 ui.status(_('largefiles: (no remote repo)\n'))
1192 ui.status(_('largefiles: (no remote repo)\n'))
1193 return
1193 return
1194
1194
1195 toupload = set()
1195 toupload = set()
1196 lfhashes = set()
1196 lfhashes = set()
1197 def addfunc(fn, lfhash):
1197 def addfunc(fn, lfhash):
1198 toupload.add(fn)
1198 toupload.add(fn)
1199 lfhashes.add(lfhash)
1199 lfhashes.add(lfhash)
1200 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1200 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1201
1201
1202 if not toupload:
1202 if not toupload:
1203 # i18n: column positioning for "hg summary"
1203 # i18n: column positioning for "hg summary"
1204 ui.status(_('largefiles: (no files to upload)\n'))
1204 ui.status(_('largefiles: (no files to upload)\n'))
1205 else:
1205 else:
1206 # i18n: column positioning for "hg summary"
1206 # i18n: column positioning for "hg summary"
1207 ui.status(_('largefiles: %d entities for %d files to upload\n')
1207 ui.status(_('largefiles: %d entities for %d files to upload\n')
1208 % (len(lfhashes), len(toupload)))
1208 % (len(lfhashes), len(toupload)))
1209
1209
1210 def overridesummary(orig, ui, repo, *pats, **opts):
1210 def overridesummary(orig, ui, repo, *pats, **opts):
1211 try:
1211 try:
1212 repo.lfstatus = True
1212 repo.lfstatus = True
1213 orig(ui, repo, *pats, **opts)
1213 orig(ui, repo, *pats, **opts)
1214 finally:
1214 finally:
1215 repo.lfstatus = False
1215 repo.lfstatus = False
1216
1216
1217 def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
1217 def scmutiladdremove(orig, repo, matcher, prefix, opts=None):
1218 similarity=None):
1219 if opts is None:
1218 if opts is None:
1220 opts = {}
1219 opts = {}
1221 if not lfutil.islfilesrepo(repo):
1220 if not lfutil.islfilesrepo(repo):
1222 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1221 return orig(repo, matcher, prefix, opts)
1223 # Get the list of missing largefiles so we can remove them
1222 # Get the list of missing largefiles so we can remove them
1224 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1223 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1225 unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()),
1224 unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()),
1226 subrepos=[], ignored=False, clean=False,
1225 subrepos=[], ignored=False, clean=False,
1227 unknown=False)
1226 unknown=False)
1228
1227
1229 # Call into the normal remove code, but we want the removal of the standin
1228 # Call into the normal remove code, but we want the removal of the standin
1230 # to be handled by the original addremove. Monkey patching here makes sure
1229 # to be handled by the original addremove. Monkey patching here makes sure
1231 # we don't remove the standin in the largefiles code, preventing a very
1230 # we don't remove the standin in the largefiles code, preventing a very
1232 # confused state later.
1231 # confused state later.
1233 if s.deleted:
1232 if s.deleted:
1234 m = copy.copy(matcher)
1233 m = copy.copy(matcher)
1235
1234
1236 # The m._files and m._map attributes are not changed to the deleted list
1235 # The m._files and m._map attributes are not changed to the deleted list
1237 # because that affects the m.exact() test, which in turn governs whether
1236 # because that affects the m.exact() test, which in turn governs whether
1238 # or not the file name is printed, and how. Simply limit the original
1237 # or not the file name is printed, and how. Simply limit the original
1239 # matches to those in the deleted status list.
1238 # matches to those in the deleted status list.
1240 matchfn = m.matchfn
1239 matchfn = m.matchfn
1241 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1240 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1242
1241
1243 removelargefiles(repo.ui, repo, True, m, opts.get('dry_run'),
1242 removelargefiles(repo.ui, repo, True, m, opts.get('dry_run'),
1244 **pycompat.strkwargs(opts))
1243 **pycompat.strkwargs(opts))
1245 # Call into the normal add code, and any files that *should* be added as
1244 # Call into the normal add code, and any files that *should* be added as
1246 # largefiles will be
1245 # largefiles will be
1247 added, bad = addlargefiles(repo.ui, repo, True, matcher,
1246 added, bad = addlargefiles(repo.ui, repo, True, matcher,
1248 **pycompat.strkwargs(opts))
1247 **pycompat.strkwargs(opts))
1249 # Now that we've handled largefiles, hand off to the original addremove
1248 # Now that we've handled largefiles, hand off to the original addremove
1250 # function to take care of the rest. Make sure it doesn't do anything with
1249 # function to take care of the rest. Make sure it doesn't do anything with
1251 # largefiles by passing a matcher that will ignore them.
1250 # largefiles by passing a matcher that will ignore them.
1252 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1251 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1253 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1252 return orig(repo, matcher, prefix, opts)
1254
1253
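# Caller-side sketch of the API change in this patch (the commands.py hunk
# later in this changeset does exactly this): 'dry_run' and 'similarity' are
# no longer separate keyword arguments of scmutil.addremove() and are read
# from 'opts' instead.
#
#   before: scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
#   after:  opts['similarity'] = sim / 100.0
#           scmutil.addremove(repo, matcher, "", opts)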
1255 # Calling purge with --all will cause the largefiles to be deleted.
1254 # Calling purge with --all will cause the largefiles to be deleted.
1256 # Override repo.status to prevent this from happening.
1255 # Override repo.status to prevent this from happening.
1257 def overridepurge(orig, ui, repo, *dirs, **opts):
1256 def overridepurge(orig, ui, repo, *dirs, **opts):
1258 # XXX Monkey patching a repoview will not work. The assigned attribute will
1257 # XXX Monkey patching a repoview will not work. The assigned attribute will
1259 # be set on the unfiltered repo, but we will only look up attributes in the
1258 # be set on the unfiltered repo, but we will only look up attributes in the
1260 # unfiltered repo if the lookup in the repoview object itself fails. As the
1259 # unfiltered repo if the lookup in the repoview object itself fails. As the
1261 # monkey patched method exists on the repoview class the lookup will not
1260 # monkey patched method exists on the repoview class the lookup will not
1262 # fail. As a result, the original version will shadow the monkey patched
1261 # fail. As a result, the original version will shadow the monkey patched
1263 # one, defeating the monkey patch.
1262 # one, defeating the monkey patch.
1264 #
1263 #
1265 # As a workaround, we use an unfiltered repo here. We should do something
1264 # As a workaround, we use an unfiltered repo here. We should do something
1266 # cleaner instead.
1265 # cleaner instead.
1267 repo = repo.unfiltered()
1266 repo = repo.unfiltered()
1268 oldstatus = repo.status
1267 oldstatus = repo.status
1269 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1268 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1270 clean=False, unknown=False, listsubrepos=False):
1269 clean=False, unknown=False, listsubrepos=False):
1271 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1270 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1272 listsubrepos)
1271 listsubrepos)
1273 lfdirstate = lfutil.openlfdirstate(ui, repo)
1272 lfdirstate = lfutil.openlfdirstate(ui, repo)
1274 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1273 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1275 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1274 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1276 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1275 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1277 unknown, ignored, r.clean)
1276 unknown, ignored, r.clean)
1278 repo.status = overridestatus
1277 repo.status = overridestatus
1279 orig(ui, repo, *dirs, **opts)
1278 orig(ui, repo, *dirs, **opts)
1280 repo.status = oldstatus
1279 repo.status = oldstatus
1281
1280
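# Toy illustration of the shadowing problem described in the XXX comment in
# overridepurge() above. These are simplified stand-ins, not Mercurial's real
# repoview/localrepository classes: attribute writes are forwarded to the
# wrapped repo, but reads find the method on the view's own class first, so
# the monkey patch is never seen through the view.
def _repoviewshadowingdemo():
    class fakeunfiltered(object):
        def status(self):
            return 'original'

    class fakeview(fakeunfiltered):            # shares the repo's class methods
        def __init__(self, unfiltered):
            object.__setattr__(self, '_unfiltered', unfiltered)
        def __setattr__(self, name, value):    # writes land on the wrapped repo
            setattr(self._unfiltered, name, value)

    unfi = fakeunfiltered()
    view = fakeview(unfi)
    view.status = lambda: 'patched'            # stored as an attribute on unfi
    assert view.status() == 'original'         # the class-level method wins
    assert unfi.status() == 'patched'          # only the unfiltered repo sees it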
1282 def overriderollback(orig, ui, repo, **opts):
1281 def overriderollback(orig, ui, repo, **opts):
1283 with repo.wlock():
1282 with repo.wlock():
1284 before = repo.dirstate.parents()
1283 before = repo.dirstate.parents()
1285 orphans = set(f for f in repo.dirstate
1284 orphans = set(f for f in repo.dirstate
1286 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1285 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1287 result = orig(ui, repo, **opts)
1286 result = orig(ui, repo, **opts)
1288 after = repo.dirstate.parents()
1287 after = repo.dirstate.parents()
1289 if before == after:
1288 if before == after:
1290 return result # no need to restore standins
1289 return result # no need to restore standins
1291
1290
1292 pctx = repo['.']
1291 pctx = repo['.']
1293 for f in repo.dirstate:
1292 for f in repo.dirstate:
1294 if lfutil.isstandin(f):
1293 if lfutil.isstandin(f):
1295 orphans.discard(f)
1294 orphans.discard(f)
1296 if repo.dirstate[f] == 'r':
1295 if repo.dirstate[f] == 'r':
1297 repo.wvfs.unlinkpath(f, ignoremissing=True)
1296 repo.wvfs.unlinkpath(f, ignoremissing=True)
1298 elif f in pctx:
1297 elif f in pctx:
1299 fctx = pctx[f]
1298 fctx = pctx[f]
1300 repo.wwrite(f, fctx.data(), fctx.flags())
1299 repo.wwrite(f, fctx.data(), fctx.flags())
1301 else:
1300 else:
1302 # content of standin is not so important in 'a',
1301 # content of standin is not so important in 'a',
1303 # 'm' or 'n' (coming from the 2nd parent) cases
1302 # 'm' or 'n' (coming from the 2nd parent) cases
1304 lfutil.writestandin(repo, f, '', False)
1303 lfutil.writestandin(repo, f, '', False)
1305 for standin in orphans:
1304 for standin in orphans:
1306 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1305 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1307
1306
1308 lfdirstate = lfutil.openlfdirstate(ui, repo)
1307 lfdirstate = lfutil.openlfdirstate(ui, repo)
1309 orphans = set(lfdirstate)
1308 orphans = set(lfdirstate)
1310 lfiles = lfutil.listlfiles(repo)
1309 lfiles = lfutil.listlfiles(repo)
1311 for file in lfiles:
1310 for file in lfiles:
1312 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1311 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1313 orphans.discard(file)
1312 orphans.discard(file)
1314 for lfile in orphans:
1313 for lfile in orphans:
1315 lfdirstate.drop(lfile)
1314 lfdirstate.drop(lfile)
1316 lfdirstate.write()
1315 lfdirstate.write()
1317 return result
1316 return result
1318
1317
1319 def overridetransplant(orig, ui, repo, *revs, **opts):
1318 def overridetransplant(orig, ui, repo, *revs, **opts):
1320 resuming = opts.get(r'continue')
1319 resuming = opts.get(r'continue')
1321 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1320 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1322 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1321 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1323 try:
1322 try:
1324 result = orig(ui, repo, *revs, **opts)
1323 result = orig(ui, repo, *revs, **opts)
1325 finally:
1324 finally:
1326 repo._lfstatuswriters.pop()
1325 repo._lfstatuswriters.pop()
1327 repo._lfcommithooks.pop()
1326 repo._lfcommithooks.pop()
1328 return result
1327 return result
1329
1328
1330 def overridecat(orig, ui, repo, file1, *pats, **opts):
1329 def overridecat(orig, ui, repo, file1, *pats, **opts):
1331 opts = pycompat.byteskwargs(opts)
1330 opts = pycompat.byteskwargs(opts)
1332 ctx = scmutil.revsingle(repo, opts.get('rev'))
1331 ctx = scmutil.revsingle(repo, opts.get('rev'))
1333 err = 1
1332 err = 1
1334 notbad = set()
1333 notbad = set()
1335 m = scmutil.match(ctx, (file1,) + pats, opts)
1334 m = scmutil.match(ctx, (file1,) + pats, opts)
1336 origmatchfn = m.matchfn
1335 origmatchfn = m.matchfn
1337 def lfmatchfn(f):
1336 def lfmatchfn(f):
1338 if origmatchfn(f):
1337 if origmatchfn(f):
1339 return True
1338 return True
1340 lf = lfutil.splitstandin(f)
1339 lf = lfutil.splitstandin(f)
1341 if lf is None:
1340 if lf is None:
1342 return False
1341 return False
1343 notbad.add(lf)
1342 notbad.add(lf)
1344 return origmatchfn(lf)
1343 return origmatchfn(lf)
1345 m.matchfn = lfmatchfn
1344 m.matchfn = lfmatchfn
1346 origbadfn = m.bad
1345 origbadfn = m.bad
1347 def lfbadfn(f, msg):
1346 def lfbadfn(f, msg):
1348 if not f in notbad:
1347 if not f in notbad:
1349 origbadfn(f, msg)
1348 origbadfn(f, msg)
1350 m.bad = lfbadfn
1349 m.bad = lfbadfn
1351
1350
1352 origvisitdirfn = m.visitdir
1351 origvisitdirfn = m.visitdir
1353 def lfvisitdirfn(dir):
1352 def lfvisitdirfn(dir):
1354 if dir == lfutil.shortname:
1353 if dir == lfutil.shortname:
1355 return True
1354 return True
1356 ret = origvisitdirfn(dir)
1355 ret = origvisitdirfn(dir)
1357 if ret:
1356 if ret:
1358 return ret
1357 return ret
1359 lf = lfutil.splitstandin(dir)
1358 lf = lfutil.splitstandin(dir)
1360 if lf is None:
1359 if lf is None:
1361 return False
1360 return False
1362 return origvisitdirfn(lf)
1361 return origvisitdirfn(lf)
1363 m.visitdir = lfvisitdirfn
1362 m.visitdir = lfvisitdirfn
1364
1363
1365 for f in ctx.walk(m):
1364 for f in ctx.walk(m):
1366 with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp:
1365 with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp:
1367 lf = lfutil.splitstandin(f)
1366 lf = lfutil.splitstandin(f)
1368 if lf is None or origmatchfn(f):
1367 if lf is None or origmatchfn(f):
1369 # duplicating unreachable code from commands.cat
1368 # duplicating unreachable code from commands.cat
1370 data = ctx[f].data()
1369 data = ctx[f].data()
1371 if opts.get('decode'):
1370 if opts.get('decode'):
1372 data = repo.wwritedata(f, data)
1371 data = repo.wwritedata(f, data)
1373 fp.write(data)
1372 fp.write(data)
1374 else:
1373 else:
1375 hash = lfutil.readasstandin(ctx[f])
1374 hash = lfutil.readasstandin(ctx[f])
1376 if not lfutil.inusercache(repo.ui, hash):
1375 if not lfutil.inusercache(repo.ui, hash):
1377 store = storefactory.openstore(repo)
1376 store = storefactory.openstore(repo)
1378 success, missing = store.get([(lf, hash)])
1377 success, missing = store.get([(lf, hash)])
1379 if len(success) != 1:
1378 if len(success) != 1:
1380 raise error.Abort(
1379 raise error.Abort(
1381 _('largefile %s is not in cache and could not be '
1380 _('largefile %s is not in cache and could not be '
1382 'downloaded') % lf)
1381 'downloaded') % lf)
1383 path = lfutil.usercachepath(repo.ui, hash)
1382 path = lfutil.usercachepath(repo.ui, hash)
1384 with open(path, "rb") as fpin:
1383 with open(path, "rb") as fpin:
1385 for chunk in util.filechunkiter(fpin):
1384 for chunk in util.filechunkiter(fpin):
1386 fp.write(chunk)
1385 fp.write(chunk)
1387 err = 0
1386 err = 0
1388 return err
1387 return err
1389
1388
1390 def mergeupdate(orig, repo, node, branchmerge, force,
1389 def mergeupdate(orig, repo, node, branchmerge, force,
1391 *args, **kwargs):
1390 *args, **kwargs):
1392 matcher = kwargs.get(r'matcher', None)
1391 matcher = kwargs.get(r'matcher', None)
1393 # note if this is a partial update
1392 # note if this is a partial update
1394 partial = matcher and not matcher.always()
1393 partial = matcher and not matcher.always()
1395 with repo.wlock():
1394 with repo.wlock():
1396 # branch | | |
1395 # branch | | |
1397 # merge | force | partial | action
1396 # merge | force | partial | action
1398 # -------+-------+---------+--------------
1397 # -------+-------+---------+--------------
1399 # x | x | x | linear-merge
1398 # x | x | x | linear-merge
1400 # o | x | x | branch-merge
1399 # o | x | x | branch-merge
1401 # x | o | x | overwrite (as clean update)
1400 # x | o | x | overwrite (as clean update)
1402 # o | o | x | force-branch-merge (*1)
1401 # o | o | x | force-branch-merge (*1)
1403 # x | x | o | (*)
1402 # x | x | o | (*)
1404 # o | x | o | (*)
1403 # o | x | o | (*)
1405 # x | o | o | overwrite (as revert)
1404 # x | o | o | overwrite (as revert)
1406 # o | o | o | (*)
1405 # o | o | o | (*)
1407 #
1406 #
1408 # (*) don't care
1407 # (*) don't care
1409 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1408 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1410
1409
1411 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1410 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1412 unsure, s = lfdirstate.status(matchmod.always(repo.root,
1411 unsure, s = lfdirstate.status(matchmod.always(repo.root,
1413 repo.getcwd()),
1412 repo.getcwd()),
1414 subrepos=[], ignored=False,
1413 subrepos=[], ignored=False,
1415 clean=True, unknown=False)
1414 clean=True, unknown=False)
1416 oldclean = set(s.clean)
1415 oldclean = set(s.clean)
1417 pctx = repo['.']
1416 pctx = repo['.']
1418 dctx = repo[node]
1417 dctx = repo[node]
1419 for lfile in unsure + s.modified:
1418 for lfile in unsure + s.modified:
1420 lfileabs = repo.wvfs.join(lfile)
1419 lfileabs = repo.wvfs.join(lfile)
1421 if not repo.wvfs.exists(lfileabs):
1420 if not repo.wvfs.exists(lfileabs):
1422 continue
1421 continue
1423 lfhash = lfutil.hashfile(lfileabs)
1422 lfhash = lfutil.hashfile(lfileabs)
1424 standin = lfutil.standin(lfile)
1423 standin = lfutil.standin(lfile)
1425 lfutil.writestandin(repo, standin, lfhash,
1424 lfutil.writestandin(repo, standin, lfhash,
1426 lfutil.getexecutable(lfileabs))
1425 lfutil.getexecutable(lfileabs))
1427 if (standin in pctx and
1426 if (standin in pctx and
1428 lfhash == lfutil.readasstandin(pctx[standin])):
1427 lfhash == lfutil.readasstandin(pctx[standin])):
1429 oldclean.add(lfile)
1428 oldclean.add(lfile)
1430 for lfile in s.added:
1429 for lfile in s.added:
1431 fstandin = lfutil.standin(lfile)
1430 fstandin = lfutil.standin(lfile)
1432 if fstandin not in dctx:
1431 if fstandin not in dctx:
1433 # in this case, content of standin file is meaningless
1432 # in this case, content of standin file is meaningless
1434 # (in dctx, lfile is unknown, or normal file)
1433 # (in dctx, lfile is unknown, or normal file)
1435 continue
1434 continue
1436 lfutil.updatestandin(repo, lfile, fstandin)
1435 lfutil.updatestandin(repo, lfile, fstandin)
1437 # mark all clean largefiles as dirty, just in case the update gets
1436 # mark all clean largefiles as dirty, just in case the update gets
1438 # interrupted before largefiles and lfdirstate are synchronized
1437 # interrupted before largefiles and lfdirstate are synchronized
1439 for lfile in oldclean:
1438 for lfile in oldclean:
1440 lfdirstate.normallookup(lfile)
1439 lfdirstate.normallookup(lfile)
1441 lfdirstate.write()
1440 lfdirstate.write()
1442
1441
1443 oldstandins = lfutil.getstandinsstate(repo)
1442 oldstandins = lfutil.getstandinsstate(repo)
1444 # Make sure the merge runs on disk, not in-memory. largefiles is not a
1443 # Make sure the merge runs on disk, not in-memory. largefiles is not a
1445 # good candidate for in-memory merge (large files, custom dirstate,
1444 # good candidate for in-memory merge (large files, custom dirstate,
1446 # matcher usage).
1445 # matcher usage).
1447 kwargs[r'wc'] = repo[None]
1446 kwargs[r'wc'] = repo[None]
1448 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1447 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1449
1448
1450 newstandins = lfutil.getstandinsstate(repo)
1449 newstandins = lfutil.getstandinsstate(repo)
1451 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1450 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1452
1451
1453 # to avoid leaving all largefiles dirty (and thus rehashing them), mark
1452 # to avoid leaving all largefiles dirty (and thus rehashing them), mark
1454 # all the ones that didn't change as clean
1453 # all the ones that didn't change as clean
1455 for lfile in oldclean.difference(filelist):
1454 for lfile in oldclean.difference(filelist):
1456 lfdirstate.normal(lfile)
1455 lfdirstate.normal(lfile)
1457 lfdirstate.write()
1456 lfdirstate.write()
1458
1457
1459 if branchmerge or force or partial:
1458 if branchmerge or force or partial:
1460 filelist.extend(s.deleted + s.removed)
1459 filelist.extend(s.deleted + s.removed)
1461
1460
1462 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1461 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1463 normallookup=partial)
1462 normallookup=partial)
1464
1463
1465 return result
1464 return result
1466
1465
1467 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1466 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1468 result = orig(repo, files, *args, **kwargs)
1467 result = orig(repo, files, *args, **kwargs)
1469
1468
1470 filelist = []
1469 filelist = []
1471 for f in files:
1470 for f in files:
1472 lf = lfutil.splitstandin(f)
1471 lf = lfutil.splitstandin(f)
1473 if lf is not None:
1472 if lf is not None:
1474 filelist.append(lf)
1473 filelist.append(lf)
1475 if filelist:
1474 if filelist:
1476 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1475 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1477 printmessage=False, normallookup=True)
1476 printmessage=False, normallookup=True)
1478
1477
1479 return result
1478 return result
1480
1479
1481 def upgraderequirements(orig, repo):
1480 def upgraderequirements(orig, repo):
1482 reqs = orig(repo)
1481 reqs = orig(repo)
1483 if 'largefiles' in repo.requirements:
1482 if 'largefiles' in repo.requirements:
1484 reqs.add('largefiles')
1483 reqs.add('largefiles')
1485 return reqs
1484 return reqs
1486
1485
1487 _lfscheme = 'largefile://'
1486 _lfscheme = 'largefile://'
1488 def openlargefile(orig, ui, url_, data=None):
1487 def openlargefile(orig, ui, url_, data=None):
1489 if url_.startswith(_lfscheme):
1488 if url_.startswith(_lfscheme):
1490 if data:
1489 if data:
1491 msg = "cannot use data on a 'largefile://' url"
1490 msg = "cannot use data on a 'largefile://' url"
1492 raise error.ProgrammingError(msg)
1491 raise error.ProgrammingError(msg)
1493 lfid = url_[len(_lfscheme):]
1492 lfid = url_[len(_lfscheme):]
1494 return storefactory.getlfile(ui, lfid)
1493 return storefactory.getlfile(ui, lfid)
1495 else:
1494 else:
1496 return orig(ui, url_, data=data)
1495 return orig(ui, url_, data=data)
@@ -1,5640 +1,5641 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14 import sys
14 import sys
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23 from . import (
23 from . import (
24 archival,
24 archival,
25 bookmarks,
25 bookmarks,
26 bundle2,
26 bundle2,
27 changegroup,
27 changegroup,
28 cmdutil,
28 cmdutil,
29 copies,
29 copies,
30 debugcommands as debugcommandsmod,
30 debugcommands as debugcommandsmod,
31 destutil,
31 destutil,
32 dirstateguard,
32 dirstateguard,
33 discovery,
33 discovery,
34 encoding,
34 encoding,
35 error,
35 error,
36 exchange,
36 exchange,
37 extensions,
37 extensions,
38 formatter,
38 formatter,
39 graphmod,
39 graphmod,
40 hbisect,
40 hbisect,
41 help,
41 help,
42 hg,
42 hg,
43 lock as lockmod,
43 lock as lockmod,
44 logcmdutil,
44 logcmdutil,
45 merge as mergemod,
45 merge as mergemod,
46 obsolete,
46 obsolete,
47 obsutil,
47 obsutil,
48 patch,
48 patch,
49 phases,
49 phases,
50 pycompat,
50 pycompat,
51 rcutil,
51 rcutil,
52 registrar,
52 registrar,
53 revsetlang,
53 revsetlang,
54 rewriteutil,
54 rewriteutil,
55 scmutil,
55 scmutil,
56 server,
56 server,
57 streamclone,
57 streamclone,
58 tags as tagsmod,
58 tags as tagsmod,
59 templatekw,
59 templatekw,
60 ui as uimod,
60 ui as uimod,
61 util,
61 util,
62 wireprotoserver,
62 wireprotoserver,
63 )
63 )
64 from .utils import (
64 from .utils import (
65 dateutil,
65 dateutil,
66 procutil,
66 procutil,
67 stringutil,
67 stringutil,
68 )
68 )
69
69
70 release = lockmod.release
70 release = lockmod.release
71
71
72 table = {}
72 table = {}
73 table.update(debugcommandsmod.command._table)
73 table.update(debugcommandsmod.command._table)
74
74
75 command = registrar.command(table)
75 command = registrar.command(table)
76 readonly = registrar.command.readonly
76 readonly = registrar.command.readonly
77
77
78 # common command options
78 # common command options
79
79
80 globalopts = [
80 globalopts = [
81 ('R', 'repository', '',
81 ('R', 'repository', '',
82 _('repository root directory or name of overlay bundle file'),
82 _('repository root directory or name of overlay bundle file'),
83 _('REPO')),
83 _('REPO')),
84 ('', 'cwd', '',
84 ('', 'cwd', '',
85 _('change working directory'), _('DIR')),
85 _('change working directory'), _('DIR')),
86 ('y', 'noninteractive', None,
86 ('y', 'noninteractive', None,
87 _('do not prompt, automatically pick the first choice for all prompts')),
87 _('do not prompt, automatically pick the first choice for all prompts')),
88 ('q', 'quiet', None, _('suppress output')),
88 ('q', 'quiet', None, _('suppress output')),
89 ('v', 'verbose', None, _('enable additional output')),
89 ('v', 'verbose', None, _('enable additional output')),
90 ('', 'color', '',
90 ('', 'color', '',
91 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
91 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
92 # and should not be translated
92 # and should not be translated
93 _("when to colorize (boolean, always, auto, never, or debug)"),
93 _("when to colorize (boolean, always, auto, never, or debug)"),
94 _('TYPE')),
94 _('TYPE')),
95 ('', 'config', [],
95 ('', 'config', [],
96 _('set/override config option (use \'section.name=value\')'),
96 _('set/override config option (use \'section.name=value\')'),
97 _('CONFIG')),
97 _('CONFIG')),
98 ('', 'debug', None, _('enable debugging output')),
98 ('', 'debug', None, _('enable debugging output')),
99 ('', 'debugger', None, _('start debugger')),
99 ('', 'debugger', None, _('start debugger')),
100 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
100 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
101 _('ENCODE')),
101 _('ENCODE')),
102 ('', 'encodingmode', encoding.encodingmode,
102 ('', 'encodingmode', encoding.encodingmode,
103 _('set the charset encoding mode'), _('MODE')),
103 _('set the charset encoding mode'), _('MODE')),
104 ('', 'traceback', None, _('always print a traceback on exception')),
104 ('', 'traceback', None, _('always print a traceback on exception')),
105 ('', 'time', None, _('time how long the command takes')),
105 ('', 'time', None, _('time how long the command takes')),
106 ('', 'profile', None, _('print command execution profile')),
106 ('', 'profile', None, _('print command execution profile')),
107 ('', 'version', None, _('output version information and exit')),
107 ('', 'version', None, _('output version information and exit')),
108 ('h', 'help', None, _('display help and exit')),
108 ('h', 'help', None, _('display help and exit')),
109 ('', 'hidden', False, _('consider hidden changesets')),
109 ('', 'hidden', False, _('consider hidden changesets')),
110 ('', 'pager', 'auto',
110 ('', 'pager', 'auto',
111 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
111 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
112 ]
112 ]
113
113
114 dryrunopts = cmdutil.dryrunopts
114 dryrunopts = cmdutil.dryrunopts
115 remoteopts = cmdutil.remoteopts
115 remoteopts = cmdutil.remoteopts
116 walkopts = cmdutil.walkopts
116 walkopts = cmdutil.walkopts
117 commitopts = cmdutil.commitopts
117 commitopts = cmdutil.commitopts
118 commitopts2 = cmdutil.commitopts2
118 commitopts2 = cmdutil.commitopts2
119 formatteropts = cmdutil.formatteropts
119 formatteropts = cmdutil.formatteropts
120 templateopts = cmdutil.templateopts
120 templateopts = cmdutil.templateopts
121 logopts = cmdutil.logopts
121 logopts = cmdutil.logopts
122 diffopts = cmdutil.diffopts
122 diffopts = cmdutil.diffopts
123 diffwsopts = cmdutil.diffwsopts
123 diffwsopts = cmdutil.diffwsopts
124 diffopts2 = cmdutil.diffopts2
124 diffopts2 = cmdutil.diffopts2
125 mergetoolopts = cmdutil.mergetoolopts
125 mergetoolopts = cmdutil.mergetoolopts
126 similarityopts = cmdutil.similarityopts
126 similarityopts = cmdutil.similarityopts
127 subrepoopts = cmdutil.subrepoopts
127 subrepoopts = cmdutil.subrepoopts
128 debugrevlogopts = cmdutil.debugrevlogopts
128 debugrevlogopts = cmdutil.debugrevlogopts
129
129
130 # Commands start here, listed alphabetically
130 # Commands start here, listed alphabetically
131
131
132 @command('^add',
132 @command('^add',
133 walkopts + subrepoopts + dryrunopts,
133 walkopts + subrepoopts + dryrunopts,
134 _('[OPTION]... [FILE]...'),
134 _('[OPTION]... [FILE]...'),
135 inferrepo=True)
135 inferrepo=True)
136 def add(ui, repo, *pats, **opts):
136 def add(ui, repo, *pats, **opts):
137 """add the specified files on the next commit
137 """add the specified files on the next commit
138
138
139 Schedule files to be version controlled and added to the
139 Schedule files to be version controlled and added to the
140 repository.
140 repository.
141
141
142 The files will be added to the repository at the next commit. To
142 The files will be added to the repository at the next commit. To
143 undo an add before that, see :hg:`forget`.
143 undo an add before that, see :hg:`forget`.
144
144
145 If no names are given, add all files to the repository (except
145 If no names are given, add all files to the repository (except
146 files matching ``.hgignore``).
146 files matching ``.hgignore``).
147
147
148 .. container:: verbose
148 .. container:: verbose
149
149
150 Examples:
150 Examples:
151
151
152 - New (unknown) files are added
152 - New (unknown) files are added
153 automatically by :hg:`add`::
153 automatically by :hg:`add`::
154
154
155 $ ls
155 $ ls
156 foo.c
156 foo.c
157 $ hg status
157 $ hg status
158 ? foo.c
158 ? foo.c
159 $ hg add
159 $ hg add
160 adding foo.c
160 adding foo.c
161 $ hg status
161 $ hg status
162 A foo.c
162 A foo.c
163
163
164 - Specific files to be added can be specified::
164 - Specific files to be added can be specified::
165
165
166 $ ls
166 $ ls
167 bar.c foo.c
167 bar.c foo.c
168 $ hg status
168 $ hg status
169 ? bar.c
169 ? bar.c
170 ? foo.c
170 ? foo.c
171 $ hg add bar.c
171 $ hg add bar.c
172 $ hg status
172 $ hg status
173 A bar.c
173 A bar.c
174 ? foo.c
174 ? foo.c
175
175
176 Returns 0 if all files are successfully added.
176 Returns 0 if all files are successfully added.
177 """
177 """
178
178
179 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
179 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
180 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
180 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
181 return rejected and 1 or 0
181 return rejected and 1 or 0
182
182
183 @command('addremove',
183 @command('addremove',
184 similarityopts + subrepoopts + walkopts + dryrunopts,
184 similarityopts + subrepoopts + walkopts + dryrunopts,
185 _('[OPTION]... [FILE]...'),
185 _('[OPTION]... [FILE]...'),
186 inferrepo=True)
186 inferrepo=True)
187 def addremove(ui, repo, *pats, **opts):
187 def addremove(ui, repo, *pats, **opts):
188 """add all new files, delete all missing files
188 """add all new files, delete all missing files
189
189
190 Add all new files and remove all missing files from the
190 Add all new files and remove all missing files from the
191 repository.
191 repository.
192
192
193 Unless names are given, new files are ignored if they match any of
193 Unless names are given, new files are ignored if they match any of
194 the patterns in ``.hgignore``. As with add, these changes take
194 the patterns in ``.hgignore``. As with add, these changes take
195 effect at the next commit.
195 effect at the next commit.
196
196
197 Use the -s/--similarity option to detect renamed files. This
197 Use the -s/--similarity option to detect renamed files. This
198 option takes a percentage between 0 (disabled) and 100 (files must
198 option takes a percentage between 0 (disabled) and 100 (files must
199 be identical) as its parameter. With a parameter greater than 0,
199 be identical) as its parameter. With a parameter greater than 0,
200 this compares every removed file with every added file and records
200 this compares every removed file with every added file and records
201 those similar enough as renames. Detecting renamed files this way
201 those similar enough as renames. Detecting renamed files this way
202 can be expensive. After using this option, :hg:`status -C` can be
202 can be expensive. After using this option, :hg:`status -C` can be
203 used to check which files were identified as moved or renamed. If
203 used to check which files were identified as moved or renamed. If
204 not specified, -s/--similarity defaults to 100 and only renames of
204 not specified, -s/--similarity defaults to 100 and only renames of
205 identical files are detected.
205 identical files are detected.
206
206
207 .. container:: verbose
207 .. container:: verbose
208
208
209 Examples:
209 Examples:
210
210
211 - A number of files (bar.c and foo.c) are new,
211 - A number of files (bar.c and foo.c) are new,
212 while foobar.c has been removed (without using :hg:`remove`)
212 while foobar.c has been removed (without using :hg:`remove`)
213 from the repository::
213 from the repository::
214
214
215 $ ls
215 $ ls
216 bar.c foo.c
216 bar.c foo.c
217 $ hg status
217 $ hg status
218 ! foobar.c
218 ! foobar.c
219 ? bar.c
219 ? bar.c
220 ? foo.c
220 ? foo.c
221 $ hg addremove
221 $ hg addremove
222 adding bar.c
222 adding bar.c
223 adding foo.c
223 adding foo.c
224 removing foobar.c
224 removing foobar.c
225 $ hg status
225 $ hg status
226 A bar.c
226 A bar.c
227 A foo.c
227 A foo.c
228 R foobar.c
228 R foobar.c
229
229
230 - A file foobar.c was moved to foo.c without using :hg:`rename`.
230 - A file foobar.c was moved to foo.c without using :hg:`rename`.
231 Afterwards, it was edited slightly::
231 Afterwards, it was edited slightly::
232
232
233 $ ls
233 $ ls
234 foo.c
234 foo.c
235 $ hg status
235 $ hg status
236 ! foobar.c
236 ! foobar.c
237 ? foo.c
237 ? foo.c
238 $ hg addremove --similarity 90
238 $ hg addremove --similarity 90
239 removing foobar.c
239 removing foobar.c
240 adding foo.c
240 adding foo.c
241 recording removal of foobar.c as rename to foo.c (94% similar)
241 recording removal of foobar.c as rename to foo.c (94% similar)
242 $ hg status -C
242 $ hg status -C
243 A foo.c
243 A foo.c
244 foobar.c
244 foobar.c
245 R foobar.c
245 R foobar.c
246
246
247 Returns 0 if all files are successfully added.
247 Returns 0 if all files are successfully added.
248 """
248 """
249 opts = pycompat.byteskwargs(opts)
249 opts = pycompat.byteskwargs(opts)
250 try:
250 try:
251 sim = float(opts.get('similarity') or 100)
251 sim = float(opts.get('similarity') or 100)
252 except ValueError:
252 except ValueError:
253 raise error.Abort(_('similarity must be a number'))
253 raise error.Abort(_('similarity must be a number'))
254 if sim < 0 or sim > 100:
254 if sim < 0 or sim > 100:
255 raise error.Abort(_('similarity must be between 0 and 100'))
255 raise error.Abort(_('similarity must be between 0 and 100'))
256 opts['similarity'] = sim / 100.0
256 matcher = scmutil.match(repo[None], pats, opts)
257 matcher = scmutil.match(repo[None], pats, opts)
257 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
258 return scmutil.addremove(repo, matcher, "", opts)
258
259
259 @command('^annotate|blame',
260 @command('^annotate|blame',
260 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
261 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
261 ('', 'follow', None,
262 ('', 'follow', None,
262 _('follow copies/renames and list the filename (DEPRECATED)')),
263 _('follow copies/renames and list the filename (DEPRECATED)')),
263 ('', 'no-follow', None, _("don't follow copies and renames")),
264 ('', 'no-follow', None, _("don't follow copies and renames")),
264 ('a', 'text', None, _('treat all files as text')),
265 ('a', 'text', None, _('treat all files as text')),
265 ('u', 'user', None, _('list the author (long with -v)')),
266 ('u', 'user', None, _('list the author (long with -v)')),
266 ('f', 'file', None, _('list the filename')),
267 ('f', 'file', None, _('list the filename')),
267 ('d', 'date', None, _('list the date (short with -q)')),
268 ('d', 'date', None, _('list the date (short with -q)')),
268 ('n', 'number', None, _('list the revision number (default)')),
269 ('n', 'number', None, _('list the revision number (default)')),
269 ('c', 'changeset', None, _('list the changeset')),
270 ('c', 'changeset', None, _('list the changeset')),
270 ('l', 'line-number', None, _('show line number at the first appearance')),
271 ('l', 'line-number', None, _('show line number at the first appearance')),
271 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
272 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
272 ] + diffwsopts + walkopts + formatteropts,
273 ] + diffwsopts + walkopts + formatteropts,
273 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
274 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
274 inferrepo=True)
275 inferrepo=True)
275 def annotate(ui, repo, *pats, **opts):
276 def annotate(ui, repo, *pats, **opts):
276 """show changeset information by line for each file
277 """show changeset information by line for each file
277
278
278 List changes in files, showing the revision id responsible for
279 List changes in files, showing the revision id responsible for
279 each line.
280 each line.
280
281
281 This command is useful for discovering when a change was made and
282 This command is useful for discovering when a change was made and
282 by whom.
283 by whom.
283
284
284 If you include --file, --user, or --date, the revision number is
285 If you include --file, --user, or --date, the revision number is
285 suppressed unless you also include --number.
286 suppressed unless you also include --number.
286
287
287 Without the -a/--text option, annotate will avoid processing files
288 Without the -a/--text option, annotate will avoid processing files
288 it detects as binary. With -a, annotate will annotate the file
289 it detects as binary. With -a, annotate will annotate the file
289 anyway, although the results will probably be neither useful
290 anyway, although the results will probably be neither useful
290 nor desirable.
291 nor desirable.
291
292
292 Returns 0 on success.
293 Returns 0 on success.
293 """
294 """
294 opts = pycompat.byteskwargs(opts)
295 opts = pycompat.byteskwargs(opts)
295 if not pats:
296 if not pats:
296 raise error.Abort(_('at least one filename or pattern is required'))
297 raise error.Abort(_('at least one filename or pattern is required'))
297
298
298 if opts.get('follow'):
299 if opts.get('follow'):
299 # --follow is deprecated and now just an alias for -f/--file
300 # --follow is deprecated and now just an alias for -f/--file
300 # to mimic the behavior of Mercurial before version 1.5
301 # to mimic the behavior of Mercurial before version 1.5
301 opts['file'] = True
302 opts['file'] = True
302
303
303 rev = opts.get('rev')
304 rev = opts.get('rev')
304 if rev:
305 if rev:
305 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
306 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
306 ctx = scmutil.revsingle(repo, rev)
307 ctx = scmutil.revsingle(repo, rev)
307
308
308 rootfm = ui.formatter('annotate', opts)
309 rootfm = ui.formatter('annotate', opts)
309 if ui.quiet:
310 if ui.quiet:
310 datefunc = dateutil.shortdate
311 datefunc = dateutil.shortdate
311 else:
312 else:
312 datefunc = dateutil.datestr
313 datefunc = dateutil.datestr
313 if ctx.rev() is None:
314 if ctx.rev() is None:
314 def hexfn(node):
315 def hexfn(node):
315 if node is None:
316 if node is None:
316 return None
317 return None
317 else:
318 else:
318 return rootfm.hexfunc(node)
319 return rootfm.hexfunc(node)
319 if opts.get('changeset'):
320 if opts.get('changeset'):
320 # omit "+" suffix which is appended to node hex
321 # omit "+" suffix which is appended to node hex
321 def formatrev(rev):
322 def formatrev(rev):
322 if rev is None:
323 if rev is None:
323 return '%d' % ctx.p1().rev()
324 return '%d' % ctx.p1().rev()
324 else:
325 else:
325 return '%d' % rev
326 return '%d' % rev
326 else:
327 else:
327 def formatrev(rev):
328 def formatrev(rev):
328 if rev is None:
329 if rev is None:
329 return '%d+' % ctx.p1().rev()
330 return '%d+' % ctx.p1().rev()
330 else:
331 else:
331 return '%d ' % rev
332 return '%d ' % rev
332 def formathex(hex):
333 def formathex(hex):
333 if hex is None:
334 if hex is None:
334 return '%s+' % rootfm.hexfunc(ctx.p1().node())
335 return '%s+' % rootfm.hexfunc(ctx.p1().node())
335 else:
336 else:
336 return '%s ' % hex
337 return '%s ' % hex
337 else:
338 else:
338 hexfn = rootfm.hexfunc
339 hexfn = rootfm.hexfunc
339 formatrev = formathex = pycompat.bytestr
340 formatrev = formathex = pycompat.bytestr
340
341
341 opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
342 opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
342 ('number', ' ', lambda x: x.fctx.rev(), formatrev),
343 ('number', ' ', lambda x: x.fctx.rev(), formatrev),
343 ('changeset', ' ', lambda x: hexfn(x.fctx.node()), formathex),
344 ('changeset', ' ', lambda x: hexfn(x.fctx.node()), formathex),
344 ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
345 ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
345 ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
346 ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
346 ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
347 ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
347 ]
348 ]
348 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
349 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
349
350
350 if (not opts.get('user') and not opts.get('changeset')
351 if (not opts.get('user') and not opts.get('changeset')
351 and not opts.get('date') and not opts.get('file')):
352 and not opts.get('date') and not opts.get('file')):
352 opts['number'] = True
353 opts['number'] = True
353
354
354 linenumber = opts.get('line_number') is not None
355 linenumber = opts.get('line_number') is not None
355 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
356 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
356 raise error.Abort(_('at least one of -n/-c is required for -l'))
357 raise error.Abort(_('at least one of -n/-c is required for -l'))
357
358
358 ui.pager('annotate')
359 ui.pager('annotate')
359
360
360 if rootfm.isplain():
361 if rootfm.isplain():
361 def makefunc(get, fmt):
362 def makefunc(get, fmt):
362 return lambda x: fmt(get(x))
363 return lambda x: fmt(get(x))
363 else:
364 else:
364 def makefunc(get, fmt):
365 def makefunc(get, fmt):
365 return get
366 return get
366 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
367 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
367 if opts.get(op)]
368 if opts.get(op)]
368 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
369 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
369 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
370 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
370 if opts.get(op))
371 if opts.get(op))
371
372
372 def bad(x, y):
373 def bad(x, y):
373 raise error.Abort("%s: %s" % (x, y))
374 raise error.Abort("%s: %s" % (x, y))
374
375
375 m = scmutil.match(ctx, pats, opts, badfn=bad)
376 m = scmutil.match(ctx, pats, opts, badfn=bad)
376
377
377 follow = not opts.get('no_follow')
378 follow = not opts.get('no_follow')
378 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
379 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
379 whitespace=True)
380 whitespace=True)
380 skiprevs = opts.get('skip')
381 skiprevs = opts.get('skip')
381 if skiprevs:
382 if skiprevs:
382 skiprevs = scmutil.revrange(repo, skiprevs)
383 skiprevs = scmutil.revrange(repo, skiprevs)
383
384
384 for abs in ctx.walk(m):
385 for abs in ctx.walk(m):
385 fctx = ctx[abs]
386 fctx = ctx[abs]
386 rootfm.startitem()
387 rootfm.startitem()
387 rootfm.data(abspath=abs, path=m.rel(abs))
388 rootfm.data(abspath=abs, path=m.rel(abs))
388 if not opts.get('text') and fctx.isbinary():
389 if not opts.get('text') and fctx.isbinary():
389 rootfm.plain(_("%s: binary file\n")
390 rootfm.plain(_("%s: binary file\n")
390 % ((pats and m.rel(abs)) or abs))
391 % ((pats and m.rel(abs)) or abs))
391 continue
392 continue
392
393
393 fm = rootfm.nested('lines')
394 fm = rootfm.nested('lines')
394 lines = fctx.annotate(follow=follow, skiprevs=skiprevs,
395 lines = fctx.annotate(follow=follow, skiprevs=skiprevs,
395 diffopts=diffopts)
396 diffopts=diffopts)
396 if not lines:
397 if not lines:
397 fm.end()
398 fm.end()
398 continue
399 continue
399 formats = []
400 formats = []
400 pieces = []
401 pieces = []
401
402
402 for f, sep in funcmap:
403 for f, sep in funcmap:
403 l = [f(n) for n in lines]
404 l = [f(n) for n in lines]
404 if fm.isplain():
405 if fm.isplain():
405 sizes = [encoding.colwidth(x) for x in l]
406 sizes = [encoding.colwidth(x) for x in l]
406 ml = max(sizes)
407 ml = max(sizes)
407 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
408 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
408 else:
409 else:
409 formats.append(['%s' for x in l])
410 formats.append(['%s' for x in l])
410 pieces.append(l)
411 pieces.append(l)
411
412
412 for f, p, n in zip(zip(*formats), zip(*pieces), lines):
413 for f, p, n in zip(zip(*formats), zip(*pieces), lines):
413 fm.startitem()
414 fm.startitem()
414 fm.context(fctx=n.fctx)
415 fm.context(fctx=n.fctx)
415 fm.write(fields, "".join(f), *p)
416 fm.write(fields, "".join(f), *p)
416 if n.skip:
417 if n.skip:
417 fmt = "* %s"
418 fmt = "* %s"
418 else:
419 else:
419 fmt = ": %s"
420 fmt = ": %s"
420 fm.write('line', fmt, n.text)
421 fm.write('line', fmt, n.text)
421
422
422 if not lines[-1].text.endswith('\n'):
423 if not lines[-1].text.endswith('\n'):
423 fm.plain('\n')
424 fm.plain('\n')
424 fm.end()
425 fm.end()
425
426
426 rootfm.end()
427 rootfm.end()
427
428
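For the plain formatter, annotate lines up its output columns by measuring every cell in a column, right-padding to the widest one, and dropping the separator in front of the first column (the funcmap/formats logic above). A hedged, self-contained sketch of that alignment, using len() where the real code uses encoding.colwidth():

    def align_columns(columns, seps):
        # right-align each column to its widest cell; no separator before column 0
        widths = [max(len(v) for v in col) for col in columns]
        rows = []
        for row in zip(*columns):
            cells = []
            for i, (val, width) in enumerate(zip(row, widths)):
                sep = '' if i == 0 else seps[i]
                cells.append(sep + ' ' * (width - len(val)) + val)
            rows.append(''.join(cells))
        return rows

    for line in align_columns([['alice', 'bob'], ['12', '3']], ['', ' ']):
        print(line)
    # alice 12
    #   bob  3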
428 @command('archive',
429 @command('archive',
429 [('', 'no-decode', None, _('do not pass files through decoders')),
430 [('', 'no-decode', None, _('do not pass files through decoders')),
430 ('p', 'prefix', '', _('directory prefix for files in archive'),
431 ('p', 'prefix', '', _('directory prefix for files in archive'),
431 _('PREFIX')),
432 _('PREFIX')),
432 ('r', 'rev', '', _('revision to distribute'), _('REV')),
433 ('r', 'rev', '', _('revision to distribute'), _('REV')),
433 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
434 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
434 ] + subrepoopts + walkopts,
435 ] + subrepoopts + walkopts,
435 _('[OPTION]... DEST'))
436 _('[OPTION]... DEST'))
436 def archive(ui, repo, dest, **opts):
437 def archive(ui, repo, dest, **opts):
437 '''create an unversioned archive of a repository revision
438 '''create an unversioned archive of a repository revision
438
439
439 By default, the revision used is the parent of the working
440 By default, the revision used is the parent of the working
440 directory; use -r/--rev to specify a different revision.
441 directory; use -r/--rev to specify a different revision.
441
442
442 The archive type is automatically detected based on file
443 The archive type is automatically detected based on file
443 extension (to override, use -t/--type).
444 extension (to override, use -t/--type).
444
445
445 .. container:: verbose
446 .. container:: verbose
446
447
447 Examples:
448 Examples:
448
449
449 - create a zip file containing the 1.0 release::
450 - create a zip file containing the 1.0 release::
450
451
451 hg archive -r 1.0 project-1.0.zip
452 hg archive -r 1.0 project-1.0.zip
452
453
453 - create a tarball excluding .hg files::
454 - create a tarball excluding .hg files::
454
455
455 hg archive project.tar.gz -X ".hg*"
456 hg archive project.tar.gz -X ".hg*"
456
457
457 Valid types are:
458 Valid types are:
458
459
459 :``files``: a directory full of files (default)
460 :``files``: a directory full of files (default)
460 :``tar``: tar archive, uncompressed
461 :``tar``: tar archive, uncompressed
461 :``tbz2``: tar archive, compressed using bzip2
462 :``tbz2``: tar archive, compressed using bzip2
462 :``tgz``: tar archive, compressed using gzip
463 :``tgz``: tar archive, compressed using gzip
463 :``uzip``: zip archive, uncompressed
464 :``uzip``: zip archive, uncompressed
464 :``zip``: zip archive, compressed using deflate
465 :``zip``: zip archive, compressed using deflate
465
466
466 The exact name of the destination archive or directory is given
467 The exact name of the destination archive or directory is given
467 using a format string; see :hg:`help export` for details.
468 using a format string; see :hg:`help export` for details.
468
469
469 Each member added to an archive file has a directory prefix
470 Each member added to an archive file has a directory prefix
470 prepended. Use -p/--prefix to specify a format string for the
471 prepended. Use -p/--prefix to specify a format string for the
471 prefix. The default is the basename of the archive, with suffixes
472 prefix. The default is the basename of the archive, with suffixes
472 removed.
473 removed.
473
474
474 Returns 0 on success.
475 Returns 0 on success.
475 '''
476 '''
476
477
477 opts = pycompat.byteskwargs(opts)
478 opts = pycompat.byteskwargs(opts)
478 rev = opts.get('rev')
479 rev = opts.get('rev')
479 if rev:
480 if rev:
480 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
481 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
481 ctx = scmutil.revsingle(repo, rev)
482 ctx = scmutil.revsingle(repo, rev)
482 if not ctx:
483 if not ctx:
483 raise error.Abort(_('no working directory: please specify a revision'))
484 raise error.Abort(_('no working directory: please specify a revision'))
484 node = ctx.node()
485 node = ctx.node()
485 dest = cmdutil.makefilename(ctx, dest)
486 dest = cmdutil.makefilename(ctx, dest)
486 if os.path.realpath(dest) == repo.root:
487 if os.path.realpath(dest) == repo.root:
487 raise error.Abort(_('repository root cannot be destination'))
488 raise error.Abort(_('repository root cannot be destination'))
488
489
489 kind = opts.get('type') or archival.guesskind(dest) or 'files'
490 kind = opts.get('type') or archival.guesskind(dest) or 'files'
490 prefix = opts.get('prefix')
491 prefix = opts.get('prefix')
491
492
492 if dest == '-':
493 if dest == '-':
493 if kind == 'files':
494 if kind == 'files':
494 raise error.Abort(_('cannot archive plain files to stdout'))
495 raise error.Abort(_('cannot archive plain files to stdout'))
495 dest = cmdutil.makefileobj(ctx, dest)
496 dest = cmdutil.makefileobj(ctx, dest)
496 if not prefix:
497 if not prefix:
497 prefix = os.path.basename(repo.root) + '-%h'
498 prefix = os.path.basename(repo.root) + '-%h'
498
499
499 prefix = cmdutil.makefilename(ctx, prefix)
500 prefix = cmdutil.makefilename(ctx, prefix)
500 match = scmutil.match(ctx, [], opts)
501 match = scmutil.match(ctx, [], opts)
501 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
502 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
502 match, prefix, subrepos=opts.get('subrepos'))
503 match, prefix, subrepos=opts.get('subrepos'))
503
504
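archive() picks the archive kind by precedence: an explicit -t/--type, then a guess from the destination's file extension, then the 'files' directory format; when no --prefix is given it derives one from the repository basename plus '-%h'. A hedged sketch of the extension fallback only (the table below is illustrative, not archival.guesskind()):

    _EXT_TO_KIND = {'.zip': 'zip', '.tar': 'tar', '.tgz': 'tgz',
                    '.tar.gz': 'tgz', '.tbz2': 'tbz2', '.tar.bz2': 'tbz2'}

    def guess_kind(dest, explicit=None):
        # explicit --type wins, then the longest matching extension, then 'files'
        if explicit:
            return explicit
        for ext, kind in sorted(_EXT_TO_KIND.items(), key=lambda kv: -len(kv[0])):
            if dest.endswith(ext):
                return kind
        return 'files'

    print(guess_kind('project-1.0.zip'))   # zip
    print(guess_kind('project.tar.gz'))    # tgz
    print(guess_kind('snapshot', 'tar'))   # tar
    print(guess_kind('outputdir'))         # files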
504 @command('backout',
505 @command('backout',
505 [('', 'merge', None, _('merge with old dirstate parent after backout')),
506 [('', 'merge', None, _('merge with old dirstate parent after backout')),
506 ('', 'commit', None,
507 ('', 'commit', None,
507 _('commit if no conflicts were encountered (DEPRECATED)')),
508 _('commit if no conflicts were encountered (DEPRECATED)')),
508 ('', 'no-commit', None, _('do not commit')),
509 ('', 'no-commit', None, _('do not commit')),
509 ('', 'parent', '',
510 ('', 'parent', '',
510 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
511 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
511 ('r', 'rev', '', _('revision to backout'), _('REV')),
512 ('r', 'rev', '', _('revision to backout'), _('REV')),
512 ('e', 'edit', False, _('invoke editor on commit messages')),
513 ('e', 'edit', False, _('invoke editor on commit messages')),
513 ] + mergetoolopts + walkopts + commitopts + commitopts2,
514 ] + mergetoolopts + walkopts + commitopts + commitopts2,
514 _('[OPTION]... [-r] REV'))
515 _('[OPTION]... [-r] REV'))
515 def backout(ui, repo, node=None, rev=None, **opts):
516 def backout(ui, repo, node=None, rev=None, **opts):
516 '''reverse effect of earlier changeset
517 '''reverse effect of earlier changeset
517
518
518 Prepare a new changeset with the effect of REV undone in the
519 Prepare a new changeset with the effect of REV undone in the
519 current working directory. If no conflicts were encountered,
520 current working directory. If no conflicts were encountered,
520 it will be committed immediately.
521 it will be committed immediately.
521
522
522 If REV is the parent of the working directory, then this new changeset
523 If REV is the parent of the working directory, then this new changeset
523 is committed automatically (unless --no-commit is specified).
524 is committed automatically (unless --no-commit is specified).
524
525
525 .. note::
526 .. note::
526
527
527 :hg:`backout` cannot be used to fix either an unwanted or
528 :hg:`backout` cannot be used to fix either an unwanted or
528 incorrect merge.
529 incorrect merge.
529
530
530 .. container:: verbose
531 .. container:: verbose
531
532
532 Examples:
533 Examples:
533
534
534 - Reverse the effect of the parent of the working directory.
535 - Reverse the effect of the parent of the working directory.
535 This backout will be committed immediately::
536 This backout will be committed immediately::
536
537
537 hg backout -r .
538 hg backout -r .
538
539
539 - Reverse the effect of previous bad revision 23::
540 - Reverse the effect of previous bad revision 23::
540
541
541 hg backout -r 23
542 hg backout -r 23
542
543
543 - Reverse the effect of previous bad revision 23 and
544 - Reverse the effect of previous bad revision 23 and
544 leave changes uncommitted::
545 leave changes uncommitted::
545
546
546 hg backout -r 23 --no-commit
547 hg backout -r 23 --no-commit
547 hg commit -m "Backout revision 23"
548 hg commit -m "Backout revision 23"
548
549
549 By default, the pending changeset will have one parent,
550 By default, the pending changeset will have one parent,
550 maintaining a linear history. With --merge, the pending
551 maintaining a linear history. With --merge, the pending
551 changeset will instead have two parents: the old parent of the
552 changeset will instead have two parents: the old parent of the
552 working directory and a new child of REV that simply undoes REV.
553 working directory and a new child of REV that simply undoes REV.
553
554
554 Before version 1.7, the behavior without --merge was equivalent
555 Before version 1.7, the behavior without --merge was equivalent
555 to specifying --merge followed by :hg:`update --clean .` to
556 to specifying --merge followed by :hg:`update --clean .` to
556 cancel the merge and leave the child of REV as a head to be
557 cancel the merge and leave the child of REV as a head to be
557 merged separately.
558 merged separately.
558
559
559 See :hg:`help dates` for a list of formats valid for -d/--date.
560 See :hg:`help dates` for a list of formats valid for -d/--date.
560
561
561 See :hg:`help revert` for a way to restore files to the state
562 See :hg:`help revert` for a way to restore files to the state
562 of another revision.
563 of another revision.
563
564
564 Returns 0 on success, 1 if nothing to backout or there are unresolved
565 Returns 0 on success, 1 if nothing to backout or there are unresolved
565 files.
566 files.
566 '''
567 '''
567 wlock = lock = None
568 wlock = lock = None
568 try:
569 try:
569 wlock = repo.wlock()
570 wlock = repo.wlock()
570 lock = repo.lock()
571 lock = repo.lock()
571 return _dobackout(ui, repo, node, rev, **opts)
572 return _dobackout(ui, repo, node, rev, **opts)
572 finally:
573 finally:
573 release(lock, wlock)
574 release(lock, wlock)
574
575
575 def _dobackout(ui, repo, node=None, rev=None, **opts):
576 def _dobackout(ui, repo, node=None, rev=None, **opts):
576 opts = pycompat.byteskwargs(opts)
577 opts = pycompat.byteskwargs(opts)
577 if opts.get('commit') and opts.get('no_commit'):
578 if opts.get('commit') and opts.get('no_commit'):
578 raise error.Abort(_("cannot use --commit with --no-commit"))
579 raise error.Abort(_("cannot use --commit with --no-commit"))
579 if opts.get('merge') and opts.get('no_commit'):
580 if opts.get('merge') and opts.get('no_commit'):
580 raise error.Abort(_("cannot use --merge with --no-commit"))
581 raise error.Abort(_("cannot use --merge with --no-commit"))
581
582
582 if rev and node:
583 if rev and node:
583 raise error.Abort(_("please specify just one revision"))
584 raise error.Abort(_("please specify just one revision"))
584
585
585 if not rev:
586 if not rev:
586 rev = node
587 rev = node
587
588
588 if not rev:
589 if not rev:
589 raise error.Abort(_("please specify a revision to backout"))
590 raise error.Abort(_("please specify a revision to backout"))
590
591
591 date = opts.get('date')
592 date = opts.get('date')
592 if date:
593 if date:
593 opts['date'] = dateutil.parsedate(date)
594 opts['date'] = dateutil.parsedate(date)
594
595
595 cmdutil.checkunfinished(repo)
596 cmdutil.checkunfinished(repo)
596 cmdutil.bailifchanged(repo)
597 cmdutil.bailifchanged(repo)
597 node = scmutil.revsingle(repo, rev).node()
598 node = scmutil.revsingle(repo, rev).node()
598
599
599 op1, op2 = repo.dirstate.parents()
600 op1, op2 = repo.dirstate.parents()
600 if not repo.changelog.isancestor(node, op1):
601 if not repo.changelog.isancestor(node, op1):
601 raise error.Abort(_('cannot backout change that is not an ancestor'))
602 raise error.Abort(_('cannot backout change that is not an ancestor'))
602
603
603 p1, p2 = repo.changelog.parents(node)
604 p1, p2 = repo.changelog.parents(node)
604 if p1 == nullid:
605 if p1 == nullid:
605 raise error.Abort(_('cannot backout a change with no parents'))
606 raise error.Abort(_('cannot backout a change with no parents'))
606 if p2 != nullid:
607 if p2 != nullid:
607 if not opts.get('parent'):
608 if not opts.get('parent'):
608 raise error.Abort(_('cannot backout a merge changeset'))
609 raise error.Abort(_('cannot backout a merge changeset'))
609 p = repo.lookup(opts['parent'])
610 p = repo.lookup(opts['parent'])
610 if p not in (p1, p2):
611 if p not in (p1, p2):
611 raise error.Abort(_('%s is not a parent of %s') %
612 raise error.Abort(_('%s is not a parent of %s') %
612 (short(p), short(node)))
613 (short(p), short(node)))
613 parent = p
614 parent = p
614 else:
615 else:
615 if opts.get('parent'):
616 if opts.get('parent'):
616 raise error.Abort(_('cannot use --parent on non-merge changeset'))
617 raise error.Abort(_('cannot use --parent on non-merge changeset'))
617 parent = p1
618 parent = p1
618
619
619 # the backout should appear on the same branch
620 # the backout should appear on the same branch
620 branch = repo.dirstate.branch()
621 branch = repo.dirstate.branch()
621 bheads = repo.branchheads(branch)
622 bheads = repo.branchheads(branch)
622 rctx = scmutil.revsingle(repo, hex(parent))
623 rctx = scmutil.revsingle(repo, hex(parent))
623 if not opts.get('merge') and op1 != node:
624 if not opts.get('merge') and op1 != node:
624 dsguard = dirstateguard.dirstateguard(repo, 'backout')
625 dsguard = dirstateguard.dirstateguard(repo, 'backout')
625 try:
626 try:
626 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
627 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
627 'backout')
628 'backout')
628 stats = mergemod.update(repo, parent, True, True, node, False)
629 stats = mergemod.update(repo, parent, True, True, node, False)
629 repo.setparents(op1, op2)
630 repo.setparents(op1, op2)
630 dsguard.close()
631 dsguard.close()
631 hg._showstats(repo, stats)
632 hg._showstats(repo, stats)
632 if stats.unresolvedcount:
633 if stats.unresolvedcount:
633 repo.ui.status(_("use 'hg resolve' to retry unresolved "
634 repo.ui.status(_("use 'hg resolve' to retry unresolved "
634 "file merges\n"))
635 "file merges\n"))
635 return 1
636 return 1
636 finally:
637 finally:
637 ui.setconfig('ui', 'forcemerge', '', '')
638 ui.setconfig('ui', 'forcemerge', '', '')
638 lockmod.release(dsguard)
639 lockmod.release(dsguard)
639 else:
640 else:
640 hg.clean(repo, node, show_stats=False)
641 hg.clean(repo, node, show_stats=False)
641 repo.dirstate.setbranch(branch)
642 repo.dirstate.setbranch(branch)
642 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
643 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
643
644
644 if opts.get('no_commit'):
645 if opts.get('no_commit'):
645 msg = _("changeset %s backed out, "
646 msg = _("changeset %s backed out, "
646 "don't forget to commit.\n")
647 "don't forget to commit.\n")
647 ui.status(msg % short(node))
648 ui.status(msg % short(node))
648 return 0
649 return 0
649
650
650 def commitfunc(ui, repo, message, match, opts):
651 def commitfunc(ui, repo, message, match, opts):
651 editform = 'backout'
652 editform = 'backout'
652 e = cmdutil.getcommiteditor(editform=editform,
653 e = cmdutil.getcommiteditor(editform=editform,
653 **pycompat.strkwargs(opts))
654 **pycompat.strkwargs(opts))
654 if not message:
655 if not message:
655 # we don't translate commit messages
656 # we don't translate commit messages
656 message = "Backed out changeset %s" % short(node)
657 message = "Backed out changeset %s" % short(node)
657 e = cmdutil.getcommiteditor(edit=True, editform=editform)
658 e = cmdutil.getcommiteditor(edit=True, editform=editform)
658 return repo.commit(message, opts.get('user'), opts.get('date'),
659 return repo.commit(message, opts.get('user'), opts.get('date'),
659 match, editor=e)
660 match, editor=e)
660 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
661 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
661 if not newnode:
662 if not newnode:
662 ui.status(_("nothing changed\n"))
663 ui.status(_("nothing changed\n"))
663 return 1
664 return 1
664 cmdutil.commitstatus(repo, newnode, branch, bheads)
665 cmdutil.commitstatus(repo, newnode, branch, bheads)
665
666
666 def nice(node):
667 def nice(node):
667 return '%d:%s' % (repo.changelog.rev(node), short(node))
668 return '%d:%s' % (repo.changelog.rev(node), short(node))
668 ui.status(_('changeset %s backs out changeset %s\n') %
669 ui.status(_('changeset %s backs out changeset %s\n') %
669 (nice(repo.changelog.tip()), nice(node)))
670 (nice(repo.changelog.tip()), nice(node)))
670 if opts.get('merge') and op1 != node:
671 if opts.get('merge') and op1 != node:
671 hg.clean(repo, op1, show_stats=False)
672 hg.clean(repo, op1, show_stats=False)
672 ui.status(_('merging with changeset %s\n')
673 ui.status(_('merging with changeset %s\n')
673 % nice(repo.changelog.tip()))
674 % nice(repo.changelog.tip()))
674 try:
675 try:
675 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
676 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
676 'backout')
677 'backout')
677 return hg.merge(repo, hex(repo.changelog.tip()))
678 return hg.merge(repo, hex(repo.changelog.tip()))
678 finally:
679 finally:
679 ui.setconfig('ui', 'forcemerge', '', '')
680 ui.setconfig('ui', 'forcemerge', '', '')
680 return 0
681 return 0
681
682
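_dobackout() above only lets a merge changeset be backed out when --parent names one of its two parents, and rejects --parent for a non-merge changeset, defaulting to the first parent. A hedged stand-alone restatement of that rule (nullid modelled as None, hashes as plain strings):

    def pick_backout_parent(p1, p2, parent_opt=None):
        if p2 is not None:                      # backing out a merge changeset
            if parent_opt is None:
                raise ValueError('cannot backout a merge changeset')
            if parent_opt not in (p1, p2):
                raise ValueError('%s is not a parent of the backed-out rev' % parent_opt)
            return parent_opt
        if parent_opt is not None:
            raise ValueError('cannot use --parent on non-merge changeset')
        return p1

    print(pick_backout_parent('a1b2', None))            # a1b2
    print(pick_backout_parent('a1b2', 'c3d4', 'c3d4'))  # c3d4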
682 @command('bisect',
683 @command('bisect',
683 [('r', 'reset', False, _('reset bisect state')),
684 [('r', 'reset', False, _('reset bisect state')),
684 ('g', 'good', False, _('mark changeset good')),
685 ('g', 'good', False, _('mark changeset good')),
685 ('b', 'bad', False, _('mark changeset bad')),
686 ('b', 'bad', False, _('mark changeset bad')),
686 ('s', 'skip', False, _('skip testing changeset')),
687 ('s', 'skip', False, _('skip testing changeset')),
687 ('e', 'extend', False, _('extend the bisect range')),
688 ('e', 'extend', False, _('extend the bisect range')),
688 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
689 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
689 ('U', 'noupdate', False, _('do not update to target'))],
690 ('U', 'noupdate', False, _('do not update to target'))],
690 _("[-gbsr] [-U] [-c CMD] [REV]"))
691 _("[-gbsr] [-U] [-c CMD] [REV]"))
691 def bisect(ui, repo, rev=None, extra=None, command=None,
692 def bisect(ui, repo, rev=None, extra=None, command=None,
692 reset=None, good=None, bad=None, skip=None, extend=None,
693 reset=None, good=None, bad=None, skip=None, extend=None,
693 noupdate=None):
694 noupdate=None):
694 """subdivision search of changesets
695 """subdivision search of changesets
695
696
696 This command helps to find changesets which introduce problems. To
697 This command helps to find changesets which introduce problems. To
697 use, mark the earliest changeset you know exhibits the problem as
698 use, mark the earliest changeset you know exhibits the problem as
698 bad, then mark the latest changeset which is free from the problem
699 bad, then mark the latest changeset which is free from the problem
699 as good. Bisect will update your working directory to a revision
700 as good. Bisect will update your working directory to a revision
700 for testing (unless the -U/--noupdate option is specified). Once
701 for testing (unless the -U/--noupdate option is specified). Once
701 you have performed tests, mark the working directory as good or
702 you have performed tests, mark the working directory as good or
702 bad, and bisect will either update to another candidate changeset
703 bad, and bisect will either update to another candidate changeset
703 or announce that it has found the bad revision.
704 or announce that it has found the bad revision.
704
705
705 As a shortcut, you can also use the revision argument to mark a
706 As a shortcut, you can also use the revision argument to mark a
706 revision as good or bad without checking it out first.
707 revision as good or bad without checking it out first.
707
708
708 If you supply a command, it will be used for automatic bisection.
709 If you supply a command, it will be used for automatic bisection.
709 The environment variable HG_NODE will contain the ID of the
710 The environment variable HG_NODE will contain the ID of the
710 changeset being tested. The exit status of the command will be
711 changeset being tested. The exit status of the command will be
711 used to mark revisions as good or bad: status 0 means good, 125
712 used to mark revisions as good or bad: status 0 means good, 125
712 means to skip the revision, 127 (command not found) will abort the
713 means to skip the revision, 127 (command not found) will abort the
713 bisection, and any other non-zero exit status means the revision
714 bisection, and any other non-zero exit status means the revision
714 is bad.
715 is bad.
715
716
716 .. container:: verbose
717 .. container:: verbose
717
718
718 Some examples:
719 Some examples:
719
720
720 - start a bisection with known bad revision 34, and good revision 12::
721 - start a bisection with known bad revision 34, and good revision 12::
721
722
722 hg bisect --bad 34
723 hg bisect --bad 34
723 hg bisect --good 12
724 hg bisect --good 12
724
725
725 - advance the current bisection by marking current revision as good or
726 - advance the current bisection by marking current revision as good or
726 bad::
727 bad::
727
728
728 hg bisect --good
729 hg bisect --good
729 hg bisect --bad
730 hg bisect --bad
730
731
731 - mark the current revision, or a known revision, to be skipped (e.g. if
732 - mark the current revision, or a known revision, to be skipped (e.g. if
732 that revision is not usable because of another issue)::
733 that revision is not usable because of another issue)::
733
734
734 hg bisect --skip
735 hg bisect --skip
735 hg bisect --skip 23
736 hg bisect --skip 23
736
737
737 - skip all revisions that do not touch directories ``foo`` or ``bar``::
738 - skip all revisions that do not touch directories ``foo`` or ``bar``::
738
739
739 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
740 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
740
741
741 - forget the current bisection::
742 - forget the current bisection::
742
743
743 hg bisect --reset
744 hg bisect --reset
744
745
745 - use 'make && make tests' to automatically find the first broken
746 - use 'make && make tests' to automatically find the first broken
746 revision::
747 revision::
747
748
748 hg bisect --reset
749 hg bisect --reset
749 hg bisect --bad 34
750 hg bisect --bad 34
750 hg bisect --good 12
751 hg bisect --good 12
751 hg bisect --command "make && make tests"
752 hg bisect --command "make && make tests"
752
753
753 - see all changesets whose states are already known in the current
754 - see all changesets whose states are already known in the current
754 bisection::
755 bisection::
755
756
756 hg log -r "bisect(pruned)"
757 hg log -r "bisect(pruned)"
757
758
758 - see the changeset currently being bisected (especially useful
759 - see the changeset currently being bisected (especially useful
759 if running with -U/--noupdate)::
760 if running with -U/--noupdate)::
760
761
761 hg log -r "bisect(current)"
762 hg log -r "bisect(current)"
762
763
763 - see all changesets that took part in the current bisection::
764 - see all changesets that took part in the current bisection::
764
765
765 hg log -r "bisect(range)"
766 hg log -r "bisect(range)"
766
767
767 - you can even get a nice graph::
768 - you can even get a nice graph::
768
769
769 hg log --graph -r "bisect(range)"
770 hg log --graph -r "bisect(range)"
770
771
771 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
772 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
772
773
773 Returns 0 on success.
774 Returns 0 on success.
774 """
775 """
775 # backward compatibility
776 # backward compatibility
776 if rev in "good bad reset init".split():
777 if rev in "good bad reset init".split():
777 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
778 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
778 cmd, rev, extra = rev, extra, None
779 cmd, rev, extra = rev, extra, None
779 if cmd == "good":
780 if cmd == "good":
780 good = True
781 good = True
781 elif cmd == "bad":
782 elif cmd == "bad":
782 bad = True
783 bad = True
783 else:
784 else:
784 reset = True
785 reset = True
785 elif extra:
786 elif extra:
786 raise error.Abort(_('incompatible arguments'))
787 raise error.Abort(_('incompatible arguments'))
787
788
788 incompatibles = {
789 incompatibles = {
789 '--bad': bad,
790 '--bad': bad,
790 '--command': bool(command),
791 '--command': bool(command),
791 '--extend': extend,
792 '--extend': extend,
792 '--good': good,
793 '--good': good,
793 '--reset': reset,
794 '--reset': reset,
794 '--skip': skip,
795 '--skip': skip,
795 }
796 }
796
797
797 enabled = [x for x in incompatibles if incompatibles[x]]
798 enabled = [x for x in incompatibles if incompatibles[x]]
798
799
799 if len(enabled) > 1:
800 if len(enabled) > 1:
800 raise error.Abort(_('%s and %s are incompatible') %
801 raise error.Abort(_('%s and %s are incompatible') %
801 tuple(sorted(enabled)[0:2]))
802 tuple(sorted(enabled)[0:2]))
802
803
803 if reset:
804 if reset:
804 hbisect.resetstate(repo)
805 hbisect.resetstate(repo)
805 return
806 return
806
807
807 state = hbisect.load_state(repo)
808 state = hbisect.load_state(repo)
808
809
809 # update state
810 # update state
810 if good or bad or skip:
811 if good or bad or skip:
811 if rev:
812 if rev:
812 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
813 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
813 else:
814 else:
814 nodes = [repo.lookup('.')]
815 nodes = [repo.lookup('.')]
815 if good:
816 if good:
816 state['good'] += nodes
817 state['good'] += nodes
817 elif bad:
818 elif bad:
818 state['bad'] += nodes
819 state['bad'] += nodes
819 elif skip:
820 elif skip:
820 state['skip'] += nodes
821 state['skip'] += nodes
821 hbisect.save_state(repo, state)
822 hbisect.save_state(repo, state)
822 if not (state['good'] and state['bad']):
823 if not (state['good'] and state['bad']):
823 return
824 return
824
825
825 def mayupdate(repo, node, show_stats=True):
826 def mayupdate(repo, node, show_stats=True):
826 """common used update sequence"""
827 """common used update sequence"""
827 if noupdate:
828 if noupdate:
828 return
829 return
829 cmdutil.checkunfinished(repo)
830 cmdutil.checkunfinished(repo)
830 cmdutil.bailifchanged(repo)
831 cmdutil.bailifchanged(repo)
831 return hg.clean(repo, node, show_stats=show_stats)
832 return hg.clean(repo, node, show_stats=show_stats)
832
833
833 displayer = logcmdutil.changesetdisplayer(ui, repo, {})
834 displayer = logcmdutil.changesetdisplayer(ui, repo, {})
834
835
835 if command:
836 if command:
836 changesets = 1
837 changesets = 1
837 if noupdate:
838 if noupdate:
838 try:
839 try:
839 node = state['current'][0]
840 node = state['current'][0]
840 except LookupError:
841 except LookupError:
841 raise error.Abort(_('current bisect revision is unknown - '
842 raise error.Abort(_('current bisect revision is unknown - '
842 'start a new bisect to fix'))
843 'start a new bisect to fix'))
843 else:
844 else:
844 node, p2 = repo.dirstate.parents()
845 node, p2 = repo.dirstate.parents()
845 if p2 != nullid:
846 if p2 != nullid:
846 raise error.Abort(_('current bisect revision is a merge'))
847 raise error.Abort(_('current bisect revision is a merge'))
847 if rev:
848 if rev:
848 node = repo[scmutil.revsingle(repo, rev, node)].node()
849 node = repo[scmutil.revsingle(repo, rev, node)].node()
849 try:
850 try:
850 while changesets:
851 while changesets:
851 # update state
852 # update state
852 state['current'] = [node]
853 state['current'] = [node]
853 hbisect.save_state(repo, state)
854 hbisect.save_state(repo, state)
854 status = ui.system(command, environ={'HG_NODE': hex(node)},
855 status = ui.system(command, environ={'HG_NODE': hex(node)},
855 blockedtag='bisect_check')
856 blockedtag='bisect_check')
856 if status == 125:
857 if status == 125:
857 transition = "skip"
858 transition = "skip"
858 elif status == 0:
859 elif status == 0:
859 transition = "good"
860 transition = "good"
860 # status < 0 means process was killed
861 # status < 0 means process was killed
861 elif status == 127:
862 elif status == 127:
862 raise error.Abort(_("failed to execute %s") % command)
863 raise error.Abort(_("failed to execute %s") % command)
863 elif status < 0:
864 elif status < 0:
864 raise error.Abort(_("%s killed") % command)
865 raise error.Abort(_("%s killed") % command)
865 else:
866 else:
866 transition = "bad"
867 transition = "bad"
867 state[transition].append(node)
868 state[transition].append(node)
868 ctx = repo[node]
869 ctx = repo[node]
869 ui.status(_('changeset %d:%s: %s\n') % (ctx.rev(), ctx,
870 ui.status(_('changeset %d:%s: %s\n') % (ctx.rev(), ctx,
870 transition))
871 transition))
871 hbisect.checkstate(state)
872 hbisect.checkstate(state)
872 # bisect
873 # bisect
873 nodes, changesets, bgood = hbisect.bisect(repo, state)
874 nodes, changesets, bgood = hbisect.bisect(repo, state)
874 # update to next check
875 # update to next check
875 node = nodes[0]
876 node = nodes[0]
876 mayupdate(repo, node, show_stats=False)
877 mayupdate(repo, node, show_stats=False)
877 finally:
878 finally:
878 state['current'] = [node]
879 state['current'] = [node]
879 hbisect.save_state(repo, state)
880 hbisect.save_state(repo, state)
880 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
881 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
881 return
882 return
882
883
883 hbisect.checkstate(state)
884 hbisect.checkstate(state)
884
885
885 # actually bisect
886 # actually bisect
886 nodes, changesets, good = hbisect.bisect(repo, state)
887 nodes, changesets, good = hbisect.bisect(repo, state)
887 if extend:
888 if extend:
888 if not changesets:
889 if not changesets:
889 extendnode = hbisect.extendrange(repo, state, nodes, good)
890 extendnode = hbisect.extendrange(repo, state, nodes, good)
890 if extendnode is not None:
891 if extendnode is not None:
891 ui.write(_("Extending search to changeset %d:%s\n")
892 ui.write(_("Extending search to changeset %d:%s\n")
892 % (extendnode.rev(), extendnode))
893 % (extendnode.rev(), extendnode))
893 state['current'] = [extendnode.node()]
894 state['current'] = [extendnode.node()]
894 hbisect.save_state(repo, state)
895 hbisect.save_state(repo, state)
895 return mayupdate(repo, extendnode.node())
896 return mayupdate(repo, extendnode.node())
896 raise error.Abort(_("nothing to extend"))
897 raise error.Abort(_("nothing to extend"))
897
898
898 if changesets == 0:
899 if changesets == 0:
899 hbisect.printresult(ui, repo, state, displayer, nodes, good)
900 hbisect.printresult(ui, repo, state, displayer, nodes, good)
900 else:
901 else:
901 assert len(nodes) == 1 # only a single node can be tested next
902 assert len(nodes) == 1 # only a single node can be tested next
902 node = nodes[0]
903 node = nodes[0]
903 # compute the approximate number of remaining tests
904 # compute the approximate number of remaining tests
904 tests, size = 0, 2
905 tests, size = 0, 2
905 while size <= changesets:
906 while size <= changesets:
906 tests, size = tests + 1, size * 2
907 tests, size = tests + 1, size * 2
907 rev = repo.changelog.rev(node)
908 rev = repo.changelog.rev(node)
908 ui.write(_("Testing changeset %d:%s "
909 ui.write(_("Testing changeset %d:%s "
909 "(%d changesets remaining, ~%d tests)\n")
910 "(%d changesets remaining, ~%d tests)\n")
910 % (rev, short(node), changesets, tests))
911 % (rev, short(node), changesets, tests))
911 state['current'] = [node]
912 state['current'] = [node]
912 hbisect.save_state(repo, state)
913 hbisect.save_state(repo, state)
913 return mayupdate(repo, node)
914 return mayupdate(repo, node)
914
915
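The "~%d tests" figure printed by bisect comes from the doubling loop above: each test roughly halves the candidate range, so the estimate is floor(log2(changesets)). A small self-contained sketch of that estimate:

    def remaining_tests(changesets):
        # count how many times the range can be halved before it collapses
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        return tests

    for n in (1, 2, 5, 100, 1000):
        print(n, remaining_tests(n))   # 0, 1, 2, 6, 9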
915 @command('bookmarks|bookmark',
916 @command('bookmarks|bookmark',
916 [('f', 'force', False, _('force')),
917 [('f', 'force', False, _('force')),
917 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
918 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
918 ('d', 'delete', False, _('delete a given bookmark')),
919 ('d', 'delete', False, _('delete a given bookmark')),
919 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
920 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
920 ('i', 'inactive', False, _('mark a bookmark inactive')),
921 ('i', 'inactive', False, _('mark a bookmark inactive')),
921 ] + formatteropts,
922 ] + formatteropts,
922 _('hg bookmarks [OPTIONS]... [NAME]...'))
923 _('hg bookmarks [OPTIONS]... [NAME]...'))
923 def bookmark(ui, repo, *names, **opts):
924 def bookmark(ui, repo, *names, **opts):
924 '''create a new bookmark or list existing bookmarks
925 '''create a new bookmark or list existing bookmarks
925
926
926 Bookmarks are labels on changesets to help track lines of development.
927 Bookmarks are labels on changesets to help track lines of development.
927 Bookmarks are unversioned and can be moved, renamed and deleted.
928 Bookmarks are unversioned and can be moved, renamed and deleted.
928 Deleting or moving a bookmark has no effect on the associated changesets.
929 Deleting or moving a bookmark has no effect on the associated changesets.
929
930
930 Creating or updating to a bookmark causes it to be marked as 'active'.
931 Creating or updating to a bookmark causes it to be marked as 'active'.
931 The active bookmark is indicated with a '*'.
932 The active bookmark is indicated with a '*'.
932 When a commit is made, the active bookmark will advance to the new commit.
933 When a commit is made, the active bookmark will advance to the new commit.
933 A plain :hg:`update` will also advance an active bookmark, if possible.
934 A plain :hg:`update` will also advance an active bookmark, if possible.
934 Updating away from a bookmark will cause it to be deactivated.
935 Updating away from a bookmark will cause it to be deactivated.
935
936
936 Bookmarks can be pushed and pulled between repositories (see
937 Bookmarks can be pushed and pulled between repositories (see
937 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
938 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
938 diverged, a new 'divergent bookmark' of the form 'name@path' will
939 diverged, a new 'divergent bookmark' of the form 'name@path' will
939 be created. Using :hg:`merge` will resolve the divergence.
940 be created. Using :hg:`merge` will resolve the divergence.
940
941
941 Specifying bookmark as '.' to -m or -d options is equivalent to specifying
942 Specifying bookmark as '.' to -m or -d options is equivalent to specifying
942 the active bookmark's name.
943 the active bookmark's name.
943
944
944 A bookmark named '@' has the special property that :hg:`clone` will
945 A bookmark named '@' has the special property that :hg:`clone` will
945 check it out by default if it exists.
946 check it out by default if it exists.
946
947
947 .. container:: verbose
948 .. container:: verbose
948
949
949 Examples:
950 Examples:
950
951
951 - create an active bookmark for a new line of development::
952 - create an active bookmark for a new line of development::
952
953
953 hg book new-feature
954 hg book new-feature
954
955
955 - create an inactive bookmark as a place marker::
956 - create an inactive bookmark as a place marker::
956
957
957 hg book -i reviewed
958 hg book -i reviewed
958
959
959 - create an inactive bookmark on another changeset::
960 - create an inactive bookmark on another changeset::
960
961
961 hg book -r .^ tested
962 hg book -r .^ tested
962
963
963 - rename bookmark turkey to dinner::
964 - rename bookmark turkey to dinner::
964
965
965 hg book -m turkey dinner
966 hg book -m turkey dinner
966
967
967 - move the '@' bookmark from another branch::
968 - move the '@' bookmark from another branch::
968
969
969 hg book -f @
970 hg book -f @
970 '''
971 '''
971 force = opts.get(r'force')
972 force = opts.get(r'force')
972 rev = opts.get(r'rev')
973 rev = opts.get(r'rev')
973 delete = opts.get(r'delete')
974 delete = opts.get(r'delete')
974 rename = opts.get(r'rename')
975 rename = opts.get(r'rename')
975 inactive = opts.get(r'inactive')
976 inactive = opts.get(r'inactive')
976
977
977 if delete and rename:
978 if delete and rename:
978 raise error.Abort(_("--delete and --rename are incompatible"))
979 raise error.Abort(_("--delete and --rename are incompatible"))
979 if delete and rev:
980 if delete and rev:
980 raise error.Abort(_("--rev is incompatible with --delete"))
981 raise error.Abort(_("--rev is incompatible with --delete"))
981 if rename and rev:
982 if rename and rev:
982 raise error.Abort(_("--rev is incompatible with --rename"))
983 raise error.Abort(_("--rev is incompatible with --rename"))
983 if not names and (delete or rev):
984 if not names and (delete or rev):
984 raise error.Abort(_("bookmark name required"))
985 raise error.Abort(_("bookmark name required"))
985
986
986 if delete or rename or names or inactive:
987 if delete or rename or names or inactive:
987 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
988 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
988 if delete:
989 if delete:
989 names = pycompat.maplist(repo._bookmarks.expandname, names)
990 names = pycompat.maplist(repo._bookmarks.expandname, names)
990 bookmarks.delete(repo, tr, names)
991 bookmarks.delete(repo, tr, names)
991 elif rename:
992 elif rename:
992 if not names:
993 if not names:
993 raise error.Abort(_("new bookmark name required"))
994 raise error.Abort(_("new bookmark name required"))
994 elif len(names) > 1:
995 elif len(names) > 1:
995 raise error.Abort(_("only one new bookmark name allowed"))
996 raise error.Abort(_("only one new bookmark name allowed"))
996 rename = repo._bookmarks.expandname(rename)
997 rename = repo._bookmarks.expandname(rename)
997 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
998 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
998 elif names:
999 elif names:
999 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
1000 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
1000 elif inactive:
1001 elif inactive:
1001 if len(repo._bookmarks) == 0:
1002 if len(repo._bookmarks) == 0:
1002 ui.status(_("no bookmarks set\n"))
1003 ui.status(_("no bookmarks set\n"))
1003 elif not repo._activebookmark:
1004 elif not repo._activebookmark:
1004 ui.status(_("no active bookmark\n"))
1005 ui.status(_("no active bookmark\n"))
1005 else:
1006 else:
1006 bookmarks.deactivate(repo)
1007 bookmarks.deactivate(repo)
1007 else: # show bookmarks
1008 else: # show bookmarks
1008 bookmarks.printbookmarks(ui, repo, **opts)
1009 bookmarks.printbookmarks(ui, repo, **opts)
1009
1010
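bookmark() rejects several flag combinations before taking the write locks; the same checks, pulled out as a hedged standalone validator (names and behavior mirror the aborts above, nothing here is Mercurial API):

    def validate_bookmark_opts(names, delete=False, rename=None, rev=None):
        if delete and rename:
            raise ValueError('--delete and --rename are incompatible')
        if delete and rev:
            raise ValueError('--rev is incompatible with --delete')
        if rename and rev:
            raise ValueError('--rev is incompatible with --rename')
        if not names and (delete or rev):
            raise ValueError('bookmark name required')

    validate_bookmark_opts(['feature-x'], rev='.')      # ok: create at a revision
    validate_bookmark_opts(['feature-x'], delete=True)  # ok: delete by name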
1010 @command('branch',
1011 @command('branch',
1011 [('f', 'force', None,
1012 [('f', 'force', None,
1012 _('set branch name even if it shadows an existing branch')),
1013 _('set branch name even if it shadows an existing branch')),
1013 ('C', 'clean', None, _('reset branch name to parent branch name')),
1014 ('C', 'clean', None, _('reset branch name to parent branch name')),
1014 ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
1015 ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
1015 ],
1016 ],
1016 _('[-fC] [NAME]'))
1017 _('[-fC] [NAME]'))
1017 def branch(ui, repo, label=None, **opts):
1018 def branch(ui, repo, label=None, **opts):
1018 """set or show the current branch name
1019 """set or show the current branch name
1019
1020
1020 .. note::
1021 .. note::
1021
1022
1022 Branch names are permanent and global. Use :hg:`bookmark` to create a
1023 Branch names are permanent and global. Use :hg:`bookmark` to create a
1023 light-weight bookmark instead. See :hg:`help glossary` for more
1024 light-weight bookmark instead. See :hg:`help glossary` for more
1024 information about named branches and bookmarks.
1025 information about named branches and bookmarks.
1025
1026
1026 With no argument, show the current branch name. With one argument,
1027 With no argument, show the current branch name. With one argument,
1027 set the working directory branch name (the branch will not exist
1028 set the working directory branch name (the branch will not exist
1028 in the repository until the next commit). Standard practice
1029 in the repository until the next commit). Standard practice
1029 recommends that primary development take place on the 'default'
1030 recommends that primary development take place on the 'default'
1030 branch.
1031 branch.
1031
1032
1032 Unless -f/--force is specified, branch will not let you set a
1033 Unless -f/--force is specified, branch will not let you set a
1033 branch name that already exists.
1034 branch name that already exists.
1034
1035
1035 Use -C/--clean to reset the working directory branch to that of
1036 Use -C/--clean to reset the working directory branch to that of
1036 the parent of the working directory, negating a previous branch
1037 the parent of the working directory, negating a previous branch
1037 change.
1038 change.
1038
1039
1039 Use the command :hg:`update` to switch to an existing branch. Use
1040 Use the command :hg:`update` to switch to an existing branch. Use
1040 :hg:`commit --close-branch` to mark this branch head as closed.
1041 :hg:`commit --close-branch` to mark this branch head as closed.
1041 When all heads of a branch are closed, the branch will be
1042 When all heads of a branch are closed, the branch will be
1042 considered closed.
1043 considered closed.
1043
1044
1044 Returns 0 on success.
1045 Returns 0 on success.
1045 """
1046 """
1046 opts = pycompat.byteskwargs(opts)
1047 opts = pycompat.byteskwargs(opts)
1047 revs = opts.get('rev')
1048 revs = opts.get('rev')
1048 if label:
1049 if label:
1049 label = label.strip()
1050 label = label.strip()
1050
1051
1051 if not opts.get('clean') and not label:
1052 if not opts.get('clean') and not label:
1052 if revs:
1053 if revs:
1053 raise error.Abort(_("no branch name specified for the revisions"))
1054 raise error.Abort(_("no branch name specified for the revisions"))
1054 ui.write("%s\n" % repo.dirstate.branch())
1055 ui.write("%s\n" % repo.dirstate.branch())
1055 return
1056 return
1056
1057
1057 with repo.wlock():
1058 with repo.wlock():
1058 if opts.get('clean'):
1059 if opts.get('clean'):
1059 label = repo[None].p1().branch()
1060 label = repo[None].p1().branch()
1060 repo.dirstate.setbranch(label)
1061 repo.dirstate.setbranch(label)
1061 ui.status(_('reset working directory to branch %s\n') % label)
1062 ui.status(_('reset working directory to branch %s\n') % label)
1062 elif label:
1063 elif label:
1063
1064
1064 scmutil.checknewlabel(repo, label, 'branch')
1065 scmutil.checknewlabel(repo, label, 'branch')
1065 if revs:
1066 if revs:
1066 return cmdutil.changebranch(ui, repo, revs, label)
1067 return cmdutil.changebranch(ui, repo, revs, label)
1067
1068
1068 if not opts.get('force') and label in repo.branchmap():
1069 if not opts.get('force') and label in repo.branchmap():
1069 if label not in [p.branch() for p in repo[None].parents()]:
1070 if label not in [p.branch() for p in repo[None].parents()]:
1070 raise error.Abort(_('a branch of the same name already'
1071 raise error.Abort(_('a branch of the same name already'
1071 ' exists'),
1072 ' exists'),
1072 # i18n: "it" refers to an existing branch
1073 # i18n: "it" refers to an existing branch
1073 hint=_("use 'hg update' to switch to it"))
1074 hint=_("use 'hg update' to switch to it"))
1074
1075
1075 repo.dirstate.setbranch(label)
1076 repo.dirstate.setbranch(label)
1076 ui.status(_('marked working directory as branch %s\n') % label)
1077 ui.status(_('marked working directory as branch %s\n') % label)
1077
1078
1078 # find any open named branches aside from default
1079 # find any open named branches aside from default
1079 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1080 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1080 if n != "default" and not c]
1081 if n != "default" and not c]
1081 if not others:
1082 if not others:
1082 ui.status(_('(branches are permanent and global, '
1083 ui.status(_('(branches are permanent and global, '
1083 'did you want a bookmark?)\n'))
1084 'did you want a bookmark?)\n'))
1084
1085
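Without -f/--force, branch() only accepts a name that already exists when one of the working directory's parents is already on that branch, i.e. you are continuing the branch rather than shadowing it. A hedged sketch of that check, with plain sets standing in for repo.branchmap() and the parents' branches:

    def may_set_branch(label, existing_branches, parent_branches, force=False):
        if force or label not in existing_branches:
            return True
        return label in parent_branches        # continuing, not shadowing

    print(may_set_branch('stable', {'default', 'stable'}, {'default'}))         # False
    print(may_set_branch('stable', {'default', 'stable'}, {'stable'}))          # True
    print(may_set_branch('stable', {'default', 'stable'}, {'default'}, True))   # True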
1085 @command('branches',
1086 @command('branches',
1086 [('a', 'active', False,
1087 [('a', 'active', False,
1087 _('show only branches that have unmerged heads (DEPRECATED)')),
1088 _('show only branches that have unmerged heads (DEPRECATED)')),
1088 ('c', 'closed', False, _('show normal and closed branches')),
1089 ('c', 'closed', False, _('show normal and closed branches')),
1089 ] + formatteropts,
1090 ] + formatteropts,
1090 _('[-c]'), cmdtype=readonly)
1091 _('[-c]'), cmdtype=readonly)
1091 def branches(ui, repo, active=False, closed=False, **opts):
1092 def branches(ui, repo, active=False, closed=False, **opts):
1092 """list repository named branches
1093 """list repository named branches
1093
1094
1094 List the repository's named branches, indicating which ones are
1095 List the repository's named branches, indicating which ones are
1095 inactive. If -c/--closed is specified, also list branches which have
1096 inactive. If -c/--closed is specified, also list branches which have
1096 been marked closed (see :hg:`commit --close-branch`).
1097 been marked closed (see :hg:`commit --close-branch`).
1097
1098
1098 Use the command :hg:`update` to switch to an existing branch.
1099 Use the command :hg:`update` to switch to an existing branch.
1099
1100
1100 Returns 0.
1101 Returns 0.
1101 """
1102 """
1102
1103
1103 opts = pycompat.byteskwargs(opts)
1104 opts = pycompat.byteskwargs(opts)
1104 ui.pager('branches')
1105 ui.pager('branches')
1105 fm = ui.formatter('branches', opts)
1106 fm = ui.formatter('branches', opts)
1106 hexfunc = fm.hexfunc
1107 hexfunc = fm.hexfunc
1107
1108
1108 allheads = set(repo.heads())
1109 allheads = set(repo.heads())
1109 branches = []
1110 branches = []
1110 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1111 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1111 isactive = False
1112 isactive = False
1112 if not isclosed:
1113 if not isclosed:
1113 openheads = set(repo.branchmap().iteropen(heads))
1114 openheads = set(repo.branchmap().iteropen(heads))
1114 isactive = bool(openheads & allheads)
1115 isactive = bool(openheads & allheads)
1115 branches.append((tag, repo[tip], isactive, not isclosed))
1116 branches.append((tag, repo[tip], isactive, not isclosed))
1116 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1117 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1117 reverse=True)
1118 reverse=True)
1118
1119
1119 for tag, ctx, isactive, isopen in branches:
1120 for tag, ctx, isactive, isopen in branches:
1120 if active and not isactive:
1121 if active and not isactive:
1121 continue
1122 continue
1122 if isactive:
1123 if isactive:
1123 label = 'branches.active'
1124 label = 'branches.active'
1124 notice = ''
1125 notice = ''
1125 elif not isopen:
1126 elif not isopen:
1126 if not closed:
1127 if not closed:
1127 continue
1128 continue
1128 label = 'branches.closed'
1129 label = 'branches.closed'
1129 notice = _(' (closed)')
1130 notice = _(' (closed)')
1130 else:
1131 else:
1131 label = 'branches.inactive'
1132 label = 'branches.inactive'
1132 notice = _(' (inactive)')
1133 notice = _(' (inactive)')
1133 current = (tag == repo.dirstate.branch())
1134 current = (tag == repo.dirstate.branch())
1134 if current:
1135 if current:
1135 label = 'branches.current'
1136 label = 'branches.current'
1136
1137
1137 fm.startitem()
1138 fm.startitem()
1138 fm.write('branch', '%s', tag, label=label)
1139 fm.write('branch', '%s', tag, label=label)
1139 rev = ctx.rev()
1140 rev = ctx.rev()
1140 padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
1141 padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
1141 fmt = ' ' * padsize + ' %d:%s'
1142 fmt = ' ' * padsize + ' %d:%s'
1142 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1143 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1143 label='log.changeset changeset.%s' % ctx.phasestr())
1144 label='log.changeset changeset.%s' % ctx.phasestr())
1144 fm.context(ctx=ctx)
1145 fm.context(ctx=ctx)
1145 fm.data(active=isactive, closed=not isopen, current=current)
1146 fm.data(active=isactive, closed=not isopen, current=current)
1146 if not ui.quiet:
1147 if not ui.quiet:
1147 fm.plain(notice)
1148 fm.plain(notice)
1148 fm.plain('\n')
1149 fm.plain('\n')
1149 fm.end()
1150 fm.end()
1150
1151
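In the listing loop above, the branch name and the revision number are padded together to 31 display columns so the rev:node field lines up across rows. A hedged sketch of that layout, with len() in place of encoding.colwidth():

    def format_branch_line(tag, rev, node12):
        padsize = max(31 - len('%d' % rev) - len(tag), 0)
        return tag + ' ' * padsize + ' %d:%s' % (rev, node12)

    out1 = format_branch_line('default', 4023, '1a2b3c4d5e6f')
    out2 = format_branch_line('stable', 87, 'abcdefabcdef')
    print(out1)
    print(out2)
    assert out1.index(':') == out2.index(':')   # the rev:node columns align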
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a bundle file

    Generate a bundle file containing data to be transferred to another
    repository.

    To create a bundle containing all changesets, use -a/--all
    (or --base null). Otherwise, hg assumes the destination will have
    all the nodes you specify with --base parameters. Otherwise, hg
    will assume the repository has all the nodes in destination, or
    default-push/default if no destination is specified, where destination
    is the repository you provide through DEST option.

    You can change bundle format with the -t/--type option. See
    :hg:`help bundlespec` for documentation on this format. By default,
    the most appropriate format is used and compression defaults to
    bzip2.

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    opts = pycompat.byteskwargs(opts)
    revs = None
    if 'rev' in opts:
        revstrings = opts['rev']
        revs = scmutil.revrange(repo, revstrings)
        if revstrings and not revs:
            raise error.Abort(_('no commits to bundle'))

    bundletype = opts.get('type', 'bzip2').lower()
    try:
        bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
    except error.UnsupportedBundleSpecification as e:
        raise error.Abort(pycompat.bytestr(e),
                          hint=_("see 'hg help bundlespec' for supported "
                                 "values for --type"))
    cgversion = bundlespec.contentopts["cg.version"]

    # Packed bundles are a pseudo bundle format for now.
    if cgversion == 's1':
        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
                          hint=_("use 'hg debugcreatestreamclonebundle'"))

    if opts.get('all'):
        if dest:
            raise error.Abort(_("--all is incompatible with specifying "
                                "a destination"))
        if opts.get('base'):
            ui.warn(_("ignoring --base because --all was specified\n"))
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    if cgversion not in changegroup.supportedoutgoingversions(repo):
        raise error.Abort(_("repository does not support bundle version %s") %
                          cgversion)

    if base:
        if dest:
            raise error.Abort(_("--base is incompatible with specifying "
                                "a destination"))
        common = [repo.lookup(rev) for rev in base]
        heads = [repo.lookup(r) for r in revs] if revs else None
        outgoing = discovery.outgoing(repo, common, heads)
    else:
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)

    if not outgoing.missing:
        scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
        return 1

    bcompression = bundlespec.compression
    if cgversion == '01': #bundle1
        if bcompression is None:
            bcompression = 'UN'
        bversion = 'HG10' + bcompression
        bcompression = None
    elif cgversion in ('02', '03'):
        bversion = 'HG20'
    else:
        raise error.ProgrammingError(
            'bundle: unexpected changegroup version %s' % cgversion)

    # TODO compression options should be derived from bundlespec parsing.
    # This is a temporary hack to allow adjusting bundle compression
    # level without a) formalizing the bundlespec changes to declare it
    # b) introducing a command flag.
    compopts = {}
    complevel = ui.configint('experimental', 'bundlecomplevel')
    if complevel is not None:
        compopts['level'] = complevel

    # Allow overriding the bundling of obsmarker in phases through
    # configuration while we don't have a bundle version that include them
    if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
        bundlespec.contentopts['obsolescence'] = True
    if repo.ui.configbool('experimental', 'bundle-phases'):
        bundlespec.contentopts['phases'] = True

    bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
                           bundlespec.contentopts, compression=bcompression,
                           compopts=compopts)

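# Illustrative sketch (not part of commands.py): how the changegroup version
# and compression taken from the bundlespec become an on-disk bundle header,
# mirroring the tail of bundle() above.  The function name is hypothetical and
# the logic is a simplification of the real code.
def _bundle_header_sketch(cgversion, compression):
    """Return (header, compression-passed-separately) for a bundlespec."""
    if cgversion == '01':                 # legacy bundle1 container
        comp = compression or 'UN'        # uncompressed when unspecified
        return 'HG10' + comp, None        # compression is baked into header
    if cgversion in ('02', '03'):
        return 'HG20', compression        # bundle2 carries compression apart
    raise ValueError('unexpected changegroup version %s' % cgversion)

# e.g. _bundle_header_sketch('01', 'BZ') -> ('HG10BZ', None)
#      _bundle_header_sketch('02', None) -> ('HG20', None)
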
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts + formatteropts,
    _('[OPTION]... FILE...'),
    inferrepo=True, cmdtype=readonly)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository
    :``\\``: literal "\\" character

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev)
    m = scmutil.match(ctx, (file1,) + pats, opts)
    fntemplate = opts.pop('output', '')
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = ''

    if fntemplate:
        fm = formatter.nullformatter(ui, 'cat')
    else:
        ui.pager('cat')
        fm = ui.formatter('cat', opts)
    with fm:
        return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
                           **pycompat.strkwargs(opts))

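# Illustrative sketch (not part of commands.py): a simplified expansion of a
# few of the -o/--output formatting rules documented in cat() above.  The
# function name and sample values are hypothetical; the real expansion lives
# in cmdutil and handles every rule listed in the docstring.
import os

def _expand_output_sketch(pat, path, short_hash, rev):
    """Expand a small subset of the %-rules for an output file name."""
    replacements = {
        '%%': '%',
        '%s': os.path.basename(path),
        '%d': os.path.dirname(path) or '.',
        '%p': path,
        '%h': short_hash,
        '%R': '%d' % rev,
    }
    out = []
    i = 0
    while i < len(pat):
        token = pat[i:i + 2]
        if token in replacements:
            out.append(replacements[token])
            i += 2
        else:
            out.append(pat[i])
            i += 1
    return ''.join(out)

# e.g. _expand_output_sketch('%d/%s-%h.txt', 'lib/util.py', 'a1b2c3d4e5f6', 7)
#      -> 'lib/util.py-a1b2c3d4e5f6.txt'
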
@command('^clone',
    [('U', 'noupdate', None, _('the clone will include an empty working '
                               'directory (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
     _('REV')),
    ('r', 'rev', [], _('do not clone everything, but include this changeset'
                       ' and its ancestors'), _('REV')),
    ('b', 'branch', [], _('do not clone everything, but include this branch\'s'
                          ' changesets and their ancestors'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None,
     _('an alias to --stream (DEPRECATED)')),
    ('', 'stream', None,
     _('clone with minimal data processing')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    In normal clone mode, the remote normalizes repository data into a common
    exchange format and the receiving end translates this data into its local
    storage format. --stream activates a different clone mode that essentially
    copies repository files from the remote with minimal data processing. This
    significantly reduces the CPU cost of a clone both remotely and locally.
    However, it often increases the transferred data size by 30-40%. This can
    result in substantially faster clones where I/O throughput is plentiful,
    especially for larger repositories. A side-effect of --stream clones is
    that storage settings and requirements on the remote are applied locally:
    a modern client may inherit legacy or inefficient storage used by the
    remote or a legacy Mercurial client may not be able to clone from a
    modern Mercurial remote.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL. When this is done,
      hooks operating on incoming changesets and changegroups may fire twice,
      once for the bundle fetched from the URL and another for any additional
      data not fetched from this URL. In addition, if an error occurs, the
      repository may be rolled back to a partial clone. This behavior may
      change in future releases. See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a streaming clone while checking out a specified version::

          hg clone --stream http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('noupdate') and opts.get('updaterev'):
        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))

    r = hg.clone(ui, opts, source, dest,
                 pull=opts.get('pull'),
                 stream=opts.get('stream') or opts.get('uncompressed'),
                 revs=opts.get('rev'),
                 update=opts.get('updaterev') or not opts.get('noupdate'),
                 branch=opts.get('branch'),
                 shareopts=opts.get('shareopts'))

    return r is None

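# Illustrative sketch (not part of commands.py): the "first applicable
# revision" rule from the clone() docstring above, expressed as a simple
# priority scan.  The function name and the candidate labels/values in the
# docstring example are hypothetical.
def _pick_checkout_sketch(candidates):
    """Return the first candidate whose value is not None.

    `candidates` is an ordered list of (label, value) pairs following the
    a) .. i) list in the docstring, e.g.:

        [('noupdate', None), ('updaterev', None), ('rev', '04e544'),
         ('branch', None), ('url#branch', None), ('@ bookmark', None),
         ('default tip head', 'deadbeef'), ('tip', '000000')]
    """
    for label, value in candidates:
        if value is not None:
            return label, value
    return 'tip', None
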
@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch head as closed')),
    ('', 'amend', None, _('amend the parent of the working directory')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _docommit(ui, repo, *pats, **opts)
    finally:
        release(lock, wlock)

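# Illustrative sketch (not part of commands.py): the acquire/release pattern
# used by commit() above -- take the working-directory lock before the store
# lock and release in reverse order even on error.  The callables and the
# .release() method here are hypothetical stand-ins for the real lock objects
# returned by repo.wlock()/repo.lock().
def _locked_sketch(acquire_wlock, acquire_lock, work):
    wlock = lock = None
    try:
        wlock = acquire_wlock()
        lock = acquire_lock()
        return work()
    finally:
        # release in reverse acquisition order, tolerating partial acquisition
        for lk in (lock, wlock):
            if lk is not None:
                lk.release()
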
def _docommit(ui, repo, *pats, **opts):
    if opts.get(r'interactive'):
        opts.pop(r'interactive')
        ret = cmdutil.dorecord(ui, repo, commit, None, False,
                               cmdutil.recordfilter, *pats,
                               **opts)
        # ret can be 0 (no changes to record) or the value returned by
        # commit(), 1 if nothing changed or None on success.
        return 1 if ret == 0 else ret

    opts = pycompat.byteskwargs(opts)
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise error.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = '1'

        if not bheads:
            raise error.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo[None].parents()[0].p1().branch() != branch and \
               repo[None].parents()[0].p2().branch() != branch:
                raise error.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        rewriteutil.precheck(repo, [old.rev()], 'amend')

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        node = cmdutil.amend(ui, repo, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
    else:
        def commitfunc(ui, repo, message, match, opts):
            overrides = {}
            if opts.get('secret'):
                overrides[('phases', 'new-commit')] = 'secret'

            baseui = repo.baseui
            with baseui.configoverride(overrides, 'commit'):
                with ui.configoverride(overrides, 'commit'):
                    editform = cmdutil.mergeeditform(repo[None],
                                                     'commit.normal')
                    editor = cmdutil.getcommiteditor(
                        editform=editform, **pycompat.strkwargs(opts))
                    return repo.commit(message,
                                       opts.get('user'),
                                       opts.get('date'),
                                       match,
                                       editor=editor,
                                       extra=extra)

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            stat = cmdutil.postcommitstatus(repo, pats, opts)
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)

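# Illustrative sketch (not part of commands.py): commitfunc() above flips
# phases.new-commit to 'secret' only for the duration of the commit via
# ui.configoverride().  Below is a plain-Python equivalent of such a scoped
# override on a dict-like config; the name and helper are hypothetical.
import contextlib

@contextlib.contextmanager
def _configoverride_sketch(config, overrides):
    """Temporarily apply `overrides` (a dict) on top of dict-like `config`."""
    missing = object()
    saved = {k: config.get(k, missing) for k in overrides}
    config.update(overrides)
    try:
        yield config
    finally:
        for k, v in saved.items():
            if v is missing:
                config.pop(k, None)
            else:
                config[k] = v

# e.g.
# cfg = {('phases', 'new-commit'): 'draft'}
# with _configoverride_sketch(cfg, {('phases', 'new-commit'): 'secret'}):
#     pass  # commits created here would be secret
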
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))] + formatteropts,
    _('[-u] [NAME]...'),
    optionalrepo=True, cmdtype=readonly)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names or section.names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.vfs.join('hgrc')]
        elif opts.get('global'):
            paths = rcutil.systemrcpath()
        else:
            paths = rcutil.userrcpath()

        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            fp = open(f, "wb")
            fp.write(util.tonativeeol(samplehgrc))
            fp.close()

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"),
                  blockedtag='config_edit')
        return
    ui.pager('config')
    fm = ui.formatter('config', opts)
    for t, f in rcutil.rccomponents():
        if t == 'path':
            ui.debug('read config from: %s\n' % f)
        elif t == 'items':
            for section, name, value, source in f:
                ui.debug('set config by: %s\n' % source)
        else:
            raise error.ProgrammingError('unknown rctype: %s' % t)
    untrusted = bool(opts.get('untrusted'))

    selsections = selentries = []
    if values:
        selsections = [v for v in values if '.' not in v]
        selentries = [v for v in values if '.' in v]
    uniquesel = (len(selentries) == 1 and not selsections)
    selsections = set(selsections)
    selentries = set(selentries)

    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        if fm.isplain():
            source = source or 'none'
            value = value.replace('\n', '\\n')
        entryname = section + '.' + name
        if values and not (section in selsections or entryname in selentries):
            continue
        fm.startitem()
        fm.condwrite(ui.debugflag, 'source', '%s: ', source)
        if uniquesel:
            fm.data(name=entryname)
            fm.write('value', '%s\n', value)
        else:
            fm.write('name value', '%s=%s\n', entryname, value)
        matched = True
    fm.end()
    if matched:
        return 0
    return 1

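# Illustrative sketch (not part of commands.py): the NAME filtering used by
# config() above -- arguments without a dot select whole sections, arguments
# with a dot select single entries, and a single-entry query prints the bare
# value.  The function name and sample data are hypothetical.
def _filter_config_sketch(values, items):
    """`items` is an iterable of (section, name, value) tuples."""
    selsections = {v for v in values if '.' not in v}
    selentries = {v for v in values if '.' in v}
    uniquesel = (len(selentries) == 1 and not selsections)
    out = []
    for section, name, value in items:
        entryname = section + '.' + name
        if values and not (section in selsections or entryname in selentries):
            continue
        out.append(value if uniquesel else '%s=%s' % (entryname, value))
    return out

# e.g. _filter_config_sketch(['ui.username'],
#                            [('ui', 'username', 'alice'),
#                             ('ui', 'editor', 'vi')])
#      -> ['alice']
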
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts)

@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for cmd, vals in sorted(table.iteritems()):
        cmd = cmd.split('|')[0].strip('^')
        opts = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (cmd, opts))

@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get(r'options'):
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True, cmdtype=readonly)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
        ctx2 = scmutil.revsingle(repo, change, None)
        ctx1 = ctx2.p1()
    else:
        repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
        ctx1, ctx2 = scmutil.revpair(repo, revs)
    node1, node2 = ctx1.node(), ctx2.node()

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(ctx2, pats, opts)
    ui.pager('diff')
    logcmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                              listsubrepos=opts.get('subrepos'),
                              root=opts.get('root'))

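# Illustrative sketch (not part of commands.py): how diff() above picks its
# two comparison points.  --rev and --change are mutually exclusive; --change
# compares a changeset to its first parent, otherwise a revision pair is
# resolved (defaulting to working directory vs. first parent), and --reverse
# swaps the sides.  'resolve_pair' and 'parent_of' are hypothetical stand-ins
# for scmutil.revpair/first-parent lookup.
def _diff_endpoints_sketch(revs, change, reverse, resolve_pair, parent_of):
    """Return (old, new) comparison points for a diff request."""
    if revs and change:
        raise ValueError('cannot specify --rev and --change at the same time')
    if change:
        new = change
        old = parent_of(change)          # -c REV: REV against its first parent
    else:
        old, new = resolve_pair(revs)    # -r pairs, or working dir vs. parent
    if reverse:
        old, new = new, old
    return old, new
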
@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'), cmdtype=readonly)
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number
    :``\\``: literal "\\" character

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    ui.pager('export')
    cmdutil.export(repo, revs, fntemplate=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))

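# A rough, simplified illustration of how the -o/--output keys documented in
# export() expand; this stand-in ignores %% escaping and the remaining keys,
# and is not the helper Mercurial itself uses:
def _expandpatchname(fmt, seqno, total, desc):
    firstline = desc.splitlines()[0] if desc else ''
    word = ''.join(c for c in firstline if c.isalnum())
    return (fmt.replace('%N', '%d' % total)
               .replace('%n', '%02d' % seqno)
               .replace('%m', word))

# e.g. _expandpatchname('%n-%m.patch', 1, 3, 'fix parser crash')
# gives '01-fixparsercrash.patch'
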
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'), cmdtype=readonly)
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - show sizes and flags for the current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev, None)

    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ui.pager('files')
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))

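# files() ends each name with '\n', or with NUL under -0/--print0 so the
# output can be piped safely to `xargs -0`; a minimal sketch of the same
# termination convention outside the formatter machinery:
def _printnames(write, names, print0=False):
    end = '\0' if print0 else '\n'
    for name in names:
        write(name + end)
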
@command(
    '^forget',
    walkopts + dryrunopts,
    _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    dryrun = opts.get(r'dry_run')
    rejected = cmdutil.forget(ui, repo, m, prefix="",
                              explicitonly=False, dryrun=dryrun)[0]
    return rejected and 1 or 0

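# forget() above maps "cmdutil.forget() rejected at least one file" to exit
# status 1 through the old `and/or` idiom; a minimal sketch of the same
# mapping, assuming `rejected` is the list taken from cmdutil.forget(...)[0]:
def _forgetstatus(rejected):
    return 1 if rejected else 0
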
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
    ('c', 'continue', False, _('resume interrupted graft')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('', 'log', None, _('append graft info to log message')),
    ('f', 'force', False, _('force graft')),
    ('D', 'currentdate', False,
     _('record the current date as commit date')),
    ('U', 'currentuser', False,
     _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of, or have been grafted to, the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion.
    '''
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)

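# graft() above only takes the working-directory lock and then delegates to
# _dograft(); a minimal sketch of that wrap-with-lock pattern, assuming any
# repo object whose wlock() is usable as a context manager, as it is here:
def _withwlock(repo, fn, *args, **kwargs):
    with repo.wlock():
        return fn(*args, **kwargs)
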
def _dograft(ui, repo, *revs, **opts):
    opts = pycompat.byteskwargs(opts)
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    revs = list(revs)
    revs.extend(opts.get('rev'))

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % dateutil.makedate()

    editor = cmdutil.getcommiteditor(editform='graft',
                                     **pycompat.strkwargs(opts))

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        if not revs:
            raise error.Abort(_('no revisions specified'))
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        # The only changesets we can be sure don't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats.unresolvedcount > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % procutil.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % procutil.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        repo.vfs.unlinkpath('graftstate', ignoremissing=True)

    return 0

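# _dograft() records the nodes that still need grafting, one hex node per
# line, in .hg/graftstate, and --continue reads them back with splitlines().
# A minimal sketch of that round-trip, assuming a vfs-like object with the
# read/write methods used above:
def _savegraftstate(vfs, hexnodes):
    vfs.write('graftstate', ''.join(n + '\n' for n in hexnodes))

def _loadgraftstate(vfs):
    return vfs.read('graftstate').splitlines()
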
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + formatteropts + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True, cmdtype=readonly)
def grep(ui, repo, pattern, *pats, **opts):
    """search revision history for a pattern in specified files

    Search revision history for a regular expression in the specified
    files or the entire project.

    By default, grep prints the most recent revision number for each
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that becomes
    a non-match, or "+" for a non-match that becomes a match), use the
    --all flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified (and -f/--follow isn't set), all files in
    the repository are searched, including those that don't exist in the
    current branch or have been deleted in a prior changeset.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % pycompat.bytestr(inst))
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def findpos(self):
            """Iterate all (start, end) indices of matches"""
            yield self.colstart, self.colend
            p = self.colend
            while p < len(self.line):
                m = regexp.search(self.line, p)
                if not m:
                    break
                yield m.span()
                p = m.end()

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fm, fn, ctx, pstates, states):
        rev = ctx.rev()
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = str
        if ui.quiet:
            datefmt = '%Y-%m-%d'
        else:
            datefmt = '%a %b %d %H:%M:%S %Y %1%2'
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return stringutil.binary(flog.read(ctx.filenode(fn)))

        fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.data(node=fm.hexfunc(ctx.node()))
            cols = [
                ('filename', fn, True),
                ('rev', rev, True),
                ('linenumber', l.linenum, opts.get('line_number')),
            ]
            if opts.get('all'):
                cols.append(('change', change, True))
            cols.extend([
                ('user', formatuser(ctx.user()), opts.get('user')),
                ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
            ])
            lastcol = next(name for name, data, cond in reversed(cols) if cond)
            for name, data, cond in cols:
                field = fieldnamemap.get(name, name)
                fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
                if cond and name != lastcol:
                    fm.plain(sep, label='grep.sep')
            if not opts.get('files_with_matches'):
                fm.plain(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    fm.plain(_(" Binary file matches"))
                else:
                    displaymatches(fm.nested('texts'), l)
            fm.plain(eol)
            found = True
            if opts.get('files_with_matches'):
                break
        return found

    def displaymatches(fm, l):
        p = 0
        for s, e in l.findpos():
            if p < s:
                fm.startitem()
                fm.write('text', '%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write('text', '%s', l.line[s:e], label='grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write('text', '%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    skip = {}
    revfiles = {}
    match = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    ui.pager('grep')
    fm = ui.formatter('grep', opts)
    for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fm, fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del revfiles[rev]
        # We will keep the matches dict for the duration of the window
        # clear the matches dict once the window is over
        if not revfiles:
            matches.clear()
    fm.end()

    return not found

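# For reference, the matchlines() helper inside grep() yields, for every
# regexp match, the 1-based line number, the match's start and end columns
# within that line, and the line text itself.  Worked example (values traced
# by hand, assuming pattern 'foo' and body 'foo\nbar foo\n'):
#
#   (1, 0, 3, 'foo')
#   (2, 4, 7, 'bar foo')
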
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'), cmdtype=readonly)
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)
    start = None
    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
        start = scmutil.revsingle(repo, rev, None).node()

    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    ui.pager('heads')
    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

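# heads() above prints the selected heads newest-first by sorting changectx
# objects on descending local revision number; a minimal sketch of that
# ordering, assuming ctx objects exposing rev() as in this module:
def _newestfirst(ctxs):
    return sorted(ctxs, key=lambda ctx: ctx.rev(), reverse=True)
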
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
    ('c', 'command', None, _('show only help for commands')),
    ('k', 'keyword', None, _('show topics matching keyword')),
    ('s', 'system', [], _('show help for specific platform(s)')),
    ],
    _('[-ecks] [TOPIC]'),
    norepo=True, cmdtype=readonly)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    keep = opts.get(r'system') or []
    if len(keep) == 0:
        if pycompat.sysplatform.startswith('win'):
            keep.append('windows')
        elif pycompat.sysplatform == 'OpenVMS':
            keep.append('vms')
        elif pycompat.sysplatform == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(pycompat.sysplatform.lower())
    if ui.verbose:
        keep.append('verbose')

    commands = sys.modules[__name__]
    formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
    ui.pager('help')
    ui.write(formatted)

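# help_() seeds `keep` with platform keywords so only the relevant
# platform-specific help sections are rendered; the same selection, pulled
# out as a standalone sketch over the sysplatform strings checked above:
def _platformkeywords(sysplatform, verbose=False):
    if sysplatform.startswith('win'):
        keep = ['windows']
    elif sysplatform == 'OpenVMS':
        keep = ['vms']
    elif sysplatform == 'plan9':
        keep = ['plan9']
    else:
        keep = ['unix', sysplatform.lower()]
    if verbose:
        keep.append('verbose')
    return keep
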
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts + formatteropts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True, cmdtype=readonly)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository including the working directory. Specify -r . to get
    information about the working directory parent without scanning
    uncommitted changes.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """

    opts = pycompat.byteskwargs(opts)
    if not repo and not source:
        raise error.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))

    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    fm = ui.formatter('identify', opts)
    fm.startitem()

    if not repo:
        if num or branch or tags:
            raise error.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        hexrev = hexfunc(remoterev)
        if default or id:
            output = [hexrev]
        fm.data(id=hexrev)

        def getbms():
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        bms = getbms()
        if bookmarks:
            output.extend(bms)
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(bms)
            if bm:
                output.append(bm)

        fm.data(node=hex(remoterev))
        fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
    else:
        if rev:
            repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2830 ctx = scmutil.revsingle(repo, rev, None)
2830
2831
2831 if ctx.rev() is None:
2832 if ctx.rev() is None:
2832 ctx = repo[None]
2833 ctx = repo[None]
2833 parents = ctx.parents()
2834 parents = ctx.parents()
2834 taglist = []
2835 taglist = []
2835 for p in parents:
2836 for p in parents:
2836 taglist.extend(p.tags())
2837 taglist.extend(p.tags())
2837
2838
2838 dirty = ""
2839 dirty = ""
2839 if ctx.dirty(missing=True, merge=False, branch=False):
2840 if ctx.dirty(missing=True, merge=False, branch=False):
2840 dirty = '+'
2841 dirty = '+'
2841 fm.data(dirty=dirty)
2842 fm.data(dirty=dirty)
2842
2843
2843 hexoutput = [hexfunc(p.node()) for p in parents]
2844 hexoutput = [hexfunc(p.node()) for p in parents]
2844 if default or id:
2845 if default or id:
2845 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
2846 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
2846 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
2847 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
2847
2848
2848 if num:
2849 if num:
2849 numoutput = ["%d" % p.rev() for p in parents]
2850 numoutput = ["%d" % p.rev() for p in parents]
2850 output.append("%s%s" % ('+'.join(numoutput), dirty))
2851 output.append("%s%s" % ('+'.join(numoutput), dirty))
2851
2852
2852 fn = fm.nested('parents')
2853 fn = fm.nested('parents')
2853 for p in parents:
2854 for p in parents:
2854 fn.startitem()
2855 fn.startitem()
2855 fn.data(rev=p.rev())
2856 fn.data(rev=p.rev())
2856 fn.data(node=p.hex())
2857 fn.data(node=p.hex())
2857 fn.context(ctx=p)
2858 fn.context(ctx=p)
2858 fn.end()
2859 fn.end()
2859 else:
2860 else:
2860 hexoutput = hexfunc(ctx.node())
2861 hexoutput = hexfunc(ctx.node())
2861 if default or id:
2862 if default or id:
2862 output = [hexoutput]
2863 output = [hexoutput]
2863 fm.data(id=hexoutput)
2864 fm.data(id=hexoutput)
2864
2865
2865 if num:
2866 if num:
2866 output.append(pycompat.bytestr(ctx.rev()))
2867 output.append(pycompat.bytestr(ctx.rev()))
2867 taglist = ctx.tags()
2868 taglist = ctx.tags()
2868
2869
2869 if default and not ui.quiet:
2870 if default and not ui.quiet:
2870 b = ctx.branch()
2871 b = ctx.branch()
2871 if b != 'default':
2872 if b != 'default':
2872 output.append("(%s)" % b)
2873 output.append("(%s)" % b)
2873
2874
2874 # multiple tags for a single parent separated by '/'
2875 # multiple tags for a single parent separated by '/'
2875 t = '/'.join(taglist)
2876 t = '/'.join(taglist)
2876 if t:
2877 if t:
2877 output.append(t)
2878 output.append(t)
2878
2879
2879 # multiple bookmarks for a single parent separated by '/'
2880 # multiple bookmarks for a single parent separated by '/'
2880 bm = '/'.join(ctx.bookmarks())
2881 bm = '/'.join(ctx.bookmarks())
2881 if bm:
2882 if bm:
2882 output.append(bm)
2883 output.append(bm)
2883 else:
2884 else:
2884 if branch:
2885 if branch:
2885 output.append(ctx.branch())
2886 output.append(ctx.branch())
2886
2887
2887 if tags:
2888 if tags:
2888 output.extend(taglist)
2889 output.extend(taglist)
2889
2890
2890 if bookmarks:
2891 if bookmarks:
2891 output.extend(ctx.bookmarks())
2892 output.extend(ctx.bookmarks())
2892
2893
2893 fm.data(node=ctx.hex())
2894 fm.data(node=ctx.hex())
2894 fm.data(branch=ctx.branch())
2895 fm.data(branch=ctx.branch())
2895 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
2896 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
2896 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
2897 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
2897 fm.context(ctx=ctx)
2898 fm.context(ctx=ctx)
2898
2899
2899 fm.plain("%s\n" % ' '.join(output))
2900 fm.plain("%s\n" % ' '.join(output))
2900 fm.end()
2901 fm.end()
2901
2902
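# Illustrative sketch (not part of commands.py): the "build identifier"
# example from the identify docstring above, driven through the hg command
# line with the standard library's subprocess module. It assumes an `hg`
# executable on PATH and a repository in the current directory.
import subprocess

def build_id(rev=None):
    # Without -r, `hg id --id` describes the working directory (a trailing
    # "+" marks uncommitted changes); with -r REV it describes that revision.
    cmd = ['hg', 'id', '--id']
    if rev:
        cmd += ['-r', rev]
    return subprocess.check_output(cmd).strip().decode('ascii')

# Usage mirroring `hg id --id > build-id.dat` from the docstring:
#     open('build-id.dat', 'w').write(build_id() + '\n')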
2902 @command('import|patch',
2903 @command('import|patch',
2903 [('p', 'strip', 1,
2904 [('p', 'strip', 1,
2904 _('directory strip option for patch. This has the same '
2905 _('directory strip option for patch. This has the same '
2905 'meaning as the corresponding patch option'), _('NUM')),
2906 'meaning as the corresponding patch option'), _('NUM')),
2906 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2907 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2907 ('e', 'edit', False, _('invoke editor on commit messages')),
2908 ('e', 'edit', False, _('invoke editor on commit messages')),
2908 ('f', 'force', None,
2909 ('f', 'force', None,
2909 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2910 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2910 ('', 'no-commit', None,
2911 ('', 'no-commit', None,
2911 _("don't commit, just update the working directory")),
2912 _("don't commit, just update the working directory")),
2912 ('', 'bypass', None,
2913 ('', 'bypass', None,
2913 _("apply patch without touching the working directory")),
2914 _("apply patch without touching the working directory")),
2914 ('', 'partial', None,
2915 ('', 'partial', None,
2915 _('commit even if some hunks fail')),
2916 _('commit even if some hunks fail')),
2916 ('', 'exact', None,
2917 ('', 'exact', None,
2917 _('abort if patch would apply lossily')),
2918 _('abort if patch would apply lossily')),
2918 ('', 'prefix', '',
2919 ('', 'prefix', '',
2919 _('apply patch to subdirectory'), _('DIR')),
2920 _('apply patch to subdirectory'), _('DIR')),
2920 ('', 'import-branch', None,
2921 ('', 'import-branch', None,
2921 _('use any branch information in patch (implied by --exact)'))] +
2922 _('use any branch information in patch (implied by --exact)'))] +
2922 commitopts + commitopts2 + similarityopts,
2923 commitopts + commitopts2 + similarityopts,
2923 _('[OPTION]... PATCH...'))
2924 _('[OPTION]... PATCH...'))
2924 def import_(ui, repo, patch1=None, *patches, **opts):
2925 def import_(ui, repo, patch1=None, *patches, **opts):
2925 """import an ordered set of patches
2926 """import an ordered set of patches
2926
2927
2927 Import a list of patches and commit them individually (unless
2928 Import a list of patches and commit them individually (unless
2928 --no-commit is specified).
2929 --no-commit is specified).
2929
2930
2930 To read a patch from standard input (stdin), use "-" as the patch
2931 To read a patch from standard input (stdin), use "-" as the patch
2931 name. If a URL is specified, the patch will be downloaded from
2932 name. If a URL is specified, the patch will be downloaded from
2932 there.
2933 there.
2933
2934
2934 Import first applies changes to the working directory (unless
2935 Import first applies changes to the working directory (unless
2935 --bypass is specified); import will abort if there are outstanding
2936 --bypass is specified); import will abort if there are outstanding
2936 changes.
2937 changes.
2937
2938
2938 Use --bypass to apply and commit patches directly to the
2939 Use --bypass to apply and commit patches directly to the
2939 repository, without affecting the working directory. Without
2940 repository, without affecting the working directory. Without
2940 --exact, patches will be applied on top of the working directory
2941 --exact, patches will be applied on top of the working directory
2941 parent revision.
2942 parent revision.
2942
2943
2943 You can import a patch straight from a mail message. Even patches
2944 You can import a patch straight from a mail message. Even patches
2944 as attachments work (to use the body part, it must have type
2945 as attachments work (to use the body part, it must have type
2945 text/plain or text/x-patch). The From and Subject headers of the email
2946 text/plain or text/x-patch). The From and Subject headers of the email
2946 message are used as the default committer and commit message. All
2947 message are used as the default committer and commit message. All
2947 text/plain body parts before the first diff are added to the commit
2948 text/plain body parts before the first diff are added to the commit
2948 message.
2949 message.
2949
2950
2950 If the imported patch was generated by :hg:`export`, user and
2951 If the imported patch was generated by :hg:`export`, user and
2951 description from patch override values from message headers and
2952 description from patch override values from message headers and
2952 body. Values given on command line with -m/--message and -u/--user
2953 body. Values given on command line with -m/--message and -u/--user
2953 override these.
2954 override these.
2954
2955
2955 If --exact is specified, import will set the working directory to
2956 If --exact is specified, import will set the working directory to
2956 the parent of each patch before applying it, and will abort if the
2957 the parent of each patch before applying it, and will abort if the
2957 resulting changeset has a different ID than the one recorded in
2958 resulting changeset has a different ID than the one recorded in
2958 the patch. This will guard against various ways that portable
2959 the patch. This will guard against various ways that portable
2959 patch formats and mail systems might fail to transfer Mercurial
2960 patch formats and mail systems might fail to transfer Mercurial
2960 data or metadata. See :hg:`bundle` for lossless transmission.
2961 data or metadata. See :hg:`bundle` for lossless transmission.
2961
2962
2962 Use --partial to ensure a changeset will be created from the patch
2963 Use --partial to ensure a changeset will be created from the patch
2963 even if some hunks fail to apply. Hunks that fail to apply will be
2964 even if some hunks fail to apply. Hunks that fail to apply will be
2964 written to a <target-file>.rej file. Conflicts can then be resolved
2965 written to a <target-file>.rej file. Conflicts can then be resolved
2965 by hand before :hg:`commit --amend` is run to update the created
2966 by hand before :hg:`commit --amend` is run to update the created
2966 changeset. This flag exists to let people import patches that
2967 changeset. This flag exists to let people import patches that
2967 partially apply without losing the associated metadata (author,
2968 partially apply without losing the associated metadata (author,
2968 date, description, ...).
2969 date, description, ...).
2969
2970
2970 .. note::
2971 .. note::
2971
2972
2972 When no hunks apply cleanly, :hg:`import --partial` will create
2973 When no hunks apply cleanly, :hg:`import --partial` will create
2973 an empty changeset, importing only the patch metadata.
2974 an empty changeset, importing only the patch metadata.
2974
2975
2975 With -s/--similarity, hg will attempt to discover renames and
2976 With -s/--similarity, hg will attempt to discover renames and
2976 copies in the patch in the same way as :hg:`addremove`.
2977 copies in the patch in the same way as :hg:`addremove`.
2977
2978
2978 It is possible to use external patch programs to apply the patch
2979 It is possible to use external patch programs to apply the patch
2979 by setting the ``ui.patch`` configuration option. For the default
2980 by setting the ``ui.patch`` configuration option. For the default
2980 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2981 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2981 See :hg:`help config` for more information about configuration
2982 See :hg:`help config` for more information about configuration
2982 files and how to use these options.
2983 files and how to use these options.
2983
2984
2984 See :hg:`help dates` for a list of formats valid for -d/--date.
2985 See :hg:`help dates` for a list of formats valid for -d/--date.
2985
2986
2986 .. container:: verbose
2987 .. container:: verbose
2987
2988
2988 Examples:
2989 Examples:
2989
2990
2990 - import a traditional patch from a website and detect renames::
2991 - import a traditional patch from a website and detect renames::
2991
2992
2992 hg import -s 80 http://example.com/bugfix.patch
2993 hg import -s 80 http://example.com/bugfix.patch
2993
2994
2994 - import a changeset from an hgweb server::
2995 - import a changeset from an hgweb server::
2995
2996
2996 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2997 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2997
2998
2998 - import all the patches in a Unix-style mbox::
2999 - import all the patches in a Unix-style mbox::
2999
3000
3000 hg import incoming-patches.mbox
3001 hg import incoming-patches.mbox
3001
3002
3002 - import patches from stdin::
3003 - import patches from stdin::
3003
3004
3004 hg import -
3005 hg import -
3005
3006
3006 - attempt to exactly restore an exported changeset (not always
3007 - attempt to exactly restore an exported changeset (not always
3007 possible)::
3008 possible)::
3008
3009
3009 hg import --exact proposed-fix.patch
3010 hg import --exact proposed-fix.patch
3010
3011
3011 - use an external tool to apply a patch which is too fuzzy for
3012 - use an external tool to apply a patch which is too fuzzy for
3012 the default internal tool::
3013 the default internal tool::
3013
3014
3014 hg import --config ui.patch="patch --merge" fuzzy.patch
3015 hg import --config ui.patch="patch --merge" fuzzy.patch
3015
3016
3016 - change the default fuzzing from 2 to a less strict 7::
3017 - change the default fuzzing from 2 to a less strict 7::
3017
3018
3018 hg import --config patch.fuzz=7 fuzz.patch
3019 hg import --config patch.fuzz=7 fuzz.patch
3019
3020
3020 Returns 0 on success, 1 on partial success (see --partial).
3021 Returns 0 on success, 1 on partial success (see --partial).
3021 """
3022 """
3022
3023
3023 opts = pycompat.byteskwargs(opts)
3024 opts = pycompat.byteskwargs(opts)
3024 if not patch1:
3025 if not patch1:
3025 raise error.Abort(_('need at least one patch to import'))
3026 raise error.Abort(_('need at least one patch to import'))
3026
3027
3027 patches = (patch1,) + patches
3028 patches = (patch1,) + patches
3028
3029
3029 date = opts.get('date')
3030 date = opts.get('date')
3030 if date:
3031 if date:
3031 opts['date'] = dateutil.parsedate(date)
3032 opts['date'] = dateutil.parsedate(date)
3032
3033
3033 exact = opts.get('exact')
3034 exact = opts.get('exact')
3034 update = not opts.get('bypass')
3035 update = not opts.get('bypass')
3035 if not update and opts.get('no_commit'):
3036 if not update and opts.get('no_commit'):
3036 raise error.Abort(_('cannot use --no-commit with --bypass'))
3037 raise error.Abort(_('cannot use --no-commit with --bypass'))
3037 try:
3038 try:
3038 sim = float(opts.get('similarity') or 0)
3039 sim = float(opts.get('similarity') or 0)
3039 except ValueError:
3040 except ValueError:
3040 raise error.Abort(_('similarity must be a number'))
3041 raise error.Abort(_('similarity must be a number'))
3041 if sim < 0 or sim > 100:
3042 if sim < 0 or sim > 100:
3042 raise error.Abort(_('similarity must be between 0 and 100'))
3043 raise error.Abort(_('similarity must be between 0 and 100'))
3043 if sim and not update:
3044 if sim and not update:
3044 raise error.Abort(_('cannot use --similarity with --bypass'))
3045 raise error.Abort(_('cannot use --similarity with --bypass'))
3045 if exact:
3046 if exact:
3046 if opts.get('edit'):
3047 if opts.get('edit'):
3047 raise error.Abort(_('cannot use --exact with --edit'))
3048 raise error.Abort(_('cannot use --exact with --edit'))
3048 if opts.get('prefix'):
3049 if opts.get('prefix'):
3049 raise error.Abort(_('cannot use --exact with --prefix'))
3050 raise error.Abort(_('cannot use --exact with --prefix'))
3050
3051
3051 base = opts["base"]
3052 base = opts["base"]
3052 wlock = dsguard = lock = tr = None
3053 wlock = dsguard = lock = tr = None
3053 msgs = []
3054 msgs = []
3054 ret = 0
3055 ret = 0
3055
3056
3056
3057
3057 try:
3058 try:
3058 wlock = repo.wlock()
3059 wlock = repo.wlock()
3059
3060
3060 if update:
3061 if update:
3061 cmdutil.checkunfinished(repo)
3062 cmdutil.checkunfinished(repo)
3062 if (exact or not opts.get('force')):
3063 if (exact or not opts.get('force')):
3063 cmdutil.bailifchanged(repo)
3064 cmdutil.bailifchanged(repo)
3064
3065
3065 if not opts.get('no_commit'):
3066 if not opts.get('no_commit'):
3066 lock = repo.lock()
3067 lock = repo.lock()
3067 tr = repo.transaction('import')
3068 tr = repo.transaction('import')
3068 else:
3069 else:
3069 dsguard = dirstateguard.dirstateguard(repo, 'import')
3070 dsguard = dirstateguard.dirstateguard(repo, 'import')
3070 parents = repo[None].parents()
3071 parents = repo[None].parents()
3071 for patchurl in patches:
3072 for patchurl in patches:
3072 if patchurl == '-':
3073 if patchurl == '-':
3073 ui.status(_('applying patch from stdin\n'))
3074 ui.status(_('applying patch from stdin\n'))
3074 patchfile = ui.fin
3075 patchfile = ui.fin
3075 patchurl = 'stdin' # for error message
3076 patchurl = 'stdin' # for error message
3076 else:
3077 else:
3077 patchurl = os.path.join(base, patchurl)
3078 patchurl = os.path.join(base, patchurl)
3078 ui.status(_('applying %s\n') % patchurl)
3079 ui.status(_('applying %s\n') % patchurl)
3079 patchfile = hg.openpath(ui, patchurl)
3080 patchfile = hg.openpath(ui, patchurl)
3080
3081
3081 haspatch = False
3082 haspatch = False
3082 for hunk in patch.split(patchfile):
3083 for hunk in patch.split(patchfile):
3083 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3084 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3084 parents, opts,
3085 parents, opts,
3085 msgs, hg.clean)
3086 msgs, hg.clean)
3086 if msg:
3087 if msg:
3087 haspatch = True
3088 haspatch = True
3088 ui.note(msg + '\n')
3089 ui.note(msg + '\n')
3089 if update or exact:
3090 if update or exact:
3090 parents = repo[None].parents()
3091 parents = repo[None].parents()
3091 else:
3092 else:
3092 parents = [repo[node]]
3093 parents = [repo[node]]
3093 if rej:
3094 if rej:
3094 ui.write_err(_("patch applied partially\n"))
3095 ui.write_err(_("patch applied partially\n"))
3095 ui.write_err(_("(fix the .rej files and run "
3096 ui.write_err(_("(fix the .rej files and run "
3096 "`hg commit --amend`)\n"))
3097 "`hg commit --amend`)\n"))
3097 ret = 1
3098 ret = 1
3098 break
3099 break
3099
3100
3100 if not haspatch:
3101 if not haspatch:
3101 raise error.Abort(_('%s: no diffs found') % patchurl)
3102 raise error.Abort(_('%s: no diffs found') % patchurl)
3102
3103
3103 if tr:
3104 if tr:
3104 tr.close()
3105 tr.close()
3105 if msgs:
3106 if msgs:
3106 repo.savecommitmessage('\n* * *\n'.join(msgs))
3107 repo.savecommitmessage('\n* * *\n'.join(msgs))
3107 if dsguard:
3108 if dsguard:
3108 dsguard.close()
3109 dsguard.close()
3109 return ret
3110 return ret
3110 finally:
3111 finally:
3111 if tr:
3112 if tr:
3112 tr.release()
3113 tr.release()
3113 release(lock, dsguard, wlock)
3114 release(lock, dsguard, wlock)
3114
3115
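# Illustrative sketch (not part of commands.py): the "import patches from
# stdin" example above, driven from Python. It assumes an `hg` executable on
# PATH, a repository in the current directory, and a readable patch file.
import subprocess

def import_patch(patch_path):
    # "-" makes `hg import` read the patch from standard input; check_call
    # raises CalledProcessError on a non-zero exit status.
    with open(patch_path, 'rb') as fp:
        subprocess.check_call(['hg', 'import', '-'], stdin=fp)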
3115 @command('incoming|in',
3116 @command('incoming|in',
3116 [('f', 'force', None,
3117 [('f', 'force', None,
3117 _('run even if remote repository is unrelated')),
3118 _('run even if remote repository is unrelated')),
3118 ('n', 'newest-first', None, _('show newest record first')),
3119 ('n', 'newest-first', None, _('show newest record first')),
3119 ('', 'bundle', '',
3120 ('', 'bundle', '',
3120 _('file to store the bundles into'), _('FILE')),
3121 _('file to store the bundles into'), _('FILE')),
3121 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3122 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3122 ('B', 'bookmarks', False, _("compare bookmarks")),
3123 ('B', 'bookmarks', False, _("compare bookmarks")),
3123 ('b', 'branch', [],
3124 ('b', 'branch', [],
3124 _('a specific branch you would like to pull'), _('BRANCH')),
3125 _('a specific branch you would like to pull'), _('BRANCH')),
3125 ] + logopts + remoteopts + subrepoopts,
3126 ] + logopts + remoteopts + subrepoopts,
3126 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3127 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3127 def incoming(ui, repo, source="default", **opts):
3128 def incoming(ui, repo, source="default", **opts):
3128 """show new changesets found in source
3129 """show new changesets found in source
3129
3130
3130 Show new changesets found in the specified path/URL or the default
3131 Show new changesets found in the specified path/URL or the default
3131 pull location. These are the changesets that would have been pulled
3132 pull location. These are the changesets that would have been pulled
3132 by :hg:`pull` at the time you issued this command.
3133 by :hg:`pull` at the time you issued this command.
3133
3134
3134 See pull for valid source format details.
3135 See pull for valid source format details.
3135
3136
3136 .. container:: verbose
3137 .. container:: verbose
3137
3138
3138 With -B/--bookmarks, the result of bookmark comparison between
3139 With -B/--bookmarks, the result of bookmark comparison between
3139 local and remote repositories is displayed. With -v/--verbose,
3140 local and remote repositories is displayed. With -v/--verbose,
3140 status is also displayed for each bookmark like below::
3141 status is also displayed for each bookmark like below::
3141
3142
3142 BM1 01234567890a added
3143 BM1 01234567890a added
3143 BM2 1234567890ab advanced
3144 BM2 1234567890ab advanced
3144 BM3 234567890abc diverged
3145 BM3 234567890abc diverged
3145 BM4 34567890abcd changed
3146 BM4 34567890abcd changed
3146
3147
3147 The action taken locally when pulling depends on the
3148 The action taken locally when pulling depends on the
3148 status of each bookmark:
3149 status of each bookmark:
3149
3150
3150 :``added``: pull will create it
3151 :``added``: pull will create it
3151 :``advanced``: pull will update it
3152 :``advanced``: pull will update it
3152 :``diverged``: pull will create a divergent bookmark
3153 :``diverged``: pull will create a divergent bookmark
3153 :``changed``: result depends on remote changesets
3154 :``changed``: result depends on remote changesets
3154
3155
3155 From the point of view of pulling behavior, bookmarks
3156 From the point of view of pulling behavior, bookmarks
3156 existing only in the remote repository are treated as ``added``,
3157 existing only in the remote repository are treated as ``added``,
3157 even if they are in fact locally deleted.
3158 even if they are in fact locally deleted.
3158
3159
3159 .. container:: verbose
3160 .. container:: verbose
3160
3161
3161 For a remote repository, using --bundle avoids downloading the
3162 For a remote repository, using --bundle avoids downloading the
3162 changesets twice if the incoming command is followed by a pull.
3163 changesets twice if the incoming command is followed by a pull.
3163
3164
3164 Examples:
3165 Examples:
3165
3166
3166 - show incoming changes with patches and full description::
3167 - show incoming changes with patches and full description::
3167
3168
3168 hg incoming -vp
3169 hg incoming -vp
3169
3170
3170 - show incoming changes excluding merges, store a bundle::
3171 - show incoming changes excluding merges, store a bundle::
3171
3172
3172 hg in -vpM --bundle incoming.hg
3173 hg in -vpM --bundle incoming.hg
3173 hg pull incoming.hg
3174 hg pull incoming.hg
3174
3175
3175 - briefly list changes inside a bundle::
3176 - briefly list changes inside a bundle::
3176
3177
3177 hg in changes.hg -T "{desc|firstline}\\n"
3178 hg in changes.hg -T "{desc|firstline}\\n"
3178
3179
3179 Returns 0 if there are incoming changes, 1 otherwise.
3180 Returns 0 if there are incoming changes, 1 otherwise.
3180 """
3181 """
3181 opts = pycompat.byteskwargs(opts)
3182 opts = pycompat.byteskwargs(opts)
3182 if opts.get('graph'):
3183 if opts.get('graph'):
3183 logcmdutil.checkunsupportedgraphflags([], opts)
3184 logcmdutil.checkunsupportedgraphflags([], opts)
3184 def display(other, chlist, displayer):
3185 def display(other, chlist, displayer):
3185 revdag = logcmdutil.graphrevs(other, chlist, opts)
3186 revdag = logcmdutil.graphrevs(other, chlist, opts)
3186 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3187 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3187 graphmod.asciiedges)
3188 graphmod.asciiedges)
3188
3189
3189 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3190 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3190 return 0
3191 return 0
3191
3192
3192 if opts.get('bundle') and opts.get('subrepos'):
3193 if opts.get('bundle') and opts.get('subrepos'):
3193 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3194 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3194
3195
3195 if opts.get('bookmarks'):
3196 if opts.get('bookmarks'):
3196 source, branches = hg.parseurl(ui.expandpath(source),
3197 source, branches = hg.parseurl(ui.expandpath(source),
3197 opts.get('branch'))
3198 opts.get('branch'))
3198 other = hg.peer(repo, opts, source)
3199 other = hg.peer(repo, opts, source)
3199 if 'bookmarks' not in other.listkeys('namespaces'):
3200 if 'bookmarks' not in other.listkeys('namespaces'):
3200 ui.warn(_("remote doesn't support bookmarks\n"))
3201 ui.warn(_("remote doesn't support bookmarks\n"))
3201 return 0
3202 return 0
3202 ui.pager('incoming')
3203 ui.pager('incoming')
3203 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3204 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3204 return bookmarks.incoming(ui, repo, other)
3205 return bookmarks.incoming(ui, repo, other)
3205
3206
3206 repo._subtoppath = ui.expandpath(source)
3207 repo._subtoppath = ui.expandpath(source)
3207 try:
3208 try:
3208 return hg.incoming(ui, repo, source, opts)
3209 return hg.incoming(ui, repo, source, opts)
3209 finally:
3210 finally:
3210 del repo._subtoppath
3211 del repo._subtoppath
3211
3212
3212
3213
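# Illustrative sketch (not part of commands.py) of the --bundle workflow
# described above: store incoming changesets in a bundle, then pull from that
# bundle so they are not downloaded twice. Assumes `hg` on PATH and a
# repository in the current directory; the file and source names are only
# placeholders.
import subprocess

def pull_via_bundle(source='default', bundle='incoming.hg'):
    # `hg incoming` exits with 0 when there are incoming changes, 1 otherwise.
    if subprocess.call(['hg', 'incoming', '--bundle', bundle, source]) == 0:
        subprocess.check_call(['hg', 'pull', bundle])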
3213 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3214 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3214 norepo=True)
3215 norepo=True)
3215 def init(ui, dest=".", **opts):
3216 def init(ui, dest=".", **opts):
3216 """create a new repository in the given directory
3217 """create a new repository in the given directory
3217
3218
3218 Initialize a new repository in the given directory. If the given
3219 Initialize a new repository in the given directory. If the given
3219 directory does not exist, it will be created.
3220 directory does not exist, it will be created.
3220
3221
3221 If no directory is given, the current directory is used.
3222 If no directory is given, the current directory is used.
3222
3223
3223 It is possible to specify an ``ssh://`` URL as the destination.
3224 It is possible to specify an ``ssh://`` URL as the destination.
3224 See :hg:`help urls` for more information.
3225 See :hg:`help urls` for more information.
3225
3226
3226 Returns 0 on success.
3227 Returns 0 on success.
3227 """
3228 """
3228 opts = pycompat.byteskwargs(opts)
3229 opts = pycompat.byteskwargs(opts)
3229 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3230 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3230
3231
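# Illustrative sketch (not part of commands.py): create a fresh repository in
# a scratch directory, as described in the init docstring above. Assumes `hg`
# is on PATH; the directory prefix is only a placeholder.
import subprocess
import tempfile

def make_scratch_repo():
    dest = tempfile.mkdtemp(prefix='hg-scratch-')
    subprocess.check_call(['hg', 'init', dest])  # exits 0 on success
    return dest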
3231 @command('locate',
3232 @command('locate',
3232 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3233 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3233 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3234 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3234 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3235 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3235 ] + walkopts,
3236 ] + walkopts,
3236 _('[OPTION]... [PATTERN]...'))
3237 _('[OPTION]... [PATTERN]...'))
3237 def locate(ui, repo, *pats, **opts):
3238 def locate(ui, repo, *pats, **opts):
3238 """locate files matching specific patterns (DEPRECATED)
3239 """locate files matching specific patterns (DEPRECATED)
3239
3240
3240 Print files under Mercurial control in the working directory whose
3241 Print files under Mercurial control in the working directory whose
3241 names match the given patterns.
3242 names match the given patterns.
3242
3243
3243 By default, this command searches all directories in the working
3244 By default, this command searches all directories in the working
3244 directory. To search just the current directory and its
3245 directory. To search just the current directory and its
3245 subdirectories, use "--include .".
3246 subdirectories, use "--include .".
3246
3247
3247 If no patterns are given to match, this command prints the names
3248 If no patterns are given to match, this command prints the names
3248 of all files under Mercurial control in the working directory.
3249 of all files under Mercurial control in the working directory.
3249
3250
3250 If you want to feed the output of this command into the "xargs"
3251 If you want to feed the output of this command into the "xargs"
3251 command, use the -0 option to both this command and "xargs". This
3252 command, use the -0 option to both this command and "xargs". This
3252 will avoid the problem of "xargs" treating single filenames that
3253 will avoid the problem of "xargs" treating single filenames that
3253 contain whitespace as multiple filenames.
3254 contain whitespace as multiple filenames.
3254
3255
3255 See :hg:`help files` for a more versatile command.
3256 See :hg:`help files` for a more versatile command.
3256
3257
3257 Returns 0 if a match is found, 1 otherwise.
3258 Returns 0 if a match is found, 1 otherwise.
3258 """
3259 """
3259 opts = pycompat.byteskwargs(opts)
3260 opts = pycompat.byteskwargs(opts)
3260 if opts.get('print0'):
3261 if opts.get('print0'):
3261 end = '\0'
3262 end = '\0'
3262 else:
3263 else:
3263 end = '\n'
3264 end = '\n'
3264 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3265 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3265
3266
3266 ret = 1
3267 ret = 1
3267 m = scmutil.match(ctx, pats, opts, default='relglob',
3268 m = scmutil.match(ctx, pats, opts, default='relglob',
3268 badfn=lambda x, y: False)
3269 badfn=lambda x, y: False)
3269
3270
3270 ui.pager('locate')
3271 ui.pager('locate')
3271 for abs in ctx.matches(m):
3272 for abs in ctx.matches(m):
3272 if opts.get('fullpath'):
3273 if opts.get('fullpath'):
3273 ui.write(repo.wjoin(abs), end)
3274 ui.write(repo.wjoin(abs), end)
3274 else:
3275 else:
3275 ui.write(((pats and m.rel(abs)) or abs), end)
3276 ui.write(((pats and m.rel(abs)) or abs), end)
3276 ret = 0
3277 ret = 0
3277
3278
3278 return ret
3279 return ret
3279
3280
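# Illustrative sketch (not part of commands.py) of the -0/--print0 behaviour
# described above: NUL-separated output can be split safely even when file
# names contain whitespace. Assumes `hg` on PATH and a repository in the
# current directory; note that `hg locate` exits with 1 (raising
# CalledProcessError here) when nothing matches.
import subprocess

def locate0(*patterns):
    out = subprocess.check_output(['hg', 'locate', '-0'] + list(patterns))
    # Drop the empty entry produced by the trailing NUL, if any.
    return [name for name in out.split(b'\0') if name]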
3280 @command('^log|history',
3281 @command('^log|history',
3281 [('f', 'follow', None,
3282 [('f', 'follow', None,
3282 _('follow changeset history, or file history across copies and renames')),
3283 _('follow changeset history, or file history across copies and renames')),
3283 ('', 'follow-first', None,
3284 ('', 'follow-first', None,
3284 _('only follow the first parent of merge changesets (DEPRECATED)')),
3285 _('only follow the first parent of merge changesets (DEPRECATED)')),
3285 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3286 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3286 ('C', 'copies', None, _('show copied files')),
3287 ('C', 'copies', None, _('show copied files')),
3287 ('k', 'keyword', [],
3288 ('k', 'keyword', [],
3288 _('do case-insensitive search for a given text'), _('TEXT')),
3289 _('do case-insensitive search for a given text'), _('TEXT')),
3289 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3290 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3290 ('L', 'line-range', [],
3291 ('L', 'line-range', [],
3291 _('follow line range of specified file (EXPERIMENTAL)'),
3292 _('follow line range of specified file (EXPERIMENTAL)'),
3292 _('FILE,RANGE')),
3293 _('FILE,RANGE')),
3293 ('', 'removed', None, _('include revisions where files were removed')),
3294 ('', 'removed', None, _('include revisions where files were removed')),
3294 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3295 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3295 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3296 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3296 ('', 'only-branch', [],
3297 ('', 'only-branch', [],
3297 _('show only changesets within the given named branch (DEPRECATED)'),
3298 _('show only changesets within the given named branch (DEPRECATED)'),
3298 _('BRANCH')),
3299 _('BRANCH')),
3299 ('b', 'branch', [],
3300 ('b', 'branch', [],
3300 _('show changesets within the given named branch'), _('BRANCH')),
3301 _('show changesets within the given named branch'), _('BRANCH')),
3301 ('P', 'prune', [],
3302 ('P', 'prune', [],
3302 _('do not display revision or any of its ancestors'), _('REV')),
3303 _('do not display revision or any of its ancestors'), _('REV')),
3303 ] + logopts + walkopts,
3304 ] + logopts + walkopts,
3304 _('[OPTION]... [FILE]'),
3305 _('[OPTION]... [FILE]'),
3305 inferrepo=True, cmdtype=readonly)
3306 inferrepo=True, cmdtype=readonly)
3306 def log(ui, repo, *pats, **opts):
3307 def log(ui, repo, *pats, **opts):
3307 """show revision history of entire repository or files
3308 """show revision history of entire repository or files
3308
3309
3309 Print the revision history of the specified files or the entire
3310 Print the revision history of the specified files or the entire
3310 project.
3311 project.
3311
3312
3312 If no revision range is specified, the default is ``tip:0`` unless
3313 If no revision range is specified, the default is ``tip:0`` unless
3313 --follow is set, in which case the working directory parent is
3314 --follow is set, in which case the working directory parent is
3314 used as the starting revision.
3315 used as the starting revision.
3315
3316
3316 File history is shown without following rename or copy history of
3317 File history is shown without following rename or copy history of
3317 files. Use -f/--follow with a filename to follow history across
3318 files. Use -f/--follow with a filename to follow history across
3318 renames and copies. --follow without a filename will only show
3319 renames and copies. --follow without a filename will only show
3319 ancestors of the starting revision.
3320 ancestors of the starting revision.
3320
3321
3321 By default this command prints revision number and changeset id,
3322 By default this command prints revision number and changeset id,
3322 tags, non-trivial parents, user, date and time, and a summary for
3323 tags, non-trivial parents, user, date and time, and a summary for
3323 each commit. When the -v/--verbose switch is used, the list of
3324 each commit. When the -v/--verbose switch is used, the list of
3324 changed files and full commit message are shown.
3325 changed files and full commit message are shown.
3325
3326
3326 With --graph the revisions are shown as an ASCII art DAG with the most
3327 With --graph the revisions are shown as an ASCII art DAG with the most
3327 recent changeset at the top.
3328 recent changeset at the top.
3328 'o' is a changeset, '@' is a working directory parent, '_' closes a branch,
3329 'o' is a changeset, '@' is a working directory parent, '_' closes a branch,
3329 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
3330 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
3330 changeset from the lines below is a parent of the 'o' merge on the same
3331 changeset from the lines below is a parent of the 'o' merge on the same
3331 line.
3332 line.
3332 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3333 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3333 of a '|' indicates one or more revisions in a path are omitted.
3334 of a '|' indicates one or more revisions in a path are omitted.
3334
3335
3335 .. container:: verbose
3336 .. container:: verbose
3336
3337
3337 Use -L/--line-range FILE,M:N options to follow the history of lines
3338 Use -L/--line-range FILE,M:N options to follow the history of lines
3338 from M to N in FILE. With -p/--patch only diff hunks affecting
3339 from M to N in FILE. With -p/--patch only diff hunks affecting
3339 the specified line range will be shown. This option requires --follow;
3340 the specified line range will be shown. This option requires --follow;
3340 it can be specified multiple times. Currently, this option is not
3341 it can be specified multiple times. Currently, this option is not
3341 compatible with --graph. This option is experimental.
3342 compatible with --graph. This option is experimental.
3342
3343
3343 .. note::
3344 .. note::
3344
3345
3345 :hg:`log --patch` may generate unexpected diff output for merge
3346 :hg:`log --patch` may generate unexpected diff output for merge
3346 changesets, as it will only compare the merge changeset against
3347 changesets, as it will only compare the merge changeset against
3347 its first parent. Also, only files different from BOTH parents
3348 its first parent. Also, only files different from BOTH parents
3348 will appear in files:.
3349 will appear in files:.
3349
3350
3350 .. note::
3351 .. note::
3351
3352
3352 For performance reasons, :hg:`log FILE` may omit duplicate changes
3353 For performance reasons, :hg:`log FILE` may omit duplicate changes
3353 made on branches and will not show removals or mode changes. To
3354 made on branches and will not show removals or mode changes. To
3354 see all such changes, use the --removed switch.
3355 see all such changes, use the --removed switch.
3355
3356
3356 .. container:: verbose
3357 .. container:: verbose
3357
3358
3358 .. note::
3359 .. note::
3359
3360
3360 The history resulting from -L/--line-range options depends on diff
3361 The history resulting from -L/--line-range options depends on diff
3361 options; for instance, if whitespace is ignored, changes consisting
3362 options; for instance, if whitespace is ignored, changes consisting
3362 only of whitespace within the specified line range will not be listed.
3363 only of whitespace within the specified line range will not be listed.
3363
3364
3364 .. container:: verbose
3365 .. container:: verbose
3365
3366
3366 Some examples:
3367 Some examples:
3367
3368
3368 - changesets with full descriptions and file lists::
3369 - changesets with full descriptions and file lists::
3369
3370
3370 hg log -v
3371 hg log -v
3371
3372
3372 - changesets ancestral to the working directory::
3373 - changesets ancestral to the working directory::
3373
3374
3374 hg log -f
3375 hg log -f
3375
3376
3376 - last 10 commits on the current branch::
3377 - last 10 commits on the current branch::
3377
3378
3378 hg log -l 10 -b .
3379 hg log -l 10 -b .
3379
3380
3380 - changesets showing all modifications of a file, including removals::
3381 - changesets showing all modifications of a file, including removals::
3381
3382
3382 hg log --removed file.c
3383 hg log --removed file.c
3383
3384
3384 - all changesets that touch a directory, with diffs, excluding merges::
3385 - all changesets that touch a directory, with diffs, excluding merges::
3385
3386
3386 hg log -Mp lib/
3387 hg log -Mp lib/
3387
3388
3388 - all revision numbers that match a keyword::
3389 - all revision numbers that match a keyword::
3389
3390
3390 hg log -k bug --template "{rev}\\n"
3391 hg log -k bug --template "{rev}\\n"
3391
3392
3392 - the full hash identifier of the working directory parent::
3393 - the full hash identifier of the working directory parent::
3393
3394
3394 hg log -r . --template "{node}\\n"
3395 hg log -r . --template "{node}\\n"
3395
3396
3396 - list available log templates::
3397 - list available log templates::
3397
3398
3398 hg log -T list
3399 hg log -T list
3399
3400
3400 - check if a given changeset is included in a tagged release::
3401 - check if a given changeset is included in a tagged release::
3401
3402
3402 hg log -r "a21ccf and ancestor(1.9)"
3403 hg log -r "a21ccf and ancestor(1.9)"
3403
3404
3404 - find all changesets by some user in a date range::
3405 - find all changesets by some user in a date range::
3405
3406
3406 hg log -k alice -d "may 2008 to jul 2008"
3407 hg log -k alice -d "may 2008 to jul 2008"
3407
3408
3408 - summary of all changesets after the last tag::
3409 - summary of all changesets after the last tag::
3409
3410
3410 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3411 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3411
3412
3412 - changesets touching lines 13 to 23 for file.c::
3413 - changesets touching lines 13 to 23 for file.c::
3413
3414
3414 hg log -L file.c,13:23
3415 hg log -L file.c,13:23
3415
3416
3416 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
3417 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
3417 main.c with patch::
3418 main.c with patch::
3418
3419
3419 hg log -L file.c,13:23 -L main.c,2:6 -p
3420 hg log -L file.c,13:23 -L main.c,2:6 -p
3420
3421
3421 See :hg:`help dates` for a list of formats valid for -d/--date.
3422 See :hg:`help dates` for a list of formats valid for -d/--date.
3422
3423
3423 See :hg:`help revisions` for more about specifying and ordering
3424 See :hg:`help revisions` for more about specifying and ordering
3424 revisions.
3425 revisions.
3425
3426
3426 See :hg:`help templates` for more about pre-packaged styles and
3427 See :hg:`help templates` for more about pre-packaged styles and
3427 specifying custom templates. The default template used by the log
3428 specifying custom templates. The default template used by the log
3428 command can be customized via the ``ui.logtemplate`` configuration
3429 command can be customized via the ``ui.logtemplate`` configuration
3429 setting.
3430 setting.
3430
3431
3431 Returns 0 on success.
3432 Returns 0 on success.
3432
3433
3433 """
3434 """
3434 opts = pycompat.byteskwargs(opts)
3435 opts = pycompat.byteskwargs(opts)
3435 linerange = opts.get('line_range')
3436 linerange = opts.get('line_range')
3436
3437
3437 if linerange and not opts.get('follow'):
3438 if linerange and not opts.get('follow'):
3438 raise error.Abort(_('--line-range requires --follow'))
3439 raise error.Abort(_('--line-range requires --follow'))
3439
3440
3440 if linerange and pats:
3441 if linerange and pats:
3441 # TODO: take pats as patterns with no line-range filter
3442 # TODO: take pats as patterns with no line-range filter
3442 raise error.Abort(
3443 raise error.Abort(
3443 _('FILE arguments are not compatible with --line-range option')
3444 _('FILE arguments are not compatible with --line-range option')
3444 )
3445 )
3445
3446
3446 repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
3447 repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
3447 revs, differ = logcmdutil.getrevs(repo, pats, opts)
3448 revs, differ = logcmdutil.getrevs(repo, pats, opts)
3448 if linerange:
3449 if linerange:
3449 # TODO: should follow file history from logcmdutil._initialrevs(),
3450 # TODO: should follow file history from logcmdutil._initialrevs(),
3450 # then filter the result by logcmdutil._makerevset() and --limit
3451 # then filter the result by logcmdutil._makerevset() and --limit
3451 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
3452 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
3452
3453
3453 getrenamed = None
3454 getrenamed = None
3454 if opts.get('copies'):
3455 if opts.get('copies'):
3455 endrev = None
3456 endrev = None
3456 if opts.get('rev'):
3457 if opts.get('rev'):
3457 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3458 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3458 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3459 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3459
3460
3460 ui.pager('log')
3461 ui.pager('log')
3461 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
3462 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
3462 buffered=True)
3463 buffered=True)
3463 if opts.get('graph'):
3464 if opts.get('graph'):
3464 displayfn = logcmdutil.displaygraphrevs
3465 displayfn = logcmdutil.displaygraphrevs
3465 else:
3466 else:
3466 displayfn = logcmdutil.displayrevs
3467 displayfn = logcmdutil.displayrevs
3467 displayfn(ui, repo, revs, displayer, getrenamed)
3468 displayfn(ui, repo, revs, displayer, getrenamed)
3468
3469
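# Illustrative sketch (not part of commands.py): collect full hash
# identifiers for a revset with a template, mirroring the
# `hg log --template "{node}\n"` examples above. Assumes `hg` on PATH and a
# repository in the current directory; the default revset is only an example.
import subprocess

def nodes_for(revset='last(tagged())::'):
    out = subprocess.check_output(
        ['hg', 'log', '-r', revset, '--template', '{node}\n'])
    return out.decode('ascii').splitlines()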
3469 @command('manifest',
3470 @command('manifest',
3470 [('r', 'rev', '', _('revision to display'), _('REV')),
3471 [('r', 'rev', '', _('revision to display'), _('REV')),
3471 ('', 'all', False, _("list files from all revisions"))]
3472 ('', 'all', False, _("list files from all revisions"))]
3472 + formatteropts,
3473 + formatteropts,
3473 _('[-r REV]'), cmdtype=readonly)
3474 _('[-r REV]'), cmdtype=readonly)
3474 def manifest(ui, repo, node=None, rev=None, **opts):
3475 def manifest(ui, repo, node=None, rev=None, **opts):
3475 """output the current or given revision of the project manifest
3476 """output the current or given revision of the project manifest
3476
3477
3477 Print a list of version controlled files for the given revision.
3478 Print a list of version controlled files for the given revision.
3478 If no revision is given, the first parent of the working directory
3479 If no revision is given, the first parent of the working directory
3479 is used, or the null revision if no revision is checked out.
3480 is used, or the null revision if no revision is checked out.
3480
3481
3481 With -v, print file permissions, symlink and executable bits.
3482 With -v, print file permissions, symlink and executable bits.
3482 With --debug, print file revision hashes.
3483 With --debug, print file revision hashes.
3483
3484
3484 If option --all is specified, the list of all files from all revisions
3485 If option --all is specified, the list of all files from all revisions
3485 is printed. This includes deleted and renamed files.
3486 is printed. This includes deleted and renamed files.
3486
3487
3487 Returns 0 on success.
3488 Returns 0 on success.
3488 """
3489 """
3489 opts = pycompat.byteskwargs(opts)
3490 opts = pycompat.byteskwargs(opts)
3490 fm = ui.formatter('manifest', opts)
3491 fm = ui.formatter('manifest', opts)
3491
3492
3492 if opts.get('all'):
3493 if opts.get('all'):
3493 if rev or node:
3494 if rev or node:
3494 raise error.Abort(_("can't specify a revision with --all"))
3495 raise error.Abort(_("can't specify a revision with --all"))
3495
3496
3496 res = []
3497 res = []
3497 prefix = "data/"
3498 prefix = "data/"
3498 suffix = ".i"
3499 suffix = ".i"
3499 plen = len(prefix)
3500 plen = len(prefix)
3500 slen = len(suffix)
3501 slen = len(suffix)
3501 with repo.lock():
3502 with repo.lock():
3502 for fn, b, size in repo.store.datafiles():
3503 for fn, b, size in repo.store.datafiles():
3503 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3504 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3504 res.append(fn[plen:-slen])
3505 res.append(fn[plen:-slen])
3505 ui.pager('manifest')
3506 ui.pager('manifest')
3506 for f in res:
3507 for f in res:
3507 fm.startitem()
3508 fm.startitem()
3508 fm.write("path", '%s\n', f)
3509 fm.write("path", '%s\n', f)
3509 fm.end()
3510 fm.end()
3510 return
3511 return
3511
3512
3512 if rev and node:
3513 if rev and node:
3513 raise error.Abort(_("please specify just one revision"))
3514 raise error.Abort(_("please specify just one revision"))
3514
3515
3515 if not node:
3516 if not node:
3516 node = rev
3517 node = rev
3517
3518
3518 char = {'l': '@', 'x': '*', '': '', 't': 'd'}
3519 char = {'l': '@', 'x': '*', '': '', 't': 'd'}
3519 mode = {'l': '644', 'x': '755', '': '644', 't': '755'}
3520 mode = {'l': '644', 'x': '755', '': '644', 't': '755'}
3520 if node:
3521 if node:
3521 repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
3522 repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
3522 ctx = scmutil.revsingle(repo, node)
3523 ctx = scmutil.revsingle(repo, node)
3523 mf = ctx.manifest()
3524 mf = ctx.manifest()
3524 ui.pager('manifest')
3525 ui.pager('manifest')
3525 for f in ctx:
3526 for f in ctx:
3526 fm.startitem()
3527 fm.startitem()
3527 fl = ctx[f].flags()
3528 fl = ctx[f].flags()
3528 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3529 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3529 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3530 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3530 fm.write('path', '%s\n', f)
3531 fm.write('path', '%s\n', f)
3531 fm.end()
3532 fm.end()
3532
3533
3533 @command('^merge',
3534 @command('^merge',
3534 [('f', 'force', None,
3535 [('f', 'force', None,
3535 _('force a merge including outstanding changes (DEPRECATED)')),
3536 _('force a merge including outstanding changes (DEPRECATED)')),
3536 ('r', 'rev', '', _('revision to merge'), _('REV')),
3537 ('r', 'rev', '', _('revision to merge'), _('REV')),
3537 ('P', 'preview', None,
3538 ('P', 'preview', None,
3538 _('review revisions to merge (no merge is performed)')),
3539 _('review revisions to merge (no merge is performed)')),
3539 ('', 'abort', None, _('abort the ongoing merge')),
3540 ('', 'abort', None, _('abort the ongoing merge')),
3540 ] + mergetoolopts,
3541 ] + mergetoolopts,
3541 _('[-P] [[-r] REV]'))
3542 _('[-P] [[-r] REV]'))
3542 def merge(ui, repo, node=None, **opts):
3543 def merge(ui, repo, node=None, **opts):
3543 """merge another revision into working directory
3544 """merge another revision into working directory
3544
3545
3545 The current working directory is updated with all changes made in
3546 The current working directory is updated with all changes made in
3546 the requested revision since the last common predecessor revision.
3547 the requested revision since the last common predecessor revision.
3547
3548
3548 Files that changed between either parent are marked as changed for
3549 Files that changed between either parent are marked as changed for
3549 the next commit and a commit must be performed before any further
3550 the next commit and a commit must be performed before any further
3550 updates to the repository are allowed. The next commit will have
3551 updates to the repository are allowed. The next commit will have
3551 two parents.
3552 two parents.
3552
3553
3553 ``--tool`` can be used to specify the merge tool used for file
3554 ``--tool`` can be used to specify the merge tool used for file
3554 merges. It overrides the HGMERGE environment variable and your
3555 merges. It overrides the HGMERGE environment variable and your
3555 configuration files. See :hg:`help merge-tools` for options.
3556 configuration files. See :hg:`help merge-tools` for options.
3556
3557
3557 If no revision is specified, the working directory's parent is a
3558 If no revision is specified, the working directory's parent is a
3558 head revision, and the current branch contains exactly one other
3559 head revision, and the current branch contains exactly one other
3559 head, the other head is merged with by default. Otherwise, an
3560 head, the other head is merged with by default. Otherwise, an
3560 explicit revision with which to merge must be provided.
3561 explicit revision with which to merge must be provided.
3561
3562
3562 See :hg:`help resolve` for information on handling file conflicts.
3563 See :hg:`help resolve` for information on handling file conflicts.
3563
3564
3564 To undo an uncommitted merge, use :hg:`merge --abort` which
3565 To undo an uncommitted merge, use :hg:`merge --abort` which
3565 will check out a clean copy of the original merge parent, losing
3566 will check out a clean copy of the original merge parent, losing
3566 all changes.
3567 all changes.
3567
3568
3568 Returns 0 on success, 1 if there are unresolved files.
3569 Returns 0 on success, 1 if there are unresolved files.
3569 """
3570 """
3570
3571
3571 opts = pycompat.byteskwargs(opts)
3572 opts = pycompat.byteskwargs(opts)
3572 abort = opts.get('abort')
3573 abort = opts.get('abort')
3573 if abort and repo.dirstate.p2() == nullid:
3574 if abort and repo.dirstate.p2() == nullid:
3574 cmdutil.wrongtooltocontinue(repo, _('merge'))
3575 cmdutil.wrongtooltocontinue(repo, _('merge'))
3575 if abort:
3576 if abort:
3576 if node:
3577 if node:
3577 raise error.Abort(_("cannot specify a node with --abort"))
3578 raise error.Abort(_("cannot specify a node with --abort"))
3578 if opts.get('rev'):
3579 if opts.get('rev'):
3579 raise error.Abort(_("cannot specify both --rev and --abort"))
3580 raise error.Abort(_("cannot specify both --rev and --abort"))
3580 if opts.get('preview'):
3581 if opts.get('preview'):
3581 raise error.Abort(_("cannot specify --preview with --abort"))
3582 raise error.Abort(_("cannot specify --preview with --abort"))
3582 if opts.get('rev') and node:
3583 if opts.get('rev') and node:
3583 raise error.Abort(_("please specify just one revision"))
3584 raise error.Abort(_("please specify just one revision"))
3584 if not node:
3585 if not node:
3585 node = opts.get('rev')
3586 node = opts.get('rev')
3586
3587
3587 if node:
3588 if node:
3588 node = scmutil.revsingle(repo, node).node()
3589 node = scmutil.revsingle(repo, node).node()
3589
3590
3590 if not node and not abort:
3591 if not node and not abort:
3591 node = repo[destutil.destmerge(repo)].node()
3592 node = repo[destutil.destmerge(repo)].node()
3592
3593
3593 if opts.get('preview'):
3594 if opts.get('preview'):
3594 # find nodes that are ancestors of p2 but not of p1
3595 # find nodes that are ancestors of p2 but not of p1
3595 p1 = repo.lookup('.')
3596 p1 = repo.lookup('.')
3596 p2 = repo.lookup(node)
3597 p2 = repo.lookup(node)
3597 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3598 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3598
3599
3599 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3600 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3600 for node in nodes:
3601 for node in nodes:
3601 displayer.show(repo[node])
3602 displayer.show(repo[node])
3602 displayer.close()
3603 displayer.close()
3603 return 0
3604 return 0
3604
3605
3605 try:
3606 try:
3606 # ui.forcemerge is an internal variable, do not document
3607 # ui.forcemerge is an internal variable, do not document
3607 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3608 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3608 force = opts.get('force')
3609 force = opts.get('force')
3609 labels = ['working copy', 'merge rev']
3610 labels = ['working copy', 'merge rev']
3610 return hg.merge(repo, node, force=force, mergeforce=force,
3611 return hg.merge(repo, node, force=force, mergeforce=force,
3611 labels=labels, abort=abort)
3612 labels=labels, abort=abort)
3612 finally:
3613 finally:
3613 ui.setconfig('ui', 'forcemerge', '', 'merge')
3614 ui.setconfig('ui', 'forcemerge', '', 'merge')
3614
3615
3615 @command('outgoing|out',
3616 @command('outgoing|out',
3616 [('f', 'force', None, _('run even when the destination is unrelated')),
3617 [('f', 'force', None, _('run even when the destination is unrelated')),
3617 ('r', 'rev', [],
3618 ('r', 'rev', [],
3618 _('a changeset intended to be included in the destination'), _('REV')),
3619 _('a changeset intended to be included in the destination'), _('REV')),
3619 ('n', 'newest-first', None, _('show newest record first')),
3620 ('n', 'newest-first', None, _('show newest record first')),
3620 ('B', 'bookmarks', False, _('compare bookmarks')),
3621 ('B', 'bookmarks', False, _('compare bookmarks')),
3621 ('b', 'branch', [], _('a specific branch you would like to push'),
3622 ('b', 'branch', [], _('a specific branch you would like to push'),
3622 _('BRANCH')),
3623 _('BRANCH')),
3623 ] + logopts + remoteopts + subrepoopts,
3624 ] + logopts + remoteopts + subrepoopts,
3624 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
3625 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
3625 def outgoing(ui, repo, dest=None, **opts):
3626 def outgoing(ui, repo, dest=None, **opts):
3626 """show changesets not found in the destination
3627 """show changesets not found in the destination
3627
3628
3628 Show changesets not found in the specified destination repository
3629 Show changesets not found in the specified destination repository
3629 or the default push location. These are the changesets that would
3630 or the default push location. These are the changesets that would
3630 be pushed if a push was requested.
3631 be pushed if a push was requested.
3631
3632
3632 See pull for details of valid destination formats.
3633 See pull for details of valid destination formats.
3633
3634
3634 .. container:: verbose
3635 .. container:: verbose
3635
3636
3636 With -B/--bookmarks, the result of bookmark comparison between
3637 With -B/--bookmarks, the result of bookmark comparison between
3637 local and remote repositories is displayed. With -v/--verbose,
3638 local and remote repositories is displayed. With -v/--verbose,
3638 status is also displayed for each bookmark, as shown below::
3639 status is also displayed for each bookmark, as shown below::
3639
3640
3640 BM1 01234567890a added
3641 BM1 01234567890a added
3641 BM2 deleted
3642 BM2 deleted
3642 BM3 234567890abc advanced
3643 BM3 234567890abc advanced
3643 BM4 34567890abcd diverged
3644 BM4 34567890abcd diverged
3644 BM5 4567890abcde changed
3645 BM5 4567890abcde changed
3645
3646
3646 The action taken when pushing depends on the
3647 The action taken when pushing depends on the
3647 status of each bookmark:
3648 status of each bookmark:
3648
3649
3649 :``added``: push with ``-B`` will create it
3650 :``added``: push with ``-B`` will create it
3650 :``deleted``: push with ``-B`` will delete it
3651 :``deleted``: push with ``-B`` will delete it
3651 :``advanced``: push will update it
3652 :``advanced``: push will update it
3652 :``diverged``: push with ``-B`` will update it
3653 :``diverged``: push with ``-B`` will update it
3653 :``changed``: push with ``-B`` will update it
3654 :``changed``: push with ``-B`` will update it
3654
3655
3655 From the point of view of pushing behavior, bookmarks
3656 From the point of view of pushing behavior, bookmarks
3656 existing only in the remote repository are treated as
3657 existing only in the remote repository are treated as
3657 ``deleted``, even if they were in fact added remotely.
3658 ``deleted``, even if they were in fact added remotely.
3658
3659
3659 Returns 0 if there are outgoing changes, 1 otherwise.
3660 Returns 0 if there are outgoing changes, 1 otherwise.
3660 """
3661 """
3661 opts = pycompat.byteskwargs(opts)
3662 opts = pycompat.byteskwargs(opts)
3662 if opts.get('graph'):
3663 if opts.get('graph'):
3663 logcmdutil.checkunsupportedgraphflags([], opts)
3664 logcmdutil.checkunsupportedgraphflags([], opts)
3664 o, other = hg._outgoing(ui, repo, dest, opts)
3665 o, other = hg._outgoing(ui, repo, dest, opts)
3665 if not o:
3666 if not o:
3666 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3667 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3667 return
3668 return
3668
3669
3669 revdag = logcmdutil.graphrevs(repo, o, opts)
3670 revdag = logcmdutil.graphrevs(repo, o, opts)
3670 ui.pager('outgoing')
3671 ui.pager('outgoing')
3671 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
3672 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
3672 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3673 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3673 graphmod.asciiedges)
3674 graphmod.asciiedges)
3674 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3675 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3675 return 0
3676 return 0
3676
3677
3677 if opts.get('bookmarks'):
3678 if opts.get('bookmarks'):
3678 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3679 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3679 dest, branches = hg.parseurl(dest, opts.get('branch'))
3680 dest, branches = hg.parseurl(dest, opts.get('branch'))
3680 other = hg.peer(repo, opts, dest)
3681 other = hg.peer(repo, opts, dest)
3681 if 'bookmarks' not in other.listkeys('namespaces'):
3682 if 'bookmarks' not in other.listkeys('namespaces'):
3682 ui.warn(_("remote doesn't support bookmarks\n"))
3683 ui.warn(_("remote doesn't support bookmarks\n"))
3683 return 0
3684 return 0
3684 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
3685 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
3685 ui.pager('outgoing')
3686 ui.pager('outgoing')
3686 return bookmarks.outgoing(ui, repo, other)
3687 return bookmarks.outgoing(ui, repo, other)
3687
3688
3688 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3689 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3689 try:
3690 try:
3690 return hg.outgoing(ui, repo, dest, opts)
3691 return hg.outgoing(ui, repo, dest, opts)
3691 finally:
3692 finally:
3692 del repo._subtoppath
3693 del repo._subtoppath
3693
3694
3694 @command('parents',
3695 @command('parents',
3695 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
3696 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
3696 ] + templateopts,
3697 ] + templateopts,
3697 _('[-r REV] [FILE]'),
3698 _('[-r REV] [FILE]'),
3698 inferrepo=True)
3699 inferrepo=True)
3699 def parents(ui, repo, file_=None, **opts):
3700 def parents(ui, repo, file_=None, **opts):
3700 """show the parents of the working directory or revision (DEPRECATED)
3701 """show the parents of the working directory or revision (DEPRECATED)
3701
3702
3702 Print the working directory's parent revisions. If a revision is
3703 Print the working directory's parent revisions. If a revision is
3703 given via -r/--rev, the parent of that revision will be printed.
3704 given via -r/--rev, the parent of that revision will be printed.
3704 If a file argument is given, the revision in which the file was
3705 If a file argument is given, the revision in which the file was
3705 last changed (before the working directory revision or the
3706 last changed (before the working directory revision or the
3706 argument to --rev if given) is printed.
3707 argument to --rev if given) is printed.
3707
3708
3708 This command is equivalent to::
3709 This command is equivalent to::
3709
3710
3710 hg log -r "p1()+p2()" or
3711 hg log -r "p1()+p2()" or
3711 hg log -r "p1(REV)+p2(REV)" or
3712 hg log -r "p1(REV)+p2(REV)" or
3712 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
3713 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
3713 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
3714 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
3714
3715
3715 See :hg:`summary` and :hg:`help revsets` for related information.
3716 See :hg:`summary` and :hg:`help revsets` for related information.
3716
3717
3717 Returns 0 on success.
3718 Returns 0 on success.
3718 """
3719 """
3719
3720
3720 opts = pycompat.byteskwargs(opts)
3721 opts = pycompat.byteskwargs(opts)
3721 rev = opts.get('rev')
3722 rev = opts.get('rev')
3722 if rev:
3723 if rev:
3723 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
3724 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
3724 ctx = scmutil.revsingle(repo, rev, None)
3725 ctx = scmutil.revsingle(repo, rev, None)
3725
3726
3726 if file_:
3727 if file_:
3727 m = scmutil.match(ctx, (file_,), opts)
3728 m = scmutil.match(ctx, (file_,), opts)
3728 if m.anypats() or len(m.files()) != 1:
3729 if m.anypats() or len(m.files()) != 1:
3729 raise error.Abort(_('can only specify an explicit filename'))
3730 raise error.Abort(_('can only specify an explicit filename'))
3730 file_ = m.files()[0]
3731 file_ = m.files()[0]
3731 filenodes = []
3732 filenodes = []
3732 for cp in ctx.parents():
3733 for cp in ctx.parents():
3733 if not cp:
3734 if not cp:
3734 continue
3735 continue
3735 try:
3736 try:
3736 filenodes.append(cp.filenode(file_))
3737 filenodes.append(cp.filenode(file_))
3737 except error.LookupError:
3738 except error.LookupError:
3738 pass
3739 pass
3739 if not filenodes:
3740 if not filenodes:
3740 raise error.Abort(_("'%s' not found in manifest!") % file_)
3741 raise error.Abort(_("'%s' not found in manifest!") % file_)
3741 p = []
3742 p = []
3742 for fn in filenodes:
3743 for fn in filenodes:
3743 fctx = repo.filectx(file_, fileid=fn)
3744 fctx = repo.filectx(file_, fileid=fn)
3744 p.append(fctx.node())
3745 p.append(fctx.node())
3745 else:
3746 else:
3746 p = [cp.node() for cp in ctx.parents()]
3747 p = [cp.node() for cp in ctx.parents()]
3747
3748
3748 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3749 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3749 for n in p:
3750 for n in p:
3750 if n != nullid:
3751 if n != nullid:
3751 displayer.show(repo[n])
3752 displayer.show(repo[n])
3752 displayer.close()
3753 displayer.close()
3753
3754
3754 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True,
3755 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True,
3755 cmdtype=readonly)
3756 cmdtype=readonly)
3756 def paths(ui, repo, search=None, **opts):
3757 def paths(ui, repo, search=None, **opts):
3757 """show aliases for remote repositories
3758 """show aliases for remote repositories
3758
3759
3759 Show definition of symbolic path name NAME. If no name is given,
3760 Show definition of symbolic path name NAME. If no name is given,
3760 show definition of all available names.
3761 show definition of all available names.
3761
3762
3762 Option -q/--quiet suppresses all output when searching for NAME
3763 Option -q/--quiet suppresses all output when searching for NAME
3763 and shows only the path names when listing all definitions.
3764 and shows only the path names when listing all definitions.
3764
3765
3765 Path names are defined in the [paths] section of your
3766 Path names are defined in the [paths] section of your
3766 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3767 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3767 repository, ``.hg/hgrc`` is used, too.
3768 repository, ``.hg/hgrc`` is used, too.
3768
3769
3769 The path names ``default`` and ``default-push`` have a special
3770 The path names ``default`` and ``default-push`` have a special
3770 meaning. When performing a push or pull operation, they are used
3771 meaning. When performing a push or pull operation, they are used
3771 as fallbacks if no location is specified on the command-line.
3772 as fallbacks if no location is specified on the command-line.
3772 When ``default-push`` is set, it will be used for push and
3773 When ``default-push`` is set, it will be used for push and
3773 ``default`` will be used for pull; otherwise ``default`` is used
3774 ``default`` will be used for pull; otherwise ``default`` is used
3774 as the fallback for both. When cloning a repository, the clone
3775 as the fallback for both. When cloning a repository, the clone
3775 source is written as ``default`` in ``.hg/hgrc``.
3776 source is written as ``default`` in ``.hg/hgrc``.
3776
3777
3777 .. note::
3778 .. note::
3778
3779
3779 ``default`` and ``default-push`` apply to all inbound (e.g.
3780 ``default`` and ``default-push`` apply to all inbound (e.g.
3780 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
3781 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
3781 and :hg:`bundle`) operations.
3782 and :hg:`bundle`) operations.
3782
3783
3783 See :hg:`help urls` for more information.
3784 See :hg:`help urls` for more information.
3784
3785
3785 Returns 0 on success.
3786 Returns 0 on success.
3786 """
3787 """
3787
3788
3788 opts = pycompat.byteskwargs(opts)
3789 opts = pycompat.byteskwargs(opts)
3789 ui.pager('paths')
3790 ui.pager('paths')
3790 if search:
3791 if search:
3791 pathitems = [(name, path) for name, path in ui.paths.iteritems()
3792 pathitems = [(name, path) for name, path in ui.paths.iteritems()
3792 if name == search]
3793 if name == search]
3793 else:
3794 else:
3794 pathitems = sorted(ui.paths.iteritems())
3795 pathitems = sorted(ui.paths.iteritems())
3795
3796
3796 fm = ui.formatter('paths', opts)
3797 fm = ui.formatter('paths', opts)
3797 if fm.isplain():
3798 if fm.isplain():
3798 hidepassword = util.hidepassword
3799 hidepassword = util.hidepassword
3799 else:
3800 else:
3800 hidepassword = bytes
3801 hidepassword = bytes
3801 if ui.quiet:
3802 if ui.quiet:
3802 namefmt = '%s\n'
3803 namefmt = '%s\n'
3803 else:
3804 else:
3804 namefmt = '%s = '
3805 namefmt = '%s = '
3805 showsubopts = not search and not ui.quiet
3806 showsubopts = not search and not ui.quiet
3806
3807
3807 for name, path in pathitems:
3808 for name, path in pathitems:
3808 fm.startitem()
3809 fm.startitem()
3809 fm.condwrite(not search, 'name', namefmt, name)
3810 fm.condwrite(not search, 'name', namefmt, name)
3810 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
3811 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
3811 for subopt, value in sorted(path.suboptions.items()):
3812 for subopt, value in sorted(path.suboptions.items()):
3812 assert subopt not in ('name', 'url')
3813 assert subopt not in ('name', 'url')
3813 if showsubopts:
3814 if showsubopts:
3814 fm.plain('%s:%s = ' % (name, subopt))
3815 fm.plain('%s:%s = ' % (name, subopt))
3815 fm.condwrite(showsubopts, subopt, '%s\n', value)
3816 fm.condwrite(showsubopts, subopt, '%s\n', value)
3816
3817
3817 fm.end()
3818 fm.end()
3818
3819
3819 if search and not pathitems:
3820 if search and not pathitems:
3820 if not ui.quiet:
3821 if not ui.quiet:
3821 ui.warn(_("not found!\n"))
3822 ui.warn(_("not found!\n"))
3822 return 1
3823 return 1
3823 else:
3824 else:
3824 return 0
3825 return 0
3825
3826
3826 @command('phase',
3827 @command('phase',
3827 [('p', 'public', False, _('set changeset phase to public')),
3828 [('p', 'public', False, _('set changeset phase to public')),
3828 ('d', 'draft', False, _('set changeset phase to draft')),
3829 ('d', 'draft', False, _('set changeset phase to draft')),
3829 ('s', 'secret', False, _('set changeset phase to secret')),
3830 ('s', 'secret', False, _('set changeset phase to secret')),
3830 ('f', 'force', False, _('allow moving the boundary backward')),
3831 ('f', 'force', False, _('allow moving the boundary backward')),
3831 ('r', 'rev', [], _('target revision'), _('REV')),
3832 ('r', 'rev', [], _('target revision'), _('REV')),
3832 ],
3833 ],
3833 _('[-p|-d|-s] [-f] [-r] [REV...]'))
3834 _('[-p|-d|-s] [-f] [-r] [REV...]'))
3834 def phase(ui, repo, *revs, **opts):
3835 def phase(ui, repo, *revs, **opts):
3835 """set or show the current phase name
3836 """set or show the current phase name
3836
3837
3837 With no argument, show the phase name of the current revision(s).
3838 With no argument, show the phase name of the current revision(s).
3838
3839
3839 With one of -p/--public, -d/--draft or -s/--secret, change the
3840 With one of -p/--public, -d/--draft or -s/--secret, change the
3840 phase value of the specified revisions.
3841 phase value of the specified revisions.
3841
3842
3842 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
3843 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
3843 lower phase to a higher phase. Phases are ordered as follows::
3844 lower phase to a higher phase. Phases are ordered as follows::
3844
3845
3845 public < draft < secret
3846 public < draft < secret
3846
3847
3847 Returns 0 on success, 1 if some phases could not be changed.
3848 Returns 0 on success, 1 if some phases could not be changed.
3848
3849
3849 (For more information about the phases concept, see :hg:`help phases`.)
3850 (For more information about the phases concept, see :hg:`help phases`.)
3850 """
3851 """
3851 opts = pycompat.byteskwargs(opts)
3852 opts = pycompat.byteskwargs(opts)
3852 # search for a unique phase argument
3853 # search for a unique phase argument
3853 targetphase = None
3854 targetphase = None
3854 for idx, name in enumerate(phases.phasenames):
3855 for idx, name in enumerate(phases.phasenames):
3855 if opts[name]:
3856 if opts[name]:
3856 if targetphase is not None:
3857 if targetphase is not None:
3857 raise error.Abort(_('only one phase can be specified'))
3858 raise error.Abort(_('only one phase can be specified'))
3858 targetphase = idx
3859 targetphase = idx
3859
3860
3860 # look for specified revision
3861 # look for specified revision
3861 revs = list(revs)
3862 revs = list(revs)
3862 revs.extend(opts['rev'])
3863 revs.extend(opts['rev'])
3863 if not revs:
3864 if not revs:
3864 # display both parents as the second parent phase can influence
3865 # display both parents as the second parent phase can influence
3865 # the phase of a merge commit
3866 # the phase of a merge commit
3866 revs = [c.rev() for c in repo[None].parents()]
3867 revs = [c.rev() for c in repo[None].parents()]
3867
3868
3868 revs = scmutil.revrange(repo, revs)
3869 revs = scmutil.revrange(repo, revs)
3869
3870
3870 ret = 0
3871 ret = 0
3871 if targetphase is None:
3872 if targetphase is None:
3872 # display
3873 # display
3873 for r in revs:
3874 for r in revs:
3874 ctx = repo[r]
3875 ctx = repo[r]
3875 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
3876 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
3876 else:
3877 else:
3877 with repo.lock(), repo.transaction("phase") as tr:
3878 with repo.lock(), repo.transaction("phase") as tr:
3878 # set phase
3879 # set phase
3879 if not revs:
3880 if not revs:
3880 raise error.Abort(_('empty revision set'))
3881 raise error.Abort(_('empty revision set'))
3881 nodes = [repo[r].node() for r in revs]
3882 nodes = [repo[r].node() for r in revs]
3882 # moving revisions from public to draft may hide them
3883 # moving revisions from public to draft may hide them
3883 # We have to check result on an unfiltered repository
3884 # We have to check result on an unfiltered repository
3884 unfi = repo.unfiltered()
3885 unfi = repo.unfiltered()
3885 getphase = unfi._phasecache.phase
3886 getphase = unfi._phasecache.phase
3886 olddata = [getphase(unfi, r) for r in unfi]
3887 olddata = [getphase(unfi, r) for r in unfi]
3887 phases.advanceboundary(repo, tr, targetphase, nodes)
3888 phases.advanceboundary(repo, tr, targetphase, nodes)
3888 if opts['force']:
3889 if opts['force']:
3889 phases.retractboundary(repo, tr, targetphase, nodes)
3890 phases.retractboundary(repo, tr, targetphase, nodes)
3890 getphase = unfi._phasecache.phase
3891 getphase = unfi._phasecache.phase
3891 newdata = [getphase(unfi, r) for r in unfi]
3892 newdata = [getphase(unfi, r) for r in unfi]
3892 changes = sum(newdata[r] != olddata[r] for r in unfi)
3893 changes = sum(newdata[r] != olddata[r] for r in unfi)
3893 cl = unfi.changelog
3894 cl = unfi.changelog
3894 rejected = [n for n in nodes
3895 rejected = [n for n in nodes
3895 if newdata[cl.rev(n)] < targetphase]
3896 if newdata[cl.rev(n)] < targetphase]
3896 if rejected:
3897 if rejected:
3897 ui.warn(_('cannot move %i changesets to a higher '
3898 ui.warn(_('cannot move %i changesets to a higher '
3898 'phase, use --force\n') % len(rejected))
3899 'phase, use --force\n') % len(rejected))
3899 ret = 1
3900 ret = 1
3900 if changes:
3901 if changes:
3901 msg = _('phase changed for %i changesets\n') % changes
3902 msg = _('phase changed for %i changesets\n') % changes
3902 if ret:
3903 if ret:
3903 ui.status(msg)
3904 ui.status(msg)
3904 else:
3905 else:
3905 ui.note(msg)
3906 ui.note(msg)
3906 else:
3907 else:
3907 ui.warn(_('no phases changed\n'))
3908 ui.warn(_('no phases changed\n'))
3908 return ret
3909 return ret
3909
3910
3910 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
3911 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
3911 """Run after a changegroup has been added via pull/unbundle
3912 """Run after a changegroup has been added via pull/unbundle
3912
3913
3913 This takes the following arguments:
3914 This takes the following arguments:
3914
3915
3915 :modheads: change of heads by pull/unbundle
3916 :modheads: change of heads by pull/unbundle
3916 :optupdate: whether the working directory should be updated
3917 :optupdate: whether the working directory should be updated
3917 :checkout: update destination revision (or None to default destination)
3918 :checkout: update destination revision (or None to default destination)
3918 :brev: a name, which might be a bookmark to be activated after updating
3919 :brev: a name, which might be a bookmark to be activated after updating
3919 """
3920 """
3920 if modheads == 0:
3921 if modheads == 0:
3921 return
3922 return
3922 if optupdate:
3923 if optupdate:
3923 try:
3924 try:
3924 return hg.updatetotally(ui, repo, checkout, brev)
3925 return hg.updatetotally(ui, repo, checkout, brev)
3925 except error.UpdateAbort as inst:
3926 except error.UpdateAbort as inst:
3926 msg = _("not updating: %s") % stringutil.forcebytestr(inst)
3927 msg = _("not updating: %s") % stringutil.forcebytestr(inst)
3927 hint = inst.hint
3928 hint = inst.hint
3928 raise error.UpdateAbort(msg, hint=hint)
3929 raise error.UpdateAbort(msg, hint=hint)
3929 if modheads > 1:
3930 if modheads > 1:
3930 currentbranchheads = len(repo.branchheads())
3931 currentbranchheads = len(repo.branchheads())
3931 if currentbranchheads == modheads:
3932 if currentbranchheads == modheads:
3932 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3933 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3933 elif currentbranchheads > 1:
3934 elif currentbranchheads > 1:
3934 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
3935 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
3935 "merge)\n"))
3936 "merge)\n"))
3936 else:
3937 else:
3937 ui.status(_("(run 'hg heads' to see heads)\n"))
3938 ui.status(_("(run 'hg heads' to see heads)\n"))
3938 elif not ui.configbool('commands', 'update.requiredest'):
3939 elif not ui.configbool('commands', 'update.requiredest'):
3939 ui.status(_("(run 'hg update' to get a working copy)\n"))
3940 ui.status(_("(run 'hg update' to get a working copy)\n"))
3940
3941
3941 @command('^pull',
3942 @command('^pull',
3942 [('u', 'update', None,
3943 [('u', 'update', None,
3943 _('update to new branch head if new descendants were pulled')),
3944 _('update to new branch head if new descendants were pulled')),
3944 ('f', 'force', None, _('run even when remote repository is unrelated')),
3945 ('f', 'force', None, _('run even when remote repository is unrelated')),
3945 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3946 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3946 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
3947 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
3947 ('b', 'branch', [], _('a specific branch you would like to pull'),
3948 ('b', 'branch', [], _('a specific branch you would like to pull'),
3948 _('BRANCH')),
3949 _('BRANCH')),
3949 ] + remoteopts,
3950 ] + remoteopts,
3950 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
3951 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
3951 def pull(ui, repo, source="default", **opts):
3952 def pull(ui, repo, source="default", **opts):
3952 """pull changes from the specified source
3953 """pull changes from the specified source
3953
3954
3954 Pull changes from a remote repository to a local one.
3955 Pull changes from a remote repository to a local one.
3955
3956
3956 This finds all changes from the repository at the specified path
3957 This finds all changes from the repository at the specified path
3957 or URL and adds them to a local repository (the current one unless
3958 or URL and adds them to a local repository (the current one unless
3958 -R is specified). By default, this does not update the copy of the
3959 -R is specified). By default, this does not update the copy of the
3959 project in the working directory.
3960 project in the working directory.
3960
3961
3961 Use :hg:`incoming` if you want to see what would have been added
3962 Use :hg:`incoming` if you want to see what would have been added
3962 by a pull at the time you issued this command. If you then decide
3963 by a pull at the time you issued this command. If you then decide
3963 to add those changes to the repository, you should use :hg:`pull
3964 to add those changes to the repository, you should use :hg:`pull
3964 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3965 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3965
3966
3966 If SOURCE is omitted, the 'default' path will be used.
3967 If SOURCE is omitted, the 'default' path will be used.
3967 See :hg:`help urls` for more information.
3968 See :hg:`help urls` for more information.
3968
3969
3969 Specifying bookmark as ``.`` is equivalent to specifying the active
3970 Specifying bookmark as ``.`` is equivalent to specifying the active
3970 bookmark's name.
3971 bookmark's name.
3971
3972
3972 Returns 0 on success, 1 if an update had unresolved files.
3973 Returns 0 on success, 1 if an update had unresolved files.
3973 """
3974 """
3974
3975
3975 opts = pycompat.byteskwargs(opts)
3976 opts = pycompat.byteskwargs(opts)
3976 if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
3977 if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
3977 msg = _('update destination required by configuration')
3978 msg = _('update destination required by configuration')
3978 hint = _('use hg pull followed by hg update DEST')
3979 hint = _('use hg pull followed by hg update DEST')
3979 raise error.Abort(msg, hint=hint)
3980 raise error.Abort(msg, hint=hint)
3980
3981
3981 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3982 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3982 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3983 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3983 other = hg.peer(repo, opts, source)
3984 other = hg.peer(repo, opts, source)
3984 try:
3985 try:
3985 revs, checkout = hg.addbranchrevs(repo, other, branches,
3986 revs, checkout = hg.addbranchrevs(repo, other, branches,
3986 opts.get('rev'))
3987 opts.get('rev'))
3987
3988
3988
3989
3989 pullopargs = {}
3990 pullopargs = {}
3990 if opts.get('bookmark'):
3991 if opts.get('bookmark'):
3991 if not revs:
3992 if not revs:
3992 revs = []
3993 revs = []
3993 # The list of bookmarks used here is not the one used to actually
3994 # The list of bookmarks used here is not the one used to actually
3994 # update the bookmark name. This can result in the revision pulled
3995 # update the bookmark name. This can result in the revision pulled
3995 # not ending up with the name of the bookmark because of a race
3996 # not ending up with the name of the bookmark because of a race
3996 # condition on the server. (See issue 4689 for details)
3997 # condition on the server. (See issue 4689 for details)
3997 remotebookmarks = other.listkeys('bookmarks')
3998 remotebookmarks = other.listkeys('bookmarks')
3998 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
3999 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
3999 pullopargs['remotebookmarks'] = remotebookmarks
4000 pullopargs['remotebookmarks'] = remotebookmarks
4000 for b in opts['bookmark']:
4001 for b in opts['bookmark']:
4001 b = repo._bookmarks.expandname(b)
4002 b = repo._bookmarks.expandname(b)
4002 if b not in remotebookmarks:
4003 if b not in remotebookmarks:
4003 raise error.Abort(_('remote bookmark %s not found!') % b)
4004 raise error.Abort(_('remote bookmark %s not found!') % b)
4004 revs.append(hex(remotebookmarks[b]))
4005 revs.append(hex(remotebookmarks[b]))
4005
4006
4006 if revs:
4007 if revs:
4007 try:
4008 try:
4008 # When 'rev' is a bookmark name, we cannot guarantee that it
4009 # When 'rev' is a bookmark name, we cannot guarantee that it
4009 # will be updated with that name because of a race condition
4010 # will be updated with that name because of a race condition
4010 # server side. (See issue 4689 for details)
4011 # server side. (See issue 4689 for details)
4011 oldrevs = revs
4012 oldrevs = revs
4012 revs = [] # actually, nodes
4013 revs = [] # actually, nodes
4013 for r in oldrevs:
4014 for r in oldrevs:
4014 node = other.lookup(r)
4015 node = other.lookup(r)
4015 revs.append(node)
4016 revs.append(node)
4016 if r == checkout:
4017 if r == checkout:
4017 checkout = node
4018 checkout = node
4018 except error.CapabilityError:
4019 except error.CapabilityError:
4019 err = _("other repository doesn't support revision lookup, "
4020 err = _("other repository doesn't support revision lookup, "
4020 "so a rev cannot be specified.")
4021 "so a rev cannot be specified.")
4021 raise error.Abort(err)
4022 raise error.Abort(err)
4022
4023
4023 wlock = util.nullcontextmanager()
4024 wlock = util.nullcontextmanager()
4024 if opts.get('update'):
4025 if opts.get('update'):
4025 wlock = repo.wlock()
4026 wlock = repo.wlock()
4026 with wlock:
4027 with wlock:
4027 pullopargs.update(opts.get('opargs', {}))
4028 pullopargs.update(opts.get('opargs', {}))
4028 modheads = exchange.pull(repo, other, heads=revs,
4029 modheads = exchange.pull(repo, other, heads=revs,
4029 force=opts.get('force'),
4030 force=opts.get('force'),
4030 bookmarks=opts.get('bookmark', ()),
4031 bookmarks=opts.get('bookmark', ()),
4031 opargs=pullopargs).cgresult
4032 opargs=pullopargs).cgresult
4032
4033
4033 # brev is a name, which might be a bookmark to be activated at
4034 # brev is a name, which might be a bookmark to be activated at
4034 # the end of the update. In other words, it is an explicit
4035 # the end of the update. In other words, it is an explicit
4035 # destination of the update
4036 # destination of the update
4036 brev = None
4037 brev = None
4037
4038
4038 if checkout:
4039 if checkout:
4039 checkout = "%d" % repo.changelog.rev(checkout)
4040 checkout = "%d" % repo.changelog.rev(checkout)
4040
4041
4041 # order below depends on implementation of
4042 # order below depends on implementation of
4042 # hg.addbranchrevs(). opts['bookmark'] is ignored,
4043 # hg.addbranchrevs(). opts['bookmark'] is ignored,
4043 # because 'checkout' is determined without it.
4044 # because 'checkout' is determined without it.
4044 if opts.get('rev'):
4045 if opts.get('rev'):
4045 brev = opts['rev'][0]
4046 brev = opts['rev'][0]
4046 elif opts.get('branch'):
4047 elif opts.get('branch'):
4047 brev = opts['branch'][0]
4048 brev = opts['branch'][0]
4048 else:
4049 else:
4049 brev = branches[0]
4050 brev = branches[0]
4050 repo._subtoppath = source
4051 repo._subtoppath = source
4051 try:
4052 try:
4052 ret = postincoming(ui, repo, modheads, opts.get('update'),
4053 ret = postincoming(ui, repo, modheads, opts.get('update'),
4053 checkout, brev)
4054 checkout, brev)
4054
4055
4055 finally:
4056 finally:
4056 del repo._subtoppath
4057 del repo._subtoppath
4057
4058
4058 finally:
4059 finally:
4059 other.close()
4060 other.close()
4060 return ret
4061 return ret
4061
4062
4062 @command('^push',
4063 @command('^push',
4063 [('f', 'force', None, _('force push')),
4064 [('f', 'force', None, _('force push')),
4064 ('r', 'rev', [],
4065 ('r', 'rev', [],
4065 _('a changeset intended to be included in the destination'),
4066 _('a changeset intended to be included in the destination'),
4066 _('REV')),
4067 _('REV')),
4067 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4068 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4068 ('b', 'branch', [],
4069 ('b', 'branch', [],
4069 _('a specific branch you would like to push'), _('BRANCH')),
4070 _('a specific branch you would like to push'), _('BRANCH')),
4070 ('', 'new-branch', False, _('allow pushing a new branch')),
4071 ('', 'new-branch', False, _('allow pushing a new branch')),
4071 ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
4072 ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
4072 ] + remoteopts,
4073 ] + remoteopts,
4073 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4074 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4074 def push(ui, repo, dest=None, **opts):
4075 def push(ui, repo, dest=None, **opts):
4075 """push changes to the specified destination
4076 """push changes to the specified destination
4076
4077
4077 Push changesets from the local repository to the specified
4078 Push changesets from the local repository to the specified
4078 destination.
4079 destination.
4079
4080
4080 This operation is symmetrical to pull: it is identical to a pull
4081 This operation is symmetrical to pull: it is identical to a pull
4081 in the destination repository from the current one.
4082 in the destination repository from the current one.
4082
4083
4083 By default, push will not allow creation of new heads at the
4084 By default, push will not allow creation of new heads at the
4084 destination, since multiple heads would make it unclear which head
4085 destination, since multiple heads would make it unclear which head
4085 to use. In this situation, it is recommended to pull and merge
4086 to use. In this situation, it is recommended to pull and merge
4086 before pushing.
4087 before pushing.
4087
4088
4088 Use --new-branch if you want to allow push to create a new named
4089 Use --new-branch if you want to allow push to create a new named
4089 branch that is not present at the destination. This allows you to
4090 branch that is not present at the destination. This allows you to
4090 only create a new branch without forcing other changes.
4091 only create a new branch without forcing other changes.
4091
4092
4092 .. note::
4093 .. note::
4093
4094
4094 Extra care should be taken with the -f/--force option,
4095 Extra care should be taken with the -f/--force option,
4095 which will push all new heads on all branches, an action which will
4096 which will push all new heads on all branches, an action which will
4096 almost always cause confusion for collaborators.
4097 almost always cause confusion for collaborators.
4097
4098
4098 If -r/--rev is used, the specified revision and all its ancestors
4099 If -r/--rev is used, the specified revision and all its ancestors
4099 will be pushed to the remote repository.
4100 will be pushed to the remote repository.
4100
4101
4101 If -B/--bookmark is used, the specified bookmarked revision, its
4102 If -B/--bookmark is used, the specified bookmarked revision, its
4102 ancestors, and the bookmark will be pushed to the remote
4103 ancestors, and the bookmark will be pushed to the remote
4103 repository. Specifying ``.`` is equivalent to specifying the active
4104 repository. Specifying ``.`` is equivalent to specifying the active
4104 bookmark's name.
4105 bookmark's name.
4105
4106
4106 Please see :hg:`help urls` for important details about ``ssh://``
4107 Please see :hg:`help urls` for important details about ``ssh://``
4107 URLs. If DESTINATION is omitted, a default path will be used.
4108 URLs. If DESTINATION is omitted, a default path will be used.
4108
4109
4109 .. container:: verbose
4110 .. container:: verbose
4110
4111
4111 The --pushvars option sends strings to the server that become
4112 The --pushvars option sends strings to the server that become
4112 environment variables prepended with ``HG_USERVAR_``. For example,
4113 environment variables prepended with ``HG_USERVAR_``. For example,
4113 ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
4114 ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
4114 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
4115 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
4115
4116
4116 pushvars can be used to provide user-overridable hooks as well as to set debug
4117 pushvars can be used to provide user-overridable hooks as well as to set debug
4117 levels. One example is having a hook that blocks commits containing
4118 levels. One example is having a hook that blocks commits containing
4118 conflict markers, but enables the user to override the hook if the file
4119 conflict markers, but enables the user to override the hook if the file
4119 is using conflict markers for testing purposes or the file format has
4120 is using conflict markers for testing purposes or the file format has
4120 strings that look like conflict markers.
4121 strings that look like conflict markers.
4121
4122
4122 By default, servers will ignore `--pushvars`. To enable it add the
4123 By default, servers will ignore `--pushvars`. To enable it add the
4123 following to your configuration file::
4124 following to your configuration file::
4124
4125
4125 [push]
4126 [push]
4126 pushvars.server = true
4127 pushvars.server = true
4127
4128
4128 Returns 0 if push was successful, 1 if nothing to push.
4129 Returns 0 if push was successful, 1 if nothing to push.
4129 """
4130 """
4130
4131
4131 opts = pycompat.byteskwargs(opts)
4132 opts = pycompat.byteskwargs(opts)
4132 if opts.get('bookmark'):
4133 if opts.get('bookmark'):
4133 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4134 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4134 for b in opts['bookmark']:
4135 for b in opts['bookmark']:
4135 # translate -B options to -r so changesets get pushed
4136 # translate -B options to -r so changesets get pushed
4136 b = repo._bookmarks.expandname(b)
4137 b = repo._bookmarks.expandname(b)
4137 if b in repo._bookmarks:
4138 if b in repo._bookmarks:
4138 opts.setdefault('rev', []).append(b)
4139 opts.setdefault('rev', []).append(b)
4139 else:
4140 else:
4140 # if we try to push a deleted bookmark, translate it to null
4141 # if we try to push a deleted bookmark, translate it to null
4141 # this lets simultaneous -r, -b options continue working
4142 # this lets simultaneous -r, -b options continue working
4142 opts.setdefault('rev', []).append("null")
4143 opts.setdefault('rev', []).append("null")
4143
4144
4144 path = ui.paths.getpath(dest, default=('default-push', 'default'))
4145 path = ui.paths.getpath(dest, default=('default-push', 'default'))
4145 if not path:
4146 if not path:
4146 raise error.Abort(_('default repository not configured!'),
4147 raise error.Abort(_('default repository not configured!'),
4147 hint=_("see 'hg help config.paths'"))
4148 hint=_("see 'hg help config.paths'"))
4148 dest = path.pushloc or path.loc
4149 dest = path.pushloc or path.loc
4149 branches = (path.branch, opts.get('branch') or [])
4150 branches = (path.branch, opts.get('branch') or [])
4150 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4151 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4151 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4152 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4152 other = hg.peer(repo, opts, dest)
4153 other = hg.peer(repo, opts, dest)
4153
4154
4154 if revs:
4155 if revs:
4155 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4156 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4156 if not revs:
4157 if not revs:
4157 raise error.Abort(_("specified revisions evaluate to an empty set"),
4158 raise error.Abort(_("specified revisions evaluate to an empty set"),
4158 hint=_("use different revision arguments"))
4159 hint=_("use different revision arguments"))
4159 elif path.pushrev:
4160 elif path.pushrev:
4160 # It doesn't make any sense to specify ancestor revisions. So limit
4161 # It doesn't make any sense to specify ancestor revisions. So limit
4161 # to DAG heads to make discovery simpler.
4162 # to DAG heads to make discovery simpler.
4162 expr = revsetlang.formatspec('heads(%r)', path.pushrev)
4163 expr = revsetlang.formatspec('heads(%r)', path.pushrev)
4163 revs = scmutil.revrange(repo, [expr])
4164 revs = scmutil.revrange(repo, [expr])
4164 revs = [repo[rev].node() for rev in revs]
4165 revs = [repo[rev].node() for rev in revs]
4165 if not revs:
4166 if not revs:
4166 raise error.Abort(_('default push revset for path evaluates to an '
4167 raise error.Abort(_('default push revset for path evaluates to an '
4167 'empty set'))
4168 'empty set'))
4168
4169
4169 repo._subtoppath = dest
4170 repo._subtoppath = dest
4170 try:
4171 try:
4171 # push subrepos depth-first for coherent ordering
4172 # push subrepos depth-first for coherent ordering
4172 c = repo['.']
4173 c = repo['.']
4173 subs = c.substate # only repos that are committed
4174 subs = c.substate # only repos that are committed
4174 for s in sorted(subs):
4175 for s in sorted(subs):
4175 result = c.sub(s).push(opts)
4176 result = c.sub(s).push(opts)
4176 if result == 0:
4177 if result == 0:
4177 return not result
4178 return not result
4178 finally:
4179 finally:
4179 del repo._subtoppath
4180 del repo._subtoppath
4180
4181
4181 opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
4182 opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
4182 opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
4183 opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
4183
4184
4184 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
4185 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
4185 newbranch=opts.get('new_branch'),
4186 newbranch=opts.get('new_branch'),
4186 bookmarks=opts.get('bookmark', ()),
4187 bookmarks=opts.get('bookmark', ()),
4187 opargs=opargs)
4188 opargs=opargs)
4188
4189
4189 result = not pushop.cgresult
4190 result = not pushop.cgresult
4190
4191
4191 if pushop.bkresult is not None:
4192 if pushop.bkresult is not None:
4192 if pushop.bkresult == 2:
4193 if pushop.bkresult == 2:
4193 result = 2
4194 result = 2
4194 elif not result and pushop.bkresult:
4195 elif not result and pushop.bkresult:
4195 result = 2
4196 result = 2
4196
4197
4197 return result
4198 return result
4198
4199
4199 @command('recover', [])
4200 @command('recover', [])
4200 def recover(ui, repo):
4201 def recover(ui, repo):
4201 """roll back an interrupted transaction
4202 """roll back an interrupted transaction
4202
4203
4203 Recover from an interrupted commit or pull.
4204 Recover from an interrupted commit or pull.
4204
4205
4205 This command tries to fix the repository status after an
4206 This command tries to fix the repository status after an
4206 interrupted operation. It should only be necessary when Mercurial
4207 interrupted operation. It should only be necessary when Mercurial
4207 suggests it.
4208 suggests it.
4208
4209
4209 Returns 0 if successful, 1 if nothing to recover or verify fails.
4210 Returns 0 if successful, 1 if nothing to recover or verify fails.
4210 """
4211 """
4211 if repo.recover():
4212 if repo.recover():
4212 return hg.verify(repo)
4213 return hg.verify(repo)
4213 return 1
4214 return 1
4214
4215
4215 @command('^remove|rm',
4216 @command('^remove|rm',
4216 [('A', 'after', None, _('record delete for missing files')),
4217 [('A', 'after', None, _('record delete for missing files')),
4217 ('f', 'force', None,
4218 ('f', 'force', None,
4218 _('forget added files, delete modified files')),
4219 _('forget added files, delete modified files')),
4219 ] + subrepoopts + walkopts + dryrunopts,
4220 ] + subrepoopts + walkopts + dryrunopts,
4220 _('[OPTION]... FILE...'),
4221 _('[OPTION]... FILE...'),
4221 inferrepo=True)
4222 inferrepo=True)
4222 def remove(ui, repo, *pats, **opts):
4223 def remove(ui, repo, *pats, **opts):
4223 """remove the specified files on the next commit
4224 """remove the specified files on the next commit
4224
4225
4225 Schedule the indicated files for removal from the current branch.
4226 Schedule the indicated files for removal from the current branch.
4226
4227
4227 This command schedules the files to be removed at the next commit.
4228 This command schedules the files to be removed at the next commit.
4228 To undo a remove before that, see :hg:`revert`. To undo added
4229 To undo a remove before that, see :hg:`revert`. To undo added
4229 files, see :hg:`forget`.
4230 files, see :hg:`forget`.
4230
4231
4231 .. container:: verbose
4232 .. container:: verbose
4232
4233
4233 -A/--after can be used to remove only files that have already
4234 -A/--after can be used to remove only files that have already
4234 been deleted, -f/--force can be used to force deletion, and -Af
4235 been deleted, -f/--force can be used to force deletion, and -Af
4235 can be used to remove files from the next revision without
4236 can be used to remove files from the next revision without
4236 deleting them from the working directory.
4237 deleting them from the working directory.
4237
4238
4238 The following table details the behavior of remove for different
4239 The following table details the behavior of remove for different
4239 file states (columns) and option combinations (rows). The file
4240 file states (columns) and option combinations (rows). The file
4240 states are Added [A], Clean [C], Modified [M] and Missing [!]
4241 states are Added [A], Clean [C], Modified [M] and Missing [!]
4241 (as reported by :hg:`status`). The actions are Warn, Remove
4242 (as reported by :hg:`status`). The actions are Warn, Remove
4242 (from branch) and Delete (from disk):
4243 (from branch) and Delete (from disk):
4243
4244
4244 ========= == == == ==
4245 ========= == == == ==
4245 opt/state A C M !
4246 opt/state A C M !
4246 ========= == == == ==
4247 ========= == == == ==
4247 none W RD W R
4248 none W RD W R
4248 -f R RD RD R
4249 -f R RD RD R
4249 -A W W W R
4250 -A W W W R
4250 -Af R R R R
4251 -Af R R R R
4251 ========= == == == ==
4252 ========= == == == ==
4252
4253
4253 .. note::
4254 .. note::
4254
4255
4255 :hg:`remove` never deletes files in Added [A] state from the
4256 :hg:`remove` never deletes files in Added [A] state from the
4256 working directory, not even if ``--force`` is specified.
4257 working directory, not even if ``--force`` is specified.
4257
4258
4258 Returns 0 on success, 1 if any warnings encountered.
4259 Returns 0 on success, 1 if any warnings encountered.
4259 """
4260 """
4260
4261
4261 opts = pycompat.byteskwargs(opts)
4262 opts = pycompat.byteskwargs(opts)
4262 after, force = opts.get('after'), opts.get('force')
4263 after, force = opts.get('after'), opts.get('force')
4263 dryrun = opts.get('dry_run')
4264 dryrun = opts.get('dry_run')
4264 if not pats and not after:
4265 if not pats and not after:
4265 raise error.Abort(_('no files specified'))
4266 raise error.Abort(_('no files specified'))
4266
4267
4267 m = scmutil.match(repo[None], pats, opts)
4268 m = scmutil.match(repo[None], pats, opts)
4268 subrepos = opts.get('subrepos')
4269 subrepos = opts.get('subrepos')
4269 return cmdutil.remove(ui, repo, m, "", after, force, subrepos,
4270 return cmdutil.remove(ui, repo, m, "", after, force, subrepos,
4270 dryrun=dryrun)
4271 dryrun=dryrun)
4271
4272
4272 @command('rename|move|mv',
4273 @command('rename|move|mv',
4273 [('A', 'after', None, _('record a rename that has already occurred')),
4274 [('A', 'after', None, _('record a rename that has already occurred')),
4274 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4275 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4275 ] + walkopts + dryrunopts,
4276 ] + walkopts + dryrunopts,
4276 _('[OPTION]... SOURCE... DEST'))
4277 _('[OPTION]... SOURCE... DEST'))
4277 def rename(ui, repo, *pats, **opts):
4278 def rename(ui, repo, *pats, **opts):
4278 """rename files; equivalent of copy + remove
4279 """rename files; equivalent of copy + remove
4279
4280
4280 Mark dest as copies of sources; mark sources for deletion. If dest
4281 Mark dest as copies of sources; mark sources for deletion. If dest
4281 is a directory, copies are put in that directory. If dest is a
4282 is a directory, copies are put in that directory. If dest is a
4282 file, there can only be one source.
4283 file, there can only be one source.
4283
4284
4284 By default, this command copies the contents of files as they
4285 By default, this command copies the contents of files as they
4285 exist in the working directory. If invoked with -A/--after, the
4286 exist in the working directory. If invoked with -A/--after, the
4286 operation is recorded, but no copying is performed.
4287 operation is recorded, but no copying is performed.
4287
4288
4288 This command takes effect at the next commit. To undo a rename
4289 This command takes effect at the next commit. To undo a rename
4289 before that, see :hg:`revert`.
4290 before that, see :hg:`revert`.
4290
4291
4291 Returns 0 on success, 1 if errors are encountered.
4292 Returns 0 on success, 1 if errors are encountered.
4292 """
4293 """
4293 opts = pycompat.byteskwargs(opts)
4294 opts = pycompat.byteskwargs(opts)
4294 with repo.wlock(False):
4295 with repo.wlock(False):
4295 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4296 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4296
4297
4297 @command('resolve',
4298 @command('resolve',
4298 [('a', 'all', None, _('select all unresolved files')),
4299 [('a', 'all', None, _('select all unresolved files')),
4299 ('l', 'list', None, _('list state of files needing merge')),
4300 ('l', 'list', None, _('list state of files needing merge')),
4300 ('m', 'mark', None, _('mark files as resolved')),
4301 ('m', 'mark', None, _('mark files as resolved')),
4301 ('u', 'unmark', None, _('mark files as unresolved')),
4302 ('u', 'unmark', None, _('mark files as unresolved')),
4302 ('n', 'no-status', None, _('hide status prefix'))]
4303 ('n', 'no-status', None, _('hide status prefix'))]
4303 + mergetoolopts + walkopts + formatteropts,
4304 + mergetoolopts + walkopts + formatteropts,
4304 _('[OPTION]... [FILE]...'),
4305 _('[OPTION]... [FILE]...'),
4305 inferrepo=True)
4306 inferrepo=True)
4306 def resolve(ui, repo, *pats, **opts):
4307 def resolve(ui, repo, *pats, **opts):
4307 """redo merges or set/view the merge status of files
4308 """redo merges or set/view the merge status of files
4308
4309
4309 Merges with unresolved conflicts are often the result of
4310 Merges with unresolved conflicts are often the result of
4310 non-interactive merging using the ``internal:merge`` configuration
4311 non-interactive merging using the ``internal:merge`` configuration
4311 setting, or a command-line merge tool like ``diff3``. The resolve
4312 setting, or a command-line merge tool like ``diff3``. The resolve
4312 command is used to manage the files involved in a merge, after
4313 command is used to manage the files involved in a merge, after
4313 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.
      You can use ``set:unresolved()`` or ``set:resolved()`` to filter
      the list. See :hg:`help filesets` for details.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    opts = pycompat.byteskwargs(opts)
    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    if show:
        ui.pager('resolve')
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)

        # Labels and keys based on merge state. Unresolved path conflicts show
        # as 'P'. Resolved path conflicts show as 'R', the same as normal
        # resolved conflicts.
        mergestateinfo = {
            mergemod.MERGE_RECORD_UNRESOLVED: ('resolve.unresolved', 'U'),
            mergemod.MERGE_RECORD_RESOLVED: ('resolve.resolved', 'R'),
            mergemod.MERGE_RECORD_UNRESOLVED_PATH: ('resolve.unresolved', 'P'),
            mergemod.MERGE_RECORD_RESOLVED_PATH: ('resolve.resolved', 'R'),
            mergemod.MERGE_RECORD_DRIVER_RESOLVED: ('resolve.driverresolved',
                                                    'D'),
        }

        for f in ms:
            if not m(f):
                continue

            label, key = mergestateinfo[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', key, label=label)
            fm.write('path', '%s\n', f, label=label)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        if (ms.mergedriver
            and ms.mdstate() == mergemod.MERGE_DRIVER_STATE_UNMARKED):
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == mergemod.MERGE_RECORD_DRIVER_RESOLVED:
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            # path conflicts must be resolved manually
            if ms[f] in (mergemod.MERGE_RECORD_UNRESOLVED_PATH,
                         mergemod.MERGE_RECORD_RESOLVED_PATH):
                if mark:
                    ms.mark(f, mergemod.MERGE_RECORD_RESOLVED_PATH)
                elif unmark:
                    ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH)
                elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH:
                    ui.warn(_('%s: path conflict must be resolved manually\n')
                            % f)
                continue

            if mark:
                ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
            elif unmark:
                ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED)
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0:1] for o in flaglist
                                     if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                        flags,
                        ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

    # Nudge users into finishing an unfinished operation
    unresolvedf = list(ms.unresolved())
    driverresolvedf = list(ms.driverresolved())
    if not unresolvedf and not driverresolvedf:
        ui.status(_('(no more unresolved files)\n'))
        cmdutil.checkafterresolved(repo)
    elif not unresolvedf:
        ui.status(_('(no more unresolved files -- '
                    'run "hg resolve --all" to conclude)\n'))

    return ret

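# The resolve workflow documented above can also be driven from an external
# script. The helper below is a minimal illustrative sketch, not part of
# Mercurial itself: it relies only on the command-line behaviour documented
# above ('hg resolve --list' prints one "U path" or "R path" line per
# conflicted file; 'hg resolve [--tool TOOL] FILE' re-merges one file), and
# the function name is invented for the example.
def _example_remerge_unresolved(tool=None):
    import subprocess
    out = subprocess.check_output(['hg', 'resolve', '--list']).decode()
    # keep only the paths still marked 'U' (unresolved)
    unresolved = [line[2:] for line in out.splitlines()
                  if line.startswith('U ')]
    for path in unresolved:
        cmd = ['hg', 'resolve']
        if tool:
            cmd += ['--tool', tool]  # overrides HGMERGE, as described above
        subprocess.check_call(cmd + [path])
    return unresolved
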
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None, _('interactively select the changes')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`merge --abort`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev)

    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats,
                          **pycompat.strkwargs(opts))

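# A small illustration of the behaviour described above: reverting files to
# an older revision restores their contents without touching the working
# directory parents, so the files then show up as modified. This sketch is
# not part of Mercurial and the helper name is invented; it uses only the
# documented flags (-r/--rev and -C/--no-backup).
def _example_revert_to_rev(rev, *paths):
    import subprocess
    # -C/--no-backup skips the .orig copies mentioned above
    subprocess.check_call(['hg', 'revert', '--no-backup', '-r', rev]
                          + list(paths))
    # the reverted files now appear as modified relative to the parent
    return subprocess.check_output(['hg', 'status', '-m']
                                   + list(paths)).decode()
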
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool('ui', 'rollback'):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    return repo.rollback(dryrun=opts.get(r'dry_run'),
                         force=opts.get(r'force'))

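# The ``ui.rollback`` switch described above is an ordinary boolean config
# option. A sketch of an hgrc that disables the command site-wide (the exact
# file location depends on the installation):
#
#     [ui]
#     rollback = false
#
# A user who really needs it can still re-enable it for a single invocation
# with ``hg --config ui.rollback=true rollback``.
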
@command('root', [], cmdtype=readonly)
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write(repo.root + "\n")

@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
    + subrepoopts,
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it. Set the ``web.allow-push`` option to ``*``
    to allow everybody to push to the server. You should use a real
    web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = wireprotoserver.sshserver(ui, repo)
        s.serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)

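# Tying the options documented above together: a throwaway server for ad-hoc
# sharing, with logging to files and a self-chosen port, might be started
# roughly like this (illustrative only; ``web.allow-push`` is the option named
# in the docstring, and ``web.push_ssl=false`` is additionally needed to
# accept pushes over plain HTTP):
#
#     hg serve -p 0 -A access.log -E error.log \
#         --config web.allow-push='*' --config web.push_ssl=false
#
# With ``-p 0`` the server picks a free port and prints the number it uses.
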
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True, cmdtype=readonly)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      The -t/--terse option abbreviates the output by showing only the directory
      name if all the files in it share the same status. The option takes an
      argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
      for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
      for 'ignored' and 'c' for clean.

      It abbreviates only those statuses which are passed. Note that clean and
      ignored files are not displayed with '--terse ic' unless the -c/--clean
      and -i/--ignored options are also used.

      The -v/--verbose option shows information when the repository is in an
      unfinished merge, shelve, rebase state, etc. You can have this behavior
      turned on by default by enabling the ``commands.status.verbose`` option.

      You can skip displaying some of these states by setting
      ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
      'histedit', 'merge', 'rebase', or 'unshelve'.

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

      - show more information about the repository status, abbreviating
        added, removed, modified, deleted, and untracked paths::

          hg status -v -t mardu

    Returns 0 on success.

    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')
    terse = opts.get('terse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif revs and terse:
        msg = _('cannot use --terse with --rev')
        raise error.Abort(msg)
    elif change:
        repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
        ctx2 = scmutil.revsingle(repo, change, None)
        ctx1 = ctx2.p1()
    else:
        repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
        ctx1, ctx2 = scmutil.revpair(repo, revs)

    if pats or ui.configbool('commands', 'status.relative'):
        cwd = repo.getcwd()
    else:
        cwd = ''

    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states

    if not show:
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(ctx2, pats, opts)
    if terse:
        # we need to compute clean and unknown to terse
        stat = repo.status(ctx1.node(), ctx2.node(), m,
                           'ignored' in show or 'i' in terse,
                           True, True, opts.get('subrepos'))

        stat = cmdutil.tersedir(stat, terse)
    else:
        stat = repo.status(ctx1.node(), ctx2.node(), m,
                           'ignored' in show, 'clean' in show,
                           'unknown' in show, opts.get('subrepos'))

    changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)

    if (opts.get('all') or opts.get('copies')
        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
        copy = copies.pathcopies(ctx1, ctx2, m)

    ui.pager('status')
    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')

    if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
        and not ui.plain()):
        cmdutil.morestatus(repo, fm)
    fm.end()

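# The -0/--print0 form documented above is also convenient outside of xargs;
# this minimal sketch (not part of Mercurial, helper name invented) collects
# the added files of the working directory by splitting the NUL-separated
# output of the documented ``hg status -an0`` invocation:
def _example_added_files():
    import subprocess
    out = subprocess.check_output(['hg', 'status', '-an0'])
    # -a: added files only, -n: no status prefix, -0: NUL-terminated names
    return [p.decode() for p in out.split(b'\0') if p]
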
@command('^summary|sum',
    [('', 'remote', None, _('check for push and pull'))],
    '[--remote]', cmdtype=readonly)
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, phase and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('summary')
    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    ms = None
    try:
        ms = mergemod.mergestate.read(repo)
    except error.UnsupportedMergeRecords as e:
        s = ' '.join(e.recordtypes)
        ui.warn(
            _('warning: merge state has unsupported record types: %s\n') % s)
        unresolved = []
    else:
        unresolved = list(ms.unresolved())

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), p),
                 label=logcmdutil.changesetlabels(p))
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        if p.obsolete():
            ui.write(_(' (obsolete)'))
        if p.isunstable():
            instabilities = (ui.label(instability, 'trouble.%s' % instability)
                             for instability in p.instabilities())
            ui.write(' ('
                     + ', '.join(instabilities)
                     + ')')
        ui.write('\n')
        if p.description():
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _('branch: %s\n') % branch
    if branch != 'default':
        ui.write(m, label='log.branch')
    else:
        ui.status(m, label='log.branch')

    if marks:
        active = repo._activebookmark
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if active is not None:
            if active in marks:
                ui.write(' *' + active, label=bookmarks.activebookmarklabel)
                marks.remove(active)
            else:
                ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
        for m in marks:
            ui.write(' ' + m, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    status = repo.status(unknown=True)

    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in status.removed:
            status.removed.remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in status.added:
            status.added.remove(d)

    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]

    labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
              (ui.label(_('%d added'), 'status.added'), status.added),
              (ui.label(_('%d removed'), 'status.removed'), status.removed),
              (ui.label(_('%d renamed'), 'status.copied'), renamed),
              (ui.label(_('%d copied'), 'status.copied'), copied),
              (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
              (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
              (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
              (ui.label(_('%d subrepos'), 'status.modified'), subs)]
    t = []
    for l, s in labels:
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    if repo.vfs.exists('graftstate'):
        t += _(' (graft in progress)')
    if repo.vfs.exists('updatestate'):
        t += _(' (interrupted update)')
    elif len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (status.modified or status.added or status.removed or renamed or
              copied or subs):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if parents:
        pendingphase = max(p.phase() for p in parents)
    else:
        pendingphase = phases.public

    if pendingphase > phases.newcommitphase(ui):
        t += ' (%s)' % phases.phasenames[pendingphase]

    if cleanworkdir:
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
                                         bheads))

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    t = []
    draft = len(repo.revs('draft()'))
    if draft:
        t.append(_('%d draft') % draft)
    secret = len(repo.revs('secret()'))
    if secret:
        t.append(_('%d secret') % secret)

    if draft or secret:
        ui.status(_('phases: %s\n') % ', '.join(t))

    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        for trouble in ("orphan", "contentdivergent", "phasedivergent"):
            numtrouble = len(repo.revs(trouble + "()"))
            # We write all the possibilities to ease translation
            troublemsg = {
                "orphan": _("orphan: %d changesets"),
                "contentdivergent": _("content-divergent: %d changesets"),
                "phasedivergent": _("phase-divergent: %d changesets"),
            }
            if numtrouble > 0:
                ui.status(troublemsg[trouble] % numtrouble + "\n")

    cmdutil.summaryhooks(ui, repo)

    if opts.get('remote'):
        needsincoming, needsoutgoing = True, True
    else:
        needsincoming, needsoutgoing = False, False
        for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
            if i:
                needsincoming = True
            if o:
                needsoutgoing = True
        if not needsincoming and not needsoutgoing:
            return

    def getincoming():
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        try:
            other = hg.peer(repo, {}, source)
        except error.RepoError:
            if opts.get('remote'):
                raise
            return source, sbranch, None, None, None
        revs, checkout = hg.addbranchrevs(repo, other, branches, None)
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        repo.ui.popbuffer()
        return source, sbranch, other, commoninc, commoninc[1]

    if needsincoming:
        source, sbranch, sother, commoninc, incoming = getincoming()
    else:
        source = sbranch = sother = commoninc = incoming = None

    def getoutgoing():
        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            try:
                dother = hg.peer(repo, {}, dest)
            except error.RepoError:
                if opts.get('remote'):
                    raise
                return dest, dbranch, None, None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        elif sother is None:
            # there is no explicit destination peer, but source one is invalid
            return dest, dbranch, None, None
        else:
            dother = sother
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            common = None
        else:
            common = commoninc
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
                                                commoninc=common)
        repo.ui.popbuffer()
        return dest, dbranch, dother, outgoing

    if needsoutgoing:
        dest, dbranch, dother, outgoing = getoutgoing()
5201 dest, dbranch, dother, outgoing = getoutgoing()
5201 else:
5202 else:
5202 dest = dbranch = dother = outgoing = None
5203 dest = dbranch = dother = outgoing = None
5203
5204
5204 if opts.get('remote'):
5205 if opts.get('remote'):
5205 t = []
5206 t = []
5206 if incoming:
5207 if incoming:
5207 t.append(_('1 or more incoming'))
5208 t.append(_('1 or more incoming'))
5208 o = outgoing.missing
5209 o = outgoing.missing
5209 if o:
5210 if o:
5210 t.append(_('%d outgoing') % len(o))
5211 t.append(_('%d outgoing') % len(o))
5211 other = dother or sother
5212 other = dother or sother
5212 if 'bookmarks' in other.listkeys('namespaces'):
5213 if 'bookmarks' in other.listkeys('namespaces'):
5213 counts = bookmarks.summary(repo, other)
5214 counts = bookmarks.summary(repo, other)
5214 if counts[0] > 0:
5215 if counts[0] > 0:
5215 t.append(_('%d incoming bookmarks') % counts[0])
5216 t.append(_('%d incoming bookmarks') % counts[0])
5216 if counts[1] > 0:
5217 if counts[1] > 0:
5217 t.append(_('%d outgoing bookmarks') % counts[1])
5218 t.append(_('%d outgoing bookmarks') % counts[1])
5218
5219
5219 if t:
5220 if t:
5220 # i18n: column positioning for "hg summary"
5221 # i18n: column positioning for "hg summary"
5221 ui.write(_('remote: %s\n') % (', '.join(t)))
5222 ui.write(_('remote: %s\n') % (', '.join(t)))
5222 else:
5223 else:
5223 # i18n: column positioning for "hg summary"
5224 # i18n: column positioning for "hg summary"
5224 ui.status(_('remote: (synced)\n'))
5225 ui.status(_('remote: (synced)\n'))
5225
5226
5226 cmdutil.summaryremotehooks(ui, repo, opts,
5227 cmdutil.summaryremotehooks(ui, repo, opts,
5227 ((source, sbranch, sother, commoninc),
5228 ((source, sbranch, sother, commoninc),
5228 (dest, dbranch, dother, outgoing)))
5229 (dest, dbranch, dother, outgoing)))
5229
5230
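The remote-status block above collects short phrases for whatever is pending (incoming changesets, outgoing changesets, bookmark deltas) into a list, then either joins them or reports "(synced)". A minimal standalone sketch of that pattern, in plain Python with made-up counts and no Mercurial imports:

def remoteline(incoming, outgoing, inbookmarks, outbookmarks):
    # Mirrors the fragment-collecting pattern used for the "remote:" line:
    # gather phrases for whatever is pending, then join or report "(synced)".
    parts = []
    if incoming:
        parts.append('1 or more incoming')
    if outgoing:
        parts.append('%d outgoing' % outgoing)
    if inbookmarks:
        parts.append('%d incoming bookmarks' % inbookmarks)
    if outbookmarks:
        parts.append('%d outgoing bookmarks' % outbookmarks)
    if parts:
        return 'remote: %s' % ', '.join(parts)
    return 'remote: (synced)'

print(remoteline(True, 3, 0, 1))   # remote: 1 or more incoming, 3 outgoing, 1 outgoing bookmarks
print(remoteline(False, 0, 0, 0))  # remote: (synced)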
5230 @command('tag',
5231 @command('tag',
5231 [('f', 'force', None, _('force tag')),
5232 [('f', 'force', None, _('force tag')),
5232 ('l', 'local', None, _('make the tag local')),
5233 ('l', 'local', None, _('make the tag local')),
5233 ('r', 'rev', '', _('revision to tag'), _('REV')),
5234 ('r', 'rev', '', _('revision to tag'), _('REV')),
5234 ('', 'remove', None, _('remove a tag')),
5235 ('', 'remove', None, _('remove a tag')),
5235 # -l/--local is already there, commitopts cannot be used
5236 # -l/--local is already there, commitopts cannot be used
5236 ('e', 'edit', None, _('invoke editor on commit messages')),
5237 ('e', 'edit', None, _('invoke editor on commit messages')),
5237 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5238 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5238 ] + commitopts2,
5239 ] + commitopts2,
5239 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5240 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5240 def tag(ui, repo, name1, *names, **opts):
5241 def tag(ui, repo, name1, *names, **opts):
5241 """add one or more tags for the current or given revision
5242 """add one or more tags for the current or given revision
5242
5243
5243 Name a particular revision using <name>.
5244 Name a particular revision using <name>.
5244
5245
5245 Tags are used to name particular revisions of the repository and are
5246 Tags are used to name particular revisions of the repository and are
5246 useful for comparing different revisions, for going back to significant
5247 useful for comparing different revisions, for going back to significant
5247 earlier versions, or for marking branch points as releases. Changing
5248 earlier versions, or for marking branch points as releases. Changing
5248 an existing tag is normally disallowed; use -f/--force to override.
5249 an existing tag is normally disallowed; use -f/--force to override.
5249
5250
5250 If no revision is given, the parent of the working directory is
5251 If no revision is given, the parent of the working directory is
5251 used.
5252 used.
5252
5253
5253 To facilitate version control, distribution, and merging of tags,
5254 To facilitate version control, distribution, and merging of tags,
5254 they are stored as a file named ".hgtags" which is managed similarly
5255 they are stored as a file named ".hgtags" which is managed similarly
5255 to other project files and can be hand-edited if necessary. This
5256 to other project files and can be hand-edited if necessary. This
5256 also means that tagging creates a new commit. The file
5257 also means that tagging creates a new commit. The file
5257 ".hg/localtags" is used for local tags (not shared among
5258 ".hg/localtags" is used for local tags (not shared among
5258 repositories).
5259 repositories).
5259
5260
5260 Tag commits are usually made at the head of a branch. If the parent
5261 Tag commits are usually made at the head of a branch. If the parent
5261 of the working directory is not a branch head, :hg:`tag` aborts; use
5262 of the working directory is not a branch head, :hg:`tag` aborts; use
5262 -f/--force to force the tag commit to be based on a non-head
5263 -f/--force to force the tag commit to be based on a non-head
5263 changeset.
5264 changeset.
5264
5265
5265 See :hg:`help dates` for a list of formats valid for -d/--date.
5266 See :hg:`help dates` for a list of formats valid for -d/--date.
5266
5267
5267 Since tag names have priority over branch names during revision
5268 Since tag names have priority over branch names during revision
5268 lookup, using an existing branch name as a tag name is discouraged.
5269 lookup, using an existing branch name as a tag name is discouraged.
5269
5270
5270 Returns 0 on success.
5271 Returns 0 on success.
5271 """
5272 """
5272 opts = pycompat.byteskwargs(opts)
5273 opts = pycompat.byteskwargs(opts)
5273 wlock = lock = None
5274 wlock = lock = None
5274 try:
5275 try:
5275 wlock = repo.wlock()
5276 wlock = repo.wlock()
5276 lock = repo.lock()
5277 lock = repo.lock()
5277 rev_ = "."
5278 rev_ = "."
5278 names = [t.strip() for t in (name1,) + names]
5279 names = [t.strip() for t in (name1,) + names]
5279 if len(names) != len(set(names)):
5280 if len(names) != len(set(names)):
5280 raise error.Abort(_('tag names must be unique'))
5281 raise error.Abort(_('tag names must be unique'))
5281 for n in names:
5282 for n in names:
5282 scmutil.checknewlabel(repo, n, 'tag')
5283 scmutil.checknewlabel(repo, n, 'tag')
5283 if not n:
5284 if not n:
5284 raise error.Abort(_('tag names cannot consist entirely of '
5285 raise error.Abort(_('tag names cannot consist entirely of '
5285 'whitespace'))
5286 'whitespace'))
5286 if opts.get('rev') and opts.get('remove'):
5287 if opts.get('rev') and opts.get('remove'):
5287 raise error.Abort(_("--rev and --remove are incompatible"))
5288 raise error.Abort(_("--rev and --remove are incompatible"))
5288 if opts.get('rev'):
5289 if opts.get('rev'):
5289 rev_ = opts['rev']
5290 rev_ = opts['rev']
5290 message = opts.get('message')
5291 message = opts.get('message')
5291 if opts.get('remove'):
5292 if opts.get('remove'):
5292 if opts.get('local'):
5293 if opts.get('local'):
5293 expectedtype = 'local'
5294 expectedtype = 'local'
5294 else:
5295 else:
5295 expectedtype = 'global'
5296 expectedtype = 'global'
5296
5297
5297 for n in names:
5298 for n in names:
5298 if not repo.tagtype(n):
5299 if not repo.tagtype(n):
5299 raise error.Abort(_("tag '%s' does not exist") % n)
5300 raise error.Abort(_("tag '%s' does not exist") % n)
5300 if repo.tagtype(n) != expectedtype:
5301 if repo.tagtype(n) != expectedtype:
5301 if expectedtype == 'global':
5302 if expectedtype == 'global':
5302 raise error.Abort(_("tag '%s' is not a global tag") % n)
5303 raise error.Abort(_("tag '%s' is not a global tag") % n)
5303 else:
5304 else:
5304 raise error.Abort(_("tag '%s' is not a local tag") % n)
5305 raise error.Abort(_("tag '%s' is not a local tag") % n)
5305 rev_ = 'null'
5306 rev_ = 'null'
5306 if not message:
5307 if not message:
5307 # we don't translate commit messages
5308 # we don't translate commit messages
5308 message = 'Removed tag %s' % ', '.join(names)
5309 message = 'Removed tag %s' % ', '.join(names)
5309 elif not opts.get('force'):
5310 elif not opts.get('force'):
5310 for n in names:
5311 for n in names:
5311 if n in repo.tags():
5312 if n in repo.tags():
5312 raise error.Abort(_("tag '%s' already exists "
5313 raise error.Abort(_("tag '%s' already exists "
5313 "(use -f to force)") % n)
5314 "(use -f to force)") % n)
5314 if not opts.get('local'):
5315 if not opts.get('local'):
5315 p1, p2 = repo.dirstate.parents()
5316 p1, p2 = repo.dirstate.parents()
5316 if p2 != nullid:
5317 if p2 != nullid:
5317 raise error.Abort(_('uncommitted merge'))
5318 raise error.Abort(_('uncommitted merge'))
5318 bheads = repo.branchheads()
5319 bheads = repo.branchheads()
5319 if not opts.get('force') and bheads and p1 not in bheads:
5320 if not opts.get('force') and bheads and p1 not in bheads:
5320 raise error.Abort(_('working directory is not at a branch head '
5321 raise error.Abort(_('working directory is not at a branch head '
5321 '(use -f to force)'))
5322 '(use -f to force)'))
5322 node = scmutil.revsingle(repo, rev_).node()
5323 node = scmutil.revsingle(repo, rev_).node()
5323
5324
5324 if not message:
5325 if not message:
5325 # we don't translate commit messages
5326 # we don't translate commit messages
5326 message = ('Added tag %s for changeset %s' %
5327 message = ('Added tag %s for changeset %s' %
5327 (', '.join(names), short(node)))
5328 (', '.join(names), short(node)))
5328
5329
5329 date = opts.get('date')
5330 date = opts.get('date')
5330 if date:
5331 if date:
5331 date = dateutil.parsedate(date)
5332 date = dateutil.parsedate(date)
5332
5333
5333 if opts.get('remove'):
5334 if opts.get('remove'):
5334 editform = 'tag.remove'
5335 editform = 'tag.remove'
5335 else:
5336 else:
5336 editform = 'tag.add'
5337 editform = 'tag.add'
5337 editor = cmdutil.getcommiteditor(editform=editform,
5338 editor = cmdutil.getcommiteditor(editform=editform,
5338 **pycompat.strkwargs(opts))
5339 **pycompat.strkwargs(opts))
5339
5340
5340 # don't allow tagging the null rev
5341 # don't allow tagging the null rev
5341 if (not opts.get('remove') and
5342 if (not opts.get('remove') and
5342 scmutil.revsingle(repo, rev_).rev() == nullrev):
5343 scmutil.revsingle(repo, rev_).rev() == nullrev):
5343 raise error.Abort(_("cannot tag null revision"))
5344 raise error.Abort(_("cannot tag null revision"))
5344
5345
5345 tagsmod.tag(repo, names, node, message, opts.get('local'),
5346 tagsmod.tag(repo, names, node, message, opts.get('local'),
5346 opts.get('user'), date, editor=editor)
5347 opts.get('user'), date, editor=editor)
5347 finally:
5348 finally:
5348 release(lock, wlock)
5349 release(lock, wlock)
5349
5350
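When no -m/--message is given, the code above falls back to an automatic commit message: 'Removed tag ...' when removing, otherwise 'Added tag ... for changeset ...'. A hypothetical helper sketching just that defaulting logic (the function name and sample hashes are illustrative, not part of Mercurial's API):

def defaulttagmessage(names, shortnode, remove=False):
    # Illustrative only: mirrors the fallback commit message built by "hg tag".
    if remove:
        return 'Removed tag %s' % ', '.join(names)
    return 'Added tag %s for changeset %s' % (', '.join(names), shortnode)

print(defaulttagmessage(['v1.0'], 'a1b2c3d4e5f6'))
print(defaulttagmessage(['v1.0', 'stable'], None, remove=True))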
5350 @command('tags', formatteropts, '', cmdtype=readonly)
5351 @command('tags', formatteropts, '', cmdtype=readonly)
5351 def tags(ui, repo, **opts):
5352 def tags(ui, repo, **opts):
5352 """list repository tags
5353 """list repository tags
5353
5354
5354 This lists both regular and local tags. When the -v/--verbose
5355 This lists both regular and local tags. When the -v/--verbose
5355 switch is used, a third column "local" is printed for local tags.
5356 switch is used, a third column "local" is printed for local tags.
5356 When the -q/--quiet switch is used, only the tag name is printed.
5357 When the -q/--quiet switch is used, only the tag name is printed.
5357
5358
5358 Returns 0 on success.
5359 Returns 0 on success.
5359 """
5360 """
5360
5361
5361 opts = pycompat.byteskwargs(opts)
5362 opts = pycompat.byteskwargs(opts)
5362 ui.pager('tags')
5363 ui.pager('tags')
5363 fm = ui.formatter('tags', opts)
5364 fm = ui.formatter('tags', opts)
5364 hexfunc = fm.hexfunc
5365 hexfunc = fm.hexfunc
5365 tagtype = ""
5366 tagtype = ""
5366
5367
5367 for t, n in reversed(repo.tagslist()):
5368 for t, n in reversed(repo.tagslist()):
5368 hn = hexfunc(n)
5369 hn = hexfunc(n)
5369 label = 'tags.normal'
5370 label = 'tags.normal'
5370 tagtype = ''
5371 tagtype = ''
5371 if repo.tagtype(t) == 'local':
5372 if repo.tagtype(t) == 'local':
5372 label = 'tags.local'
5373 label = 'tags.local'
5373 tagtype = 'local'
5374 tagtype = 'local'
5374
5375
5375 fm.startitem()
5376 fm.startitem()
5376 fm.write('tag', '%s', t, label=label)
5377 fm.write('tag', '%s', t, label=label)
5377 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5378 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5378 fm.condwrite(not ui.quiet, 'rev node', fmt,
5379 fm.condwrite(not ui.quiet, 'rev node', fmt,
5379 repo.changelog.rev(n), hn, label=label)
5380 repo.changelog.rev(n), hn, label=label)
5380 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5381 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5381 tagtype, label=label)
5382 tagtype, label=label)
5382 fm.plain('\n')
5383 fm.plain('\n')
5383 fm.end()
5384 fm.end()
5384
5385
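The format string above pads each tag name to roughly 30 display columns before appending 'rev:node'. A standalone sketch of that alignment, using plain len() where the real code uses encoding.colwidth() to account for wide characters (sample tag data is made up):

tags = [('tip', 42, 'a1b2c3d4e5f6'), ('v1.0', 10, '0123456789ab')]
for name, rev, node in tags:
    # len() stands in for encoding.colwidth(); max() guards against negative
    # padding for very long names, which is an extra safety net in this sketch.
    fmt = ' ' * max(30 - len(name), 0) + ' %5d:%s'
    print(name + fmt % (rev, node))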
5385 @command('tip',
5386 @command('tip',
5386 [('p', 'patch', None, _('show patch')),
5387 [('p', 'patch', None, _('show patch')),
5387 ('g', 'git', None, _('use git extended diff format')),
5388 ('g', 'git', None, _('use git extended diff format')),
5388 ] + templateopts,
5389 ] + templateopts,
5389 _('[-p] [-g]'))
5390 _('[-p] [-g]'))
5390 def tip(ui, repo, **opts):
5391 def tip(ui, repo, **opts):
5391 """show the tip revision (DEPRECATED)
5392 """show the tip revision (DEPRECATED)
5392
5393
5393 The tip revision (usually just called the tip) is the changeset
5394 The tip revision (usually just called the tip) is the changeset
5394 most recently added to the repository (and therefore the most
5395 most recently added to the repository (and therefore the most
5395 recently changed head).
5396 recently changed head).
5396
5397
5397 If you have just made a commit, that commit will be the tip. If
5398 If you have just made a commit, that commit will be the tip. If
5398 you have just pulled changes from another repository, the tip of
5399 you have just pulled changes from another repository, the tip of
5399 that repository becomes the current tip. The "tip" tag is special
5400 that repository becomes the current tip. The "tip" tag is special
5400 and cannot be renamed or assigned to a different changeset.
5401 and cannot be renamed or assigned to a different changeset.
5401
5402
5402 This command is deprecated, please use :hg:`heads` instead.
5403 This command is deprecated, please use :hg:`heads` instead.
5403
5404
5404 Returns 0 on success.
5405 Returns 0 on success.
5405 """
5406 """
5406 opts = pycompat.byteskwargs(opts)
5407 opts = pycompat.byteskwargs(opts)
5407 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5408 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5408 displayer.show(repo['tip'])
5409 displayer.show(repo['tip'])
5409 displayer.close()
5410 displayer.close()
5410
5411
5411 @command('unbundle',
5412 @command('unbundle',
5412 [('u', 'update', None,
5413 [('u', 'update', None,
5413 _('update to new branch head if changesets were unbundled'))],
5414 _('update to new branch head if changesets were unbundled'))],
5414 _('[-u] FILE...'))
5415 _('[-u] FILE...'))
5415 def unbundle(ui, repo, fname1, *fnames, **opts):
5416 def unbundle(ui, repo, fname1, *fnames, **opts):
5416 """apply one or more bundle files
5417 """apply one or more bundle files
5417
5418
5418 Apply one or more bundle files generated by :hg:`bundle`.
5419 Apply one or more bundle files generated by :hg:`bundle`.
5419
5420
5420 Returns 0 on success, 1 if an update has unresolved files.
5421 Returns 0 on success, 1 if an update has unresolved files.
5421 """
5422 """
5422 fnames = (fname1,) + fnames
5423 fnames = (fname1,) + fnames
5423
5424
5424 with repo.lock():
5425 with repo.lock():
5425 for fname in fnames:
5426 for fname in fnames:
5426 f = hg.openpath(ui, fname)
5427 f = hg.openpath(ui, fname)
5427 gen = exchange.readbundle(ui, f, fname)
5428 gen = exchange.readbundle(ui, f, fname)
5428 if isinstance(gen, streamclone.streamcloneapplier):
5429 if isinstance(gen, streamclone.streamcloneapplier):
5429 raise error.Abort(
5430 raise error.Abort(
5430 _('packed bundles cannot be applied with '
5431 _('packed bundles cannot be applied with '
5431 '"hg unbundle"'),
5432 '"hg unbundle"'),
5432 hint=_('use "hg debugapplystreamclonebundle"'))
5433 hint=_('use "hg debugapplystreamclonebundle"'))
5433 url = 'bundle:' + fname
5434 url = 'bundle:' + fname
5434 try:
5435 try:
5435 txnname = 'unbundle'
5436 txnname = 'unbundle'
5436 if not isinstance(gen, bundle2.unbundle20):
5437 if not isinstance(gen, bundle2.unbundle20):
5437 txnname = 'unbundle\n%s' % util.hidepassword(url)
5438 txnname = 'unbundle\n%s' % util.hidepassword(url)
5438 with repo.transaction(txnname) as tr:
5439 with repo.transaction(txnname) as tr:
5439 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5440 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5440 url=url)
5441 url=url)
5441 except error.BundleUnknownFeatureError as exc:
5442 except error.BundleUnknownFeatureError as exc:
5442 raise error.Abort(
5443 raise error.Abort(
5443 _('%s: unknown bundle feature, %s') % (fname, exc),
5444 _('%s: unknown bundle feature, %s') % (fname, exc),
5444 hint=_("see https://mercurial-scm.org/"
5445 hint=_("see https://mercurial-scm.org/"
5445 "wiki/BundleFeature for more "
5446 "wiki/BundleFeature for more "
5446 "information"))
5447 "information"))
5447 modheads = bundle2.combinechangegroupresults(op)
5448 modheads = bundle2.combinechangegroupresults(op)
5448
5449
5449 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5450 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5450
5451
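The loop above takes the repository lock once and then opens one transaction per bundle file, applying each bundle inside its own transaction. A rough standalone sketch of that nesting, with the lock and transaction replaced by a printing stand-in (nothing here calls the real Mercurial API):

from contextlib import contextmanager

@contextmanager
def scope(name):
    # Stand-in for repo.lock() / repo.transaction(): prints enter and exit so
    # the nesting used by "hg unbundle" (one lock, one transaction per file)
    # is visible.
    print('enter %s' % name)
    try:
        yield
    finally:
        print('exit  %s' % name)

def unbundle_sketch(bundlefiles):
    with scope('repo lock'):
        for fname in bundlefiles:
            with scope('transaction: unbundle %s' % fname):
                print('  applying %s' % fname)

unbundle_sketch(['a.hg', 'b.hg'])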
5451 @command('^update|up|checkout|co',
5452 @command('^update|up|checkout|co',
5452 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5453 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5453 ('c', 'check', None, _('require clean working directory')),
5454 ('c', 'check', None, _('require clean working directory')),
5454 ('m', 'merge', None, _('merge uncommitted changes')),
5455 ('m', 'merge', None, _('merge uncommitted changes')),
5455 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5456 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5456 ('r', 'rev', '', _('revision'), _('REV'))
5457 ('r', 'rev', '', _('revision'), _('REV'))
5457 ] + mergetoolopts,
5458 ] + mergetoolopts,
5458 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5459 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5459 def update(ui, repo, node=None, **opts):
5460 def update(ui, repo, node=None, **opts):
5460 """update working directory (or switch revisions)
5461 """update working directory (or switch revisions)
5461
5462
5462 Update the repository's working directory to the specified
5463 Update the repository's working directory to the specified
5463 changeset. If no changeset is specified, update to the tip of the
5464 changeset. If no changeset is specified, update to the tip of the
5464 current named branch and move the active bookmark (see :hg:`help
5465 current named branch and move the active bookmark (see :hg:`help
5465 bookmarks`).
5466 bookmarks`).
5466
5467
5467 Update sets the working directory's parent revision to the specified
5468 Update sets the working directory's parent revision to the specified
5468 changeset (see :hg:`help parents`).
5469 changeset (see :hg:`help parents`).
5469
5470
5470 If the changeset is not a descendant or ancestor of the working
5471 If the changeset is not a descendant or ancestor of the working
5471 directory's parent and there are uncommitted changes, the update is
5472 directory's parent and there are uncommitted changes, the update is
5472 aborted. With the -c/--check option, the working directory is checked
5473 aborted. With the -c/--check option, the working directory is checked
5473 for uncommitted changes; if none are found, the working directory is
5474 for uncommitted changes; if none are found, the working directory is
5474 updated to the specified changeset.
5475 updated to the specified changeset.
5475
5476
5476 .. container:: verbose
5477 .. container:: verbose
5477
5478
5478 The -C/--clean, -c/--check, and -m/--merge options control what
5479 The -C/--clean, -c/--check, and -m/--merge options control what
5479 happens if the working directory contains uncommitted changes.
5480 happens if the working directory contains uncommitted changes.
5480 At most one of them can be specified.
5481 At most one of them can be specified.
5481
5482
5482 1. If no option is specified, and if
5483 1. If no option is specified, and if
5483 the requested changeset is an ancestor or descendant of
5484 the requested changeset is an ancestor or descendant of
5484 the working directory's parent, the uncommitted changes
5485 the working directory's parent, the uncommitted changes
5485 are merged into the requested changeset and the merged
5486 are merged into the requested changeset and the merged
5486 result is left uncommitted. If the requested changeset is
5487 result is left uncommitted. If the requested changeset is
5487 not an ancestor or descendant (that is, it is on another
5488 not an ancestor or descendant (that is, it is on another
5488 branch), the update is aborted and the uncommitted changes
5489 branch), the update is aborted and the uncommitted changes
5489 are preserved.
5490 are preserved.
5490
5491
5491 2. With the -m/--merge option, the update is allowed even if the
5492 2. With the -m/--merge option, the update is allowed even if the
5492 requested changeset is not an ancestor or descendant of
5493 requested changeset is not an ancestor or descendant of
5493 the working directory's parent.
5494 the working directory's parent.
5494
5495
5495 3. With the -c/--check option, the update is aborted and the
5496 3. With the -c/--check option, the update is aborted and the
5496 uncommitted changes are preserved.
5497 uncommitted changes are preserved.
5497
5498
5498 4. With the -C/--clean option, uncommitted changes are discarded and
5499 4. With the -C/--clean option, uncommitted changes are discarded and
5499 the working directory is updated to the requested changeset.
5500 the working directory is updated to the requested changeset.
5500
5501
5501 To cancel an uncommitted merge (and lose your changes), use
5502 To cancel an uncommitted merge (and lose your changes), use
5502 :hg:`merge --abort`.
5503 :hg:`merge --abort`.
5503
5504
5504 Use null as the changeset to remove the working directory (like
5505 Use null as the changeset to remove the working directory (like
5505 :hg:`clone -U`).
5506 :hg:`clone -U`).
5506
5507
5507 If you want to revert just one file to an older revision, use
5508 If you want to revert just one file to an older revision, use
5508 :hg:`revert [-r REV] NAME`.
5509 :hg:`revert [-r REV] NAME`.
5509
5510
5510 See :hg:`help dates` for a list of formats valid for -d/--date.
5511 See :hg:`help dates` for a list of formats valid for -d/--date.
5511
5512
5512 Returns 0 on success, 1 if there are unresolved files.
5513 Returns 0 on success, 1 if there are unresolved files.
5513 """
5514 """
5514 rev = opts.get(r'rev')
5515 rev = opts.get(r'rev')
5515 date = opts.get(r'date')
5516 date = opts.get(r'date')
5516 clean = opts.get(r'clean')
5517 clean = opts.get(r'clean')
5517 check = opts.get(r'check')
5518 check = opts.get(r'check')
5518 merge = opts.get(r'merge')
5519 merge = opts.get(r'merge')
5519 if rev and node:
5520 if rev and node:
5520 raise error.Abort(_("please specify just one revision"))
5521 raise error.Abort(_("please specify just one revision"))
5521
5522
5522 if ui.configbool('commands', 'update.requiredest'):
5523 if ui.configbool('commands', 'update.requiredest'):
5523 if not node and not rev and not date:
5524 if not node and not rev and not date:
5524 raise error.Abort(_('you must specify a destination'),
5525 raise error.Abort(_('you must specify a destination'),
5525 hint=_('for example: hg update ".::"'))
5526 hint=_('for example: hg update ".::"'))
5526
5527
5527 if rev is None or rev == '':
5528 if rev is None or rev == '':
5528 rev = node
5529 rev = node
5529
5530
5530 if date and rev is not None:
5531 if date and rev is not None:
5531 raise error.Abort(_("you can't specify a revision and a date"))
5532 raise error.Abort(_("you can't specify a revision and a date"))
5532
5533
5533 if len([x for x in (clean, check, merge) if x]) > 1:
5534 if len([x for x in (clean, check, merge) if x]) > 1:
5534 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5535 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5535 "or -m/--merge"))
5536 "or -m/--merge"))
5536
5537
5537 updatecheck = None
5538 updatecheck = None
5538 if check:
5539 if check:
5539 updatecheck = 'abort'
5540 updatecheck = 'abort'
5540 elif merge:
5541 elif merge:
5541 updatecheck = 'none'
5542 updatecheck = 'none'
5542
5543
5543 with repo.wlock():
5544 with repo.wlock():
5544 cmdutil.clearunfinished(repo)
5545 cmdutil.clearunfinished(repo)
5545
5546
5546 if date:
5547 if date:
5547 rev = cmdutil.finddate(ui, repo, date)
5548 rev = cmdutil.finddate(ui, repo, date)
5548
5549
5549 # if we defined a bookmark, we have to remember the original name
5550 # if we defined a bookmark, we have to remember the original name
5550 brev = rev
5551 brev = rev
5551 if rev:
5552 if rev:
5552 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5553 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5553 ctx = scmutil.revsingle(repo, rev, rev)
5554 ctx = scmutil.revsingle(repo, rev, rev)
5554 rev = ctx.rev()
5555 rev = ctx.rev()
5555 if ctx.hidden():
5556 if ctx.hidden():
5556 ctxstr = ctx.hex()[:12]
5557 ctxstr = ctx.hex()[:12]
5557 ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
5558 ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
5558
5559
5559 if ctx.obsolete():
5560 if ctx.obsolete():
5560 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5561 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5561 ui.warn("(%s)\n" % obsfatemsg)
5562 ui.warn("(%s)\n" % obsfatemsg)
5562
5563
5563 repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
5564 repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
5564
5565
5565 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5566 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5566 updatecheck=updatecheck)
5567 updatecheck=updatecheck)
5567
5568
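The option handling above enforces that at most one of -C/--clean, -c/--check and -m/--merge is set, then maps the chosen flag onto an internal updatecheck mode. A small standalone sketch of that mapping (the function name is illustrative; the values mirror the code above):

def pickupdatecheck(clean, check, merge):
    # At most one of the three flags may be set, mirroring the check above.
    if len([x for x in (clean, check, merge) if x]) > 1:
        raise ValueError('can only specify one of --clean, --check, or --merge')
    if check:
        return 'abort'   # refuse to update over uncommitted changes
    if merge:
        return 'none'    # allow the update, merging uncommitted changes
    return None          # default behaviour is decided elsewhere

print(pickupdatecheck(clean=False, check=True, merge=False))  # abort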
5568 @command('verify', [])
5569 @command('verify', [])
5569 def verify(ui, repo):
5570 def verify(ui, repo):
5570 """verify the integrity of the repository
5571 """verify the integrity of the repository
5571
5572
5572 Verify the integrity of the current repository.
5573 Verify the integrity of the current repository.
5573
5574
5574 This will perform an extensive check of the repository's
5575 This will perform an extensive check of the repository's
5575 integrity, validating the hashes and checksums of each entry in
5576 integrity, validating the hashes and checksums of each entry in
5576 the changelog, manifest, and tracked files, as well as the
5577 the changelog, manifest, and tracked files, as well as the
5577 integrity of their crosslinks and indices.
5578 integrity of their crosslinks and indices.
5578
5579
5579 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5580 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5580 for more information about recovery from corruption of the
5581 for more information about recovery from corruption of the
5581 repository.
5582 repository.
5582
5583
5583 Returns 0 on success, 1 if errors are encountered.
5584 Returns 0 on success, 1 if errors are encountered.
5584 """
5585 """
5585 return hg.verify(repo)
5586 return hg.verify(repo)
5586
5587
5587 @command('version', [] + formatteropts, norepo=True, cmdtype=readonly)
5588 @command('version', [] + formatteropts, norepo=True, cmdtype=readonly)
5588 def version_(ui, **opts):
5589 def version_(ui, **opts):
5589 """output version and copyright information"""
5590 """output version and copyright information"""
5590 opts = pycompat.byteskwargs(opts)
5591 opts = pycompat.byteskwargs(opts)
5591 if ui.verbose:
5592 if ui.verbose:
5592 ui.pager('version')
5593 ui.pager('version')
5593 fm = ui.formatter("version", opts)
5594 fm = ui.formatter("version", opts)
5594 fm.startitem()
5595 fm.startitem()
5595 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5596 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5596 util.version())
5597 util.version())
5597 license = _(
5598 license = _(
5598 "(see https://mercurial-scm.org for more information)\n"
5599 "(see https://mercurial-scm.org for more information)\n"
5599 "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
5600 "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
5600 "This is free software; see the source for copying conditions. "
5601 "This is free software; see the source for copying conditions. "
5601 "There is NO\nwarranty; "
5602 "There is NO\nwarranty; "
5602 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5603 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5603 )
5604 )
5604 if not ui.quiet:
5605 if not ui.quiet:
5605 fm.plain(license)
5606 fm.plain(license)
5606
5607
5607 if ui.verbose:
5608 if ui.verbose:
5608 fm.plain(_("\nEnabled extensions:\n\n"))
5609 fm.plain(_("\nEnabled extensions:\n\n"))
5609 # format names and versions into columns
5610 # format names and versions into columns
5610 names = []
5611 names = []
5611 vers = []
5612 vers = []
5612 isinternals = []
5613 isinternals = []
5613 for name, module in extensions.extensions():
5614 for name, module in extensions.extensions():
5614 names.append(name)
5615 names.append(name)
5615 vers.append(extensions.moduleversion(module) or None)
5616 vers.append(extensions.moduleversion(module) or None)
5616 isinternals.append(extensions.ismoduleinternal(module))
5617 isinternals.append(extensions.ismoduleinternal(module))
5617 fn = fm.nested("extensions")
5618 fn = fm.nested("extensions")
5618 if names:
5619 if names:
5619 namefmt = " %%-%ds " % max(len(n) for n in names)
5620 namefmt = " %%-%ds " % max(len(n) for n in names)
5620 places = [_("external"), _("internal")]
5621 places = [_("external"), _("internal")]
5621 for n, v, p in zip(names, vers, isinternals):
5622 for n, v, p in zip(names, vers, isinternals):
5622 fn.startitem()
5623 fn.startitem()
5623 fn.condwrite(ui.verbose, "name", namefmt, n)
5624 fn.condwrite(ui.verbose, "name", namefmt, n)
5624 if ui.verbose:
5625 if ui.verbose:
5625 fn.plain("%s " % places[p])
5626 fn.plain("%s " % places[p])
5626 fn.data(bundled=p)
5627 fn.data(bundled=p)
5627 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5628 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5628 if ui.verbose:
5629 if ui.verbose:
5629 fn.plain("\n")
5630 fn.plain("\n")
5630 fn.end()
5631 fn.end()
5631 fm.end()
5632 fm.end()
5632
5633
5633 def loadcmdtable(ui, name, cmdtable):
5634 def loadcmdtable(ui, name, cmdtable):
5634 """Load command functions from specified cmdtable
5635 """Load command functions from specified cmdtable
5635 """
5636 """
5636 overrides = [cmd for cmd in cmdtable if cmd in table]
5637 overrides = [cmd for cmd in cmdtable if cmd in table]
5637 if overrides:
5638 if overrides:
5638 ui.warn(_("extension '%s' overrides commands: %s\n")
5639 ui.warn(_("extension '%s' overrides commands: %s\n")
5639 % (name, " ".join(overrides)))
5640 % (name, " ".join(overrides)))
5640 table.update(cmdtable)
5641 table.update(cmdtable)
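loadcmdtable warns when an extension's command table collides with an existing command name before merging it in. A standalone sketch of that check with plain dicts (the table contents are made up):

def loadcmdtable_sketch(table, name, cmdtable):
    # Same shape as loadcmdtable above: warn about overrides, then merge.
    overrides = [cmd for cmd in cmdtable if cmd in table]
    if overrides:
        print("extension '%s' overrides commands: %s" % (name, ' '.join(overrides)))
    table.update(cmdtable)

builtin = {'tag': object(), 'tags': object(), '^update|up|checkout|co': object()}
loadcmdtable_sketch(builtin, 'myext', {'tags': object(), 'frobnicate': object()})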
@@ -1,1436 +1,1434 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 from .utils import (
44 from .utils import (
45 procutil,
45 procutil,
46 stringutil,
46 stringutil,
47 )
47 )
48
48
49 if pycompat.iswindows:
49 if pycompat.iswindows:
50 from . import scmwindows as scmplatform
50 from . import scmwindows as scmplatform
51 else:
51 else:
52 from . import scmposix as scmplatform
52 from . import scmposix as scmplatform
53
53
54 termsize = scmplatform.termsize
54 termsize = scmplatform.termsize
55
55
56 class status(tuple):
56 class status(tuple):
57 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
57 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
58 and 'ignored' properties are only relevant to the working copy.
58 and 'ignored' properties are only relevant to the working copy.
59 '''
59 '''
60
60
61 __slots__ = ()
61 __slots__ = ()
62
62
63 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
63 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
64 clean):
64 clean):
65 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
65 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
66 ignored, clean))
66 ignored, clean))
67
67
68 @property
68 @property
69 def modified(self):
69 def modified(self):
70 '''files that have been modified'''
70 '''files that have been modified'''
71 return self[0]
71 return self[0]
72
72
73 @property
73 @property
74 def added(self):
74 def added(self):
75 '''files that have been added'''
75 '''files that have been added'''
76 return self[1]
76 return self[1]
77
77
78 @property
78 @property
79 def removed(self):
79 def removed(self):
80 '''files that have been removed'''
80 '''files that have been removed'''
81 return self[2]
81 return self[2]
82
82
83 @property
83 @property
84 def deleted(self):
84 def deleted(self):
85 '''files that are in the dirstate, but have been deleted from the
85 '''files that are in the dirstate, but have been deleted from the
86 working copy (aka "missing")
86 working copy (aka "missing")
87 '''
87 '''
88 return self[3]
88 return self[3]
89
89
90 @property
90 @property
91 def unknown(self):
91 def unknown(self):
92 '''files not in the dirstate that are not ignored'''
92 '''files not in the dirstate that are not ignored'''
93 return self[4]
93 return self[4]
94
94
95 @property
95 @property
96 def ignored(self):
96 def ignored(self):
97 '''files not in the dirstate that are ignored (by _dirignore())'''
97 '''files not in the dirstate that are ignored (by _dirignore())'''
98 return self[5]
98 return self[5]
99
99
100 @property
100 @property
101 def clean(self):
101 def clean(self):
102 '''files that have not been modified'''
102 '''files that have not been modified'''
103 return self[6]
103 return self[6]
104
104
105 def __repr__(self, *args, **kwargs):
105 def __repr__(self, *args, **kwargs):
106 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
106 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
107 'unknown=%r, ignored=%r, clean=%r>') % self)
107 'unknown=%r, ignored=%r, clean=%r>') % self)
108
108
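Because status is a plain 7-tuple with named accessors, callers can build and consume it either positionally or through the properties. A short usage sketch, assuming a Mercurial installation is importable and using made-up file names:

from mercurial import scmutil  # assumes Mercurial is on the Python path

# Build a status result by hand: modified, added, removed, deleted,
# unknown, ignored, clean -- in that order, as defined above.
st = scmutil.status([b'a.txt'], [b'new.txt'], [], [], [], [], [b'ok.txt'])

print(st.modified)        # [b'a.txt']   -- named accessor
print(st[1])              # [b'new.txt'] -- plain tuple indexing still works
modified, added = st[:2]  # slicing and unpacking work because it is a real tuple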
109 def itersubrepos(ctx1, ctx2):
109 def itersubrepos(ctx1, ctx2):
110 """find subrepos in ctx1 or ctx2"""
110 """find subrepos in ctx1 or ctx2"""
111 # Create a (subpath, ctx) mapping where we prefer subpaths from
111 # Create a (subpath, ctx) mapping where we prefer subpaths from
112 # ctx1. The subpaths from ctx2 are important when the .hgsub file
112 # ctx1. The subpaths from ctx2 are important when the .hgsub file
113 # has been modified (in ctx2) but not yet committed (in ctx1).
113 # has been modified (in ctx2) but not yet committed (in ctx1).
114 subpaths = dict.fromkeys(ctx2.substate, ctx2)
114 subpaths = dict.fromkeys(ctx2.substate, ctx2)
115 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
115 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
116
116
117 missing = set()
117 missing = set()
118
118
119 for subpath in ctx2.substate:
119 for subpath in ctx2.substate:
120 if subpath not in ctx1.substate:
120 if subpath not in ctx1.substate:
121 del subpaths[subpath]
121 del subpaths[subpath]
122 missing.add(subpath)
122 missing.add(subpath)
123
123
124 for subpath, ctx in sorted(subpaths.iteritems()):
124 for subpath, ctx in sorted(subpaths.iteritems()):
125 yield subpath, ctx.sub(subpath)
125 yield subpath, ctx.sub(subpath)
126
126
127 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
127 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
128 # status and diff will have an accurate result when it does
128 # status and diff will have an accurate result when it does
129 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
129 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
130 # against itself.
130 # against itself.
131 for subpath in missing:
131 for subpath in missing:
132 yield subpath, ctx2.nullsub(subpath, ctx1)
132 yield subpath, ctx2.nullsub(subpath, ctx1)
133
133
134 def nochangesfound(ui, repo, excluded=None):
134 def nochangesfound(ui, repo, excluded=None):
135 '''Report no changes for push/pull, excluded is None or a list of
135 '''Report no changes for push/pull, excluded is None or a list of
136 nodes excluded from the push/pull.
136 nodes excluded from the push/pull.
137 '''
137 '''
138 secretlist = []
138 secretlist = []
139 if excluded:
139 if excluded:
140 for n in excluded:
140 for n in excluded:
141 ctx = repo[n]
141 ctx = repo[n]
142 if ctx.phase() >= phases.secret and not ctx.extinct():
142 if ctx.phase() >= phases.secret and not ctx.extinct():
143 secretlist.append(n)
143 secretlist.append(n)
144
144
145 if secretlist:
145 if secretlist:
146 ui.status(_("no changes found (ignored %d secret changesets)\n")
146 ui.status(_("no changes found (ignored %d secret changesets)\n")
147 % len(secretlist))
147 % len(secretlist))
148 else:
148 else:
149 ui.status(_("no changes found\n"))
149 ui.status(_("no changes found\n"))
150
150
151 def callcatch(ui, func):
151 def callcatch(ui, func):
152 """call func() with global exception handling
152 """call func() with global exception handling
153
153
154 return func() if no exception happens. otherwise do some error handling
154 return func() if no exception happens. otherwise do some error handling
155 and return an exit code accordingly. does not handle all exceptions.
155 and return an exit code accordingly. does not handle all exceptions.
156 """
156 """
157 try:
157 try:
158 try:
158 try:
159 return func()
159 return func()
160 except: # re-raises
160 except: # re-raises
161 ui.traceback()
161 ui.traceback()
162 raise
162 raise
163 # Global exception handling, alphabetically
163 # Global exception handling, alphabetically
164 # Mercurial-specific first, followed by built-in and library exceptions
164 # Mercurial-specific first, followed by built-in and library exceptions
165 except error.LockHeld as inst:
165 except error.LockHeld as inst:
166 if inst.errno == errno.ETIMEDOUT:
166 if inst.errno == errno.ETIMEDOUT:
167 reason = _('timed out waiting for lock held by %r') % inst.locker
167 reason = _('timed out waiting for lock held by %r') % inst.locker
168 else:
168 else:
169 reason = _('lock held by %r') % inst.locker
169 reason = _('lock held by %r') % inst.locker
170 ui.warn(_("abort: %s: %s\n")
170 ui.warn(_("abort: %s: %s\n")
171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
172 if not inst.locker:
172 if not inst.locker:
173 ui.warn(_("(lock might be very busy)\n"))
173 ui.warn(_("(lock might be very busy)\n"))
174 except error.LockUnavailable as inst:
174 except error.LockUnavailable as inst:
175 ui.warn(_("abort: could not lock %s: %s\n") %
175 ui.warn(_("abort: could not lock %s: %s\n") %
176 (inst.desc or stringutil.forcebytestr(inst.filename),
176 (inst.desc or stringutil.forcebytestr(inst.filename),
177 encoding.strtolocal(inst.strerror)))
177 encoding.strtolocal(inst.strerror)))
178 except error.OutOfBandError as inst:
178 except error.OutOfBandError as inst:
179 if inst.args:
179 if inst.args:
180 msg = _("abort: remote error:\n")
180 msg = _("abort: remote error:\n")
181 else:
181 else:
182 msg = _("abort: remote error\n")
182 msg = _("abort: remote error\n")
183 ui.warn(msg)
183 ui.warn(msg)
184 if inst.args:
184 if inst.args:
185 ui.warn(''.join(inst.args))
185 ui.warn(''.join(inst.args))
186 if inst.hint:
186 if inst.hint:
187 ui.warn('(%s)\n' % inst.hint)
187 ui.warn('(%s)\n' % inst.hint)
188 except error.RepoError as inst:
188 except error.RepoError as inst:
189 ui.warn(_("abort: %s!\n") % inst)
189 ui.warn(_("abort: %s!\n") % inst)
190 if inst.hint:
190 if inst.hint:
191 ui.warn(_("(%s)\n") % inst.hint)
191 ui.warn(_("(%s)\n") % inst.hint)
192 except error.ResponseError as inst:
192 except error.ResponseError as inst:
193 ui.warn(_("abort: %s") % inst.args[0])
193 ui.warn(_("abort: %s") % inst.args[0])
194 msg = inst.args[1]
194 msg = inst.args[1]
195 if isinstance(msg, type(u'')):
195 if isinstance(msg, type(u'')):
196 msg = pycompat.sysbytes(msg)
196 msg = pycompat.sysbytes(msg)
197 if not isinstance(msg, bytes):
197 if not isinstance(msg, bytes):
198 ui.warn(" %r\n" % (msg,))
198 ui.warn(" %r\n" % (msg,))
199 elif not msg:
199 elif not msg:
200 ui.warn(_(" empty string\n"))
200 ui.warn(_(" empty string\n"))
201 else:
201 else:
202 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
202 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
203 except error.CensoredNodeError as inst:
203 except error.CensoredNodeError as inst:
204 ui.warn(_("abort: file censored %s!\n") % inst)
204 ui.warn(_("abort: file censored %s!\n") % inst)
205 except error.RevlogError as inst:
205 except error.RevlogError as inst:
206 ui.warn(_("abort: %s!\n") % inst)
206 ui.warn(_("abort: %s!\n") % inst)
207 except error.InterventionRequired as inst:
207 except error.InterventionRequired as inst:
208 ui.warn("%s\n" % inst)
208 ui.warn("%s\n" % inst)
209 if inst.hint:
209 if inst.hint:
210 ui.warn(_("(%s)\n") % inst.hint)
210 ui.warn(_("(%s)\n") % inst.hint)
211 return 1
211 return 1
212 except error.WdirUnsupported:
212 except error.WdirUnsupported:
213 ui.warn(_("abort: working directory revision cannot be specified\n"))
213 ui.warn(_("abort: working directory revision cannot be specified\n"))
214 except error.Abort as inst:
214 except error.Abort as inst:
215 ui.warn(_("abort: %s\n") % inst)
215 ui.warn(_("abort: %s\n") % inst)
216 if inst.hint:
216 if inst.hint:
217 ui.warn(_("(%s)\n") % inst.hint)
217 ui.warn(_("(%s)\n") % inst.hint)
218 except ImportError as inst:
218 except ImportError as inst:
219 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
219 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
220 m = stringutil.forcebytestr(inst).split()[-1]
220 m = stringutil.forcebytestr(inst).split()[-1]
221 if m in "mpatch bdiff".split():
221 if m in "mpatch bdiff".split():
222 ui.warn(_("(did you forget to compile extensions?)\n"))
222 ui.warn(_("(did you forget to compile extensions?)\n"))
223 elif m in "zlib".split():
223 elif m in "zlib".split():
224 ui.warn(_("(is your Python install correct?)\n"))
224 ui.warn(_("(is your Python install correct?)\n"))
225 except IOError as inst:
225 except IOError as inst:
226 if util.safehasattr(inst, "code"):
226 if util.safehasattr(inst, "code"):
227 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
227 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
228 elif util.safehasattr(inst, "reason"):
228 elif util.safehasattr(inst, "reason"):
229 try: # usually it is in the form (errno, strerror)
229 try: # usually it is in the form (errno, strerror)
230 reason = inst.reason.args[1]
230 reason = inst.reason.args[1]
231 except (AttributeError, IndexError):
231 except (AttributeError, IndexError):
232 # it might be anything, for example a string
232 # it might be anything, for example a string
233 reason = inst.reason
233 reason = inst.reason
234 if isinstance(reason, unicode):
234 if isinstance(reason, unicode):
235 # SSLError of Python 2.7.9 contains a unicode
235 # SSLError of Python 2.7.9 contains a unicode
236 reason = encoding.unitolocal(reason)
236 reason = encoding.unitolocal(reason)
237 ui.warn(_("abort: error: %s\n") % reason)
237 ui.warn(_("abort: error: %s\n") % reason)
238 elif (util.safehasattr(inst, "args")
238 elif (util.safehasattr(inst, "args")
239 and inst.args and inst.args[0] == errno.EPIPE):
239 and inst.args and inst.args[0] == errno.EPIPE):
240 pass
240 pass
241 elif getattr(inst, "strerror", None):
241 elif getattr(inst, "strerror", None):
242 if getattr(inst, "filename", None):
242 if getattr(inst, "filename", None):
243 ui.warn(_("abort: %s: %s\n") % (
243 ui.warn(_("abort: %s: %s\n") % (
244 encoding.strtolocal(inst.strerror),
244 encoding.strtolocal(inst.strerror),
245 stringutil.forcebytestr(inst.filename)))
245 stringutil.forcebytestr(inst.filename)))
246 else:
246 else:
247 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
247 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
248 else:
248 else:
249 raise
249 raise
250 except OSError as inst:
250 except OSError as inst:
251 if getattr(inst, "filename", None) is not None:
251 if getattr(inst, "filename", None) is not None:
252 ui.warn(_("abort: %s: '%s'\n") % (
252 ui.warn(_("abort: %s: '%s'\n") % (
253 encoding.strtolocal(inst.strerror),
253 encoding.strtolocal(inst.strerror),
254 stringutil.forcebytestr(inst.filename)))
254 stringutil.forcebytestr(inst.filename)))
255 else:
255 else:
256 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
256 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
257 except MemoryError:
257 except MemoryError:
258 ui.warn(_("abort: out of memory\n"))
258 ui.warn(_("abort: out of memory\n"))
259 except SystemExit as inst:
259 except SystemExit as inst:
260 # Commands shouldn't sys.exit directly, but give a return code.
260 # Commands shouldn't sys.exit directly, but give a return code.
261 # Just in case, catch this and pass the exit code to the caller.
261 # Just in case, catch this and pass the exit code to the caller.
262 return inst.code
262 return inst.code
263 except socket.error as inst:
263 except socket.error as inst:
264 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
264 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
265
265
266 return -1
266 return -1
267
267
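callcatch translates a long list of exception types into user-facing messages plus an exit code. A much-reduced standalone analogue of that shape, using only generic exceptions that also appear above (illustrative only, not the real dispatch path):

def callcatch_sketch(func):
    # Reduced analogue of callcatch above: return func()'s result on success,
    # map a few known failures to a message and an exit code of -1,
    # and pass a SystemExit code straight through.
    try:
        return func()
    except MemoryError:
        print('abort: out of memory')
    except SystemExit as inst:
        return inst.code
    except OSError as inst:
        print('abort: %s' % (inst.strerror or inst))
    return -1

print(callcatch_sketch(lambda: 0))                      # 0
print(callcatch_sketch(lambda: open('/no/such/file')))  # abort message, then -1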
268 def checknewlabel(repo, lbl, kind):
268 def checknewlabel(repo, lbl, kind):
269 # Do not use the "kind" parameter in ui output.
269 # Do not use the "kind" parameter in ui output.
270 # It makes strings difficult to translate.
270 # It makes strings difficult to translate.
271 if lbl in ['tip', '.', 'null']:
271 if lbl in ['tip', '.', 'null']:
272 raise error.Abort(_("the name '%s' is reserved") % lbl)
272 raise error.Abort(_("the name '%s' is reserved") % lbl)
273 for c in (':', '\0', '\n', '\r'):
273 for c in (':', '\0', '\n', '\r'):
274 if c in lbl:
274 if c in lbl:
275 raise error.Abort(
275 raise error.Abort(
276 _("%r cannot be used in a name") % pycompat.bytestr(c))
276 _("%r cannot be used in a name") % pycompat.bytestr(c))
277 try:
277 try:
278 int(lbl)
278 int(lbl)
279 raise error.Abort(_("cannot use an integer as a name"))
279 raise error.Abort(_("cannot use an integer as a name"))
280 except ValueError:
280 except ValueError:
281 pass
281 pass
282 if lbl.strip() != lbl:
282 if lbl.strip() != lbl:
283 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
283 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
284
285 def checkfilename(f):
285 def checkfilename(f):
286 '''Check that the filename f is an acceptable filename for a tracked file'''
286 '''Check that the filename f is an acceptable filename for a tracked file'''
287 if '\r' in f or '\n' in f:
287 if '\r' in f or '\n' in f:
288 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
288 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289
289
290 def checkportable(ui, f):
290 def checkportable(ui, f):
291 '''Check if filename f is portable and warn or abort depending on config'''
291 '''Check if filename f is portable and warn or abort depending on config'''
292 checkfilename(f)
292 checkfilename(f)
293 abort, warn = checkportabilityalert(ui)
293 abort, warn = checkportabilityalert(ui)
294 if abort or warn:
294 if abort or warn:
295 msg = util.checkwinfilename(f)
295 msg = util.checkwinfilename(f)
296 if msg:
296 if msg:
297 msg = "%s: %s" % (msg, procutil.shellquote(f))
297 msg = "%s: %s" % (msg, procutil.shellquote(f))
298 if abort:
298 if abort:
299 raise error.Abort(msg)
299 raise error.Abort(msg)
300 ui.warn(_("warning: %s\n") % msg)
300 ui.warn(_("warning: %s\n") % msg)
301
301
302 def checkportabilityalert(ui):
302 def checkportabilityalert(ui):
303 '''check if the user's config requests nothing, a warning, or abort for
303 '''check if the user's config requests nothing, a warning, or abort for
304 non-portable filenames'''
304 non-portable filenames'''
305 val = ui.config('ui', 'portablefilenames')
305 val = ui.config('ui', 'portablefilenames')
306 lval = val.lower()
306 lval = val.lower()
307 bval = stringutil.parsebool(val)
307 bval = stringutil.parsebool(val)
308 abort = pycompat.iswindows or lval == 'abort'
308 abort = pycompat.iswindows or lval == 'abort'
309 warn = bval or lval == 'warn'
309 warn = bval or lval == 'warn'
310 if bval is None and not (warn or abort or lval == 'ignore'):
310 if bval is None and not (warn or abort or lval == 'ignore'):
311 raise error.ConfigError(
311 raise error.ConfigError(
312 _("ui.portablefilenames value is invalid ('%s')") % val)
312 _("ui.portablefilenames value is invalid ('%s')") % val)
313 return abort, warn
313 return abort, warn
314
314
315 class casecollisionauditor(object):
315 class casecollisionauditor(object):
316 def __init__(self, ui, abort, dirstate):
316 def __init__(self, ui, abort, dirstate):
317 self._ui = ui
317 self._ui = ui
318 self._abort = abort
318 self._abort = abort
319 allfiles = '\0'.join(dirstate._map)
319 allfiles = '\0'.join(dirstate._map)
320 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
320 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
321 self._dirstate = dirstate
321 self._dirstate = dirstate
322 # The purpose of _newfiles is so that we don't complain about
322 # The purpose of _newfiles is so that we don't complain about
323 # case collisions if someone were to call this object with the
323 # case collisions if someone were to call this object with the
324 # same filename twice.
324 # same filename twice.
325 self._newfiles = set()
325 self._newfiles = set()
326
326
327 def __call__(self, f):
327 def __call__(self, f):
328 if f in self._newfiles:
328 if f in self._newfiles:
329 return
329 return
330 fl = encoding.lower(f)
330 fl = encoding.lower(f)
331 if fl in self._loweredfiles and f not in self._dirstate:
331 if fl in self._loweredfiles and f not in self._dirstate:
332 msg = _('possible case-folding collision for %s') % f
332 msg = _('possible case-folding collision for %s') % f
333 if self._abort:
333 if self._abort:
334 raise error.Abort(msg)
334 raise error.Abort(msg)
335 self._ui.warn(_("warning: %s\n") % msg)
335 self._ui.warn(_("warning: %s\n") % msg)
336 self._loweredfiles.add(fl)
336 self._loweredfiles.add(fl)
337 self._newfiles.add(f)
337 self._newfiles.add(f)
338
338
339 def filteredhash(repo, maxrev):
339 def filteredhash(repo, maxrev):
340 """build hash of filtered revisions in the current repoview.
340 """build hash of filtered revisions in the current repoview.
341
341
342 Multiple caches perform up-to-date validation by checking that the
342 Multiple caches perform up-to-date validation by checking that the
343 tiprev and tipnode stored in the cache file match the current repository.
343 tiprev and tipnode stored in the cache file match the current repository.
344 However, this is not sufficient for validating repoviews because the set
344 However, this is not sufficient for validating repoviews because the set
345 of revisions in the view may change without the repository tiprev and
345 of revisions in the view may change without the repository tiprev and
346 tipnode changing.
346 tipnode changing.
347
347
348 This function hashes all the revs filtered from the view and returns
348 This function hashes all the revs filtered from the view and returns
349 that SHA-1 digest.
349 that SHA-1 digest.
350 """
350 """
351 cl = repo.changelog
351 cl = repo.changelog
352 if not cl.filteredrevs:
352 if not cl.filteredrevs:
353 return None
353 return None
354 key = None
354 key = None
355 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
355 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
356 if revs:
356 if revs:
357 s = hashlib.sha1()
357 s = hashlib.sha1()
358 for rev in revs:
358 for rev in revs:
359 s.update('%d;' % rev)
359 s.update('%d;' % rev)
360 key = s.digest()
360 key = s.digest()
361 return key
361 return key
362
362
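A sketch of how the digest above is meant to be consumed: a cache stores the key next to its payload and rebuilds when the filtered set changes. 'storedkey' and 'rebuildcache' are illustrative names, not APIs from this file:

    maxrev = len(repo.changelog) - 1
    cachekey = filteredhash(repo, maxrev)   # None if nothing is filtered
    if cachekey != storedkey:
        rebuildcache(repo)                  # cached data no longer matches this view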
363 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
363 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
364 '''yield every hg repository under path, always recursively.
364 '''yield every hg repository under path, always recursively.
365 The recurse flag will only control recursion into repo working dirs'''
365 The recurse flag will only control recursion into repo working dirs'''
366 def errhandler(err):
366 def errhandler(err):
367 if err.filename == path:
367 if err.filename == path:
368 raise err
368 raise err
369 samestat = getattr(os.path, 'samestat', None)
369 samestat = getattr(os.path, 'samestat', None)
370 if followsym and samestat is not None:
370 if followsym and samestat is not None:
371 def adddir(dirlst, dirname):
371 def adddir(dirlst, dirname):
372 dirstat = os.stat(dirname)
372 dirstat = os.stat(dirname)
373 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
373 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
374 if not match:
374 if not match:
375 dirlst.append(dirstat)
375 dirlst.append(dirstat)
376 return not match
376 return not match
377 else:
377 else:
378 followsym = False
378 followsym = False
379
379
380 if (seen_dirs is None) and followsym:
380 if (seen_dirs is None) and followsym:
381 seen_dirs = []
381 seen_dirs = []
382 adddir(seen_dirs, path)
382 adddir(seen_dirs, path)
383 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
383 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
384 dirs.sort()
384 dirs.sort()
385 if '.hg' in dirs:
385 if '.hg' in dirs:
386 yield root # found a repository
386 yield root # found a repository
387 qroot = os.path.join(root, '.hg', 'patches')
387 qroot = os.path.join(root, '.hg', 'patches')
388 if os.path.isdir(os.path.join(qroot, '.hg')):
388 if os.path.isdir(os.path.join(qroot, '.hg')):
389 yield qroot # we have a patch queue repo here
389 yield qroot # we have a patch queue repo here
390 if recurse:
390 if recurse:
391 # avoid recursing inside the .hg directory
391 # avoid recursing inside the .hg directory
392 dirs.remove('.hg')
392 dirs.remove('.hg')
393 else:
393 else:
394 dirs[:] = [] # don't descend further
394 dirs[:] = [] # don't descend further
395 elif followsym:
395 elif followsym:
396 newdirs = []
396 newdirs = []
397 for d in dirs:
397 for d in dirs:
398 fname = os.path.join(root, d)
398 fname = os.path.join(root, d)
399 if adddir(seen_dirs, fname):
399 if adddir(seen_dirs, fname):
400 if os.path.islink(fname):
400 if os.path.islink(fname):
401 for hgname in walkrepos(fname, True, seen_dirs):
401 for hgname in walkrepos(fname, True, seen_dirs):
402 yield hgname
402 yield hgname
403 else:
403 else:
404 newdirs.append(d)
404 newdirs.append(d)
405 dirs[:] = newdirs
405 dirs[:] = newdirs
406
406
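For example, collecting every repository (including any MQ patch-queue repositories) under a directory tree could look like the following; the path is illustrative:

    repos = list(walkrepos('/srv/hg', followsym=True, recurse=True))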
407 def binnode(ctx):
407 def binnode(ctx):
408 """Return binary node id for a given basectx"""
408 """Return binary node id for a given basectx"""
409 node = ctx.node()
409 node = ctx.node()
410 if node is None:
410 if node is None:
411 return wdirid
411 return wdirid
412 return node
412 return node
413
413
414 def intrev(ctx):
414 def intrev(ctx):
415 """Return integer for a given basectx that can be used in comparison or
415 """Return integer for a given basectx that can be used in comparison or
416 arithmetic operation"""
416 arithmetic operation"""
417 rev = ctx.rev()
417 rev = ctx.rev()
418 if rev is None:
418 if rev is None:
419 return wdirrev
419 return wdirrev
420 return rev
420 return rev
421
421
422 def formatchangeid(ctx):
422 def formatchangeid(ctx):
423 """Format changectx as '{rev}:{node|formatnode}', which is the default
423 """Format changectx as '{rev}:{node|formatnode}', which is the default
424 template provided by logcmdutil.changesettemplater"""
424 template provided by logcmdutil.changesettemplater"""
425 repo = ctx.repo()
425 repo = ctx.repo()
426 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
426 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
427
427
428 def formatrevnode(ui, rev, node):
428 def formatrevnode(ui, rev, node):
429 """Format given revision and node depending on the current verbosity"""
429 """Format given revision and node depending on the current verbosity"""
430 if ui.debugflag:
430 if ui.debugflag:
431 hexfunc = hex
431 hexfunc = hex
432 else:
432 else:
433 hexfunc = short
433 hexfunc = short
434 return '%d:%s' % (rev, hexfunc(node))
434 return '%d:%s' % (rev, hexfunc(node))
435
435
436 def revsingle(repo, revspec, default='.', localalias=None):
436 def revsingle(repo, revspec, default='.', localalias=None):
437 if not revspec and revspec != 0:
437 if not revspec and revspec != 0:
438 return repo[default]
438 return repo[default]
439
439
440 l = revrange(repo, [revspec], localalias=localalias)
440 l = revrange(repo, [revspec], localalias=localalias)
441 if not l:
441 if not l:
442 raise error.Abort(_('empty revision set'))
442 raise error.Abort(_('empty revision set'))
443 return repo[l.last()]
443 return repo[l.last()]
444
444
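A quick sketch of the helper above; the revset string is illustrative and the return value is a changectx:

    ctx = revsingle(repo, 'tip~1')   # last() of the resolved set
    ctx = revsingle(repo, None)      # falls back to repo['.']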
445 def _pairspec(revspec):
445 def _pairspec(revspec):
446 tree = revsetlang.parse(revspec)
446 tree = revsetlang.parse(revspec)
447 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
447 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
448
448
449 def revpairnodes(repo, revs):
449 def revpairnodes(repo, revs):
450 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
450 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
451 ctx1, ctx2 = revpair(repo, revs)
451 ctx1, ctx2 = revpair(repo, revs)
452 return ctx1.node(), ctx2.node()
452 return ctx1.node(), ctx2.node()
453
453
454 def revpair(repo, revs):
454 def revpair(repo, revs):
455 if not revs:
455 if not revs:
456 return repo['.'], repo[None]
456 return repo['.'], repo[None]
457
457
458 l = revrange(repo, revs)
458 l = revrange(repo, revs)
459
459
460 if not l:
460 if not l:
461 first = second = None
461 first = second = None
462 elif l.isascending():
462 elif l.isascending():
463 first = l.min()
463 first = l.min()
464 second = l.max()
464 second = l.max()
465 elif l.isdescending():
465 elif l.isdescending():
466 first = l.max()
466 first = l.max()
467 second = l.min()
467 second = l.min()
468 else:
468 else:
469 first = l.first()
469 first = l.first()
470 second = l.last()
470 second = l.last()
471
471
472 if first is None:
472 if first is None:
473 raise error.Abort(_('empty revision range'))
473 raise error.Abort(_('empty revision range'))
474 if (first == second and len(revs) >= 2
474 if (first == second and len(revs) >= 2
475 and not all(revrange(repo, [r]) for r in revs)):
475 and not all(revrange(repo, [r]) for r in revs)):
476 raise error.Abort(_('empty revision on one side of range'))
476 raise error.Abort(_('empty revision on one side of range'))
477
477
478 # if top-level is range expression, the result must always be a pair
478 # if top-level is range expression, the result must always be a pair
479 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
479 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
480 return repo[first], repo[None]
480 return repo[first], repo[None]
481
481
482 return repo[first], repo[second]
482 return repo[first], repo[second]
483
483
484 def revrange(repo, specs, localalias=None):
484 def revrange(repo, specs, localalias=None):
485 """Execute 1 to many revsets and return the union.
485 """Execute 1 to many revsets and return the union.
486
486
487 This is the preferred mechanism for executing revsets using user-specified
487 This is the preferred mechanism for executing revsets using user-specified
488 config options, such as revset aliases.
488 config options, such as revset aliases.
489
489
490 The revsets specified by ``specs`` will be executed via a chained ``OR``
490 The revsets specified by ``specs`` will be executed via a chained ``OR``
491 expression. If ``specs`` is empty, an empty result is returned.
491 expression. If ``specs`` is empty, an empty result is returned.
492
492
493 ``specs`` can contain integers, in which case they are assumed to be
493 ``specs`` can contain integers, in which case they are assumed to be
494 revision numbers.
494 revision numbers.
495
495
496 It is assumed the revsets are already formatted. If you have arguments
496 It is assumed the revsets are already formatted. If you have arguments
497 that need to be expanded in the revset, call ``revsetlang.formatspec()``
497 that need to be expanded in the revset, call ``revsetlang.formatspec()``
498 and pass the result as an element of ``specs``.
498 and pass the result as an element of ``specs``.
499
499
500 Specifying a single revset is allowed.
500 Specifying a single revset is allowed.
501
501
502 Returns a ``revset.abstractsmartset`` which is a list-like interface over
502 Returns a ``revset.abstractsmartset`` which is a list-like interface over
503 integer revisions.
503 integer revisions.
504 """
504 """
505 allspecs = []
505 allspecs = []
506 for spec in specs:
506 for spec in specs:
507 if isinstance(spec, int):
507 if isinstance(spec, int):
508 spec = revsetlang.formatspec('rev(%d)', spec)
508 spec = revsetlang.formatspec('rev(%d)', spec)
509 allspecs.append(spec)
509 allspecs.append(spec)
510 return repo.anyrevs(allspecs, user=True, localalias=localalias)
510 return repo.anyrevs(allspecs, user=True, localalias=localalias)
511
511
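A short sketch of the calling convention described in the docstring above; the revset strings are illustrative:

    revs = revrange(repo, ['heads(default)', 'draft()'])
    if revs:                 # smartsets can be tested for emptiness, like 'l' above
        tiprev = revs.max()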
512 def meaningfulparents(repo, ctx):
512 def meaningfulparents(repo, ctx):
513 """Return list of meaningful (or all if debug) parentrevs for rev.
513 """Return list of meaningful (or all if debug) parentrevs for rev.
514
514
515 For merges (two non-nullrev revisions) both parents are meaningful.
515 For merges (two non-nullrev revisions) both parents are meaningful.
516 Otherwise the first parent revision is considered meaningful if it
516 Otherwise the first parent revision is considered meaningful if it
517 is not the preceding revision.
517 is not the preceding revision.
518 """
518 """
519 parents = ctx.parents()
519 parents = ctx.parents()
520 if len(parents) > 1:
520 if len(parents) > 1:
521 return parents
521 return parents
522 if repo.ui.debugflag:
522 if repo.ui.debugflag:
523 return [parents[0], repo['null']]
523 return [parents[0], repo['null']]
524 if parents[0].rev() >= intrev(ctx) - 1:
524 if parents[0].rev() >= intrev(ctx) - 1:
525 return []
525 return []
526 return parents
526 return parents
527
527
528 def expandpats(pats):
528 def expandpats(pats):
529 '''Expand bare globs when running on Windows.
529 '''Expand bare globs when running on Windows.
530 On POSIX we assume it has already been done by sh.'''
530 On POSIX we assume it has already been done by sh.'''
531 if not util.expandglobs:
531 if not util.expandglobs:
532 return list(pats)
532 return list(pats)
533 ret = []
533 ret = []
534 for kindpat in pats:
534 for kindpat in pats:
535 kind, pat = matchmod._patsplit(kindpat, None)
535 kind, pat = matchmod._patsplit(kindpat, None)
536 if kind is None:
536 if kind is None:
537 try:
537 try:
538 globbed = glob.glob(pat)
538 globbed = glob.glob(pat)
539 except re.error:
539 except re.error:
540 globbed = [pat]
540 globbed = [pat]
541 if globbed:
541 if globbed:
542 ret.extend(globbed)
542 ret.extend(globbed)
543 continue
543 continue
544 ret.append(kindpat)
544 ret.append(kindpat)
545 return ret
545 return ret
546
546
547 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
547 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
548 badfn=None):
548 badfn=None):
549 '''Return a matcher and the patterns that were used.
549 '''Return a matcher and the patterns that were used.
550 The matcher will warn about bad matches, unless an alternate badfn callback
550 The matcher will warn about bad matches, unless an alternate badfn callback
551 is provided.'''
551 is provided.'''
552 if pats == ("",):
552 if pats == ("",):
553 pats = []
553 pats = []
554 if opts is None:
554 if opts is None:
555 opts = {}
555 opts = {}
556 if not globbed and default == 'relpath':
556 if not globbed and default == 'relpath':
557 pats = expandpats(pats or [])
557 pats = expandpats(pats or [])
558
558
559 def bad(f, msg):
559 def bad(f, msg):
560 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
560 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
561
561
562 if badfn is None:
562 if badfn is None:
563 badfn = bad
563 badfn = bad
564
564
565 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
565 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
566 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
566 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
567
567
568 if m.always():
568 if m.always():
569 pats = []
569 pats = []
570 return m, pats
570 return m, pats
571
571
572 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
572 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
573 badfn=None):
573 badfn=None):
574 '''Return a matcher that will warn about bad matches.'''
574 '''Return a matcher that will warn about bad matches.'''
575 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
575 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
576
576
577 def matchall(repo):
577 def matchall(repo):
578 '''Return a matcher that will efficiently match everything.'''
578 '''Return a matcher that will efficiently match everything.'''
579 return matchmod.always(repo.root, repo.getcwd())
579 return matchmod.always(repo.root, repo.getcwd())
580
580
581 def matchfiles(repo, files, badfn=None):
581 def matchfiles(repo, files, badfn=None):
582 '''Return a matcher that will efficiently match exactly these files.'''
582 '''Return a matcher that will efficiently match exactly these files.'''
583 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
583 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
584
584
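The matchers returned by match()/matchfiles() are callable on repo-relative paths and expose the files()/exact() queries used throughout this module; a small sketch with illustrative file names:

    m = matchfiles(repo, ['setup.py', 'mercurial/scmutil.py'])
    m('setup.py')          # True
    m('README')            # False
    m.exact('setup.py')    # True, since this is an exact matcher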
585 def parsefollowlinespattern(repo, rev, pat, msg):
585 def parsefollowlinespattern(repo, rev, pat, msg):
586 """Return a file name from `pat` pattern suitable for usage in followlines
586 """Return a file name from `pat` pattern suitable for usage in followlines
587 logic.
587 logic.
588 """
588 """
589 if not matchmod.patkind(pat):
589 if not matchmod.patkind(pat):
590 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
590 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
591 else:
591 else:
592 ctx = repo[rev]
592 ctx = repo[rev]
593 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
593 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
594 files = [f for f in ctx if m(f)]
594 files = [f for f in ctx if m(f)]
595 if len(files) != 1:
595 if len(files) != 1:
596 raise error.ParseError(msg)
596 raise error.ParseError(msg)
597 return files[0]
597 return files[0]
598
598
599 def origpath(ui, repo, filepath):
599 def origpath(ui, repo, filepath):
600 '''customize where .orig files are created
600 '''customize where .orig files are created
601
601
602 Fetch user defined path from config file: [ui] origbackuppath = <path>
602 Fetch user defined path from config file: [ui] origbackuppath = <path>
603 Fall back to default (filepath with .orig suffix) if not specified
603 Fall back to default (filepath with .orig suffix) if not specified
604 '''
604 '''
605 origbackuppath = ui.config('ui', 'origbackuppath')
605 origbackuppath = ui.config('ui', 'origbackuppath')
606 if not origbackuppath:
606 if not origbackuppath:
607 return filepath + ".orig"
607 return filepath + ".orig"
608
608
609 # Convert filepath from an absolute path into a path inside the repo.
609 # Convert filepath from an absolute path into a path inside the repo.
610 filepathfromroot = util.normpath(os.path.relpath(filepath,
610 filepathfromroot = util.normpath(os.path.relpath(filepath,
611 start=repo.root))
611 start=repo.root))
612
612
613 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
613 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
614 origbackupdir = origvfs.dirname(filepathfromroot)
614 origbackupdir = origvfs.dirname(filepathfromroot)
615 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
615 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
616 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
616 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
617
617
618 # Remove any files that conflict with the backup file's path
618 # Remove any files that conflict with the backup file's path
619 for f in reversed(list(util.finddirs(filepathfromroot))):
619 for f in reversed(list(util.finddirs(filepathfromroot))):
620 if origvfs.isfileorlink(f):
620 if origvfs.isfileorlink(f):
621 ui.note(_('removing conflicting file: %s\n')
621 ui.note(_('removing conflicting file: %s\n')
622 % origvfs.join(f))
622 % origvfs.join(f))
623 origvfs.unlink(f)
623 origvfs.unlink(f)
624 break
624 break
625
625
626 origvfs.makedirs(origbackupdir)
626 origvfs.makedirs(origbackupdir)
627
627
628 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
628 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
629 ui.note(_('removing conflicting directory: %s\n')
629 ui.note(_('removing conflicting directory: %s\n')
630 % origvfs.join(filepathfromroot))
630 % origvfs.join(filepathfromroot))
631 origvfs.rmtree(filepathfromroot, forcibly=True)
631 origvfs.rmtree(filepathfromroot, forcibly=True)
632
632
633 return origvfs.join(filepathfromroot)
633 return origvfs.join(filepathfromroot)
634
634
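To illustrate the two branches above (paths and config value are illustrative): without ui.origbackuppath the function only appends a suffix, while with it the backup is redirected under the configured directory inside the working copy:

    origpath(ui, repo, 'src/module.py')
    # no origbackuppath:                   'src/module.py.orig'
    # [ui] origbackuppath = .origbackups:  roughly '<repo>/.origbackups/src/module.py'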
635 class _containsnode(object):
635 class _containsnode(object):
636 """proxy __contains__(node) to container.__contains__ which accepts revs"""
636 """proxy __contains__(node) to container.__contains__ which accepts revs"""
637
637
638 def __init__(self, repo, revcontainer):
638 def __init__(self, repo, revcontainer):
639 self._torev = repo.changelog.rev
639 self._torev = repo.changelog.rev
640 self._revcontains = revcontainer.__contains__
640 self._revcontains = revcontainer.__contains__
641
641
642 def __contains__(self, node):
642 def __contains__(self, node):
643 return self._revcontains(self._torev(node))
643 return self._revcontains(self._torev(node))
644
644
645 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
645 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
646 """do common cleanups when old nodes are replaced by new nodes
646 """do common cleanups when old nodes are replaced by new nodes
647
647
648 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
648 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
649 (we might also want to move working directory parent in the future)
649 (we might also want to move working directory parent in the future)
650
650
651 By default, bookmark moves are calculated automatically from 'replacements',
651 By default, bookmark moves are calculated automatically from 'replacements',
652 but 'moves' can be used to override that. Also, 'moves' may include
652 but 'moves' can be used to override that. Also, 'moves' may include
653 additional bookmark moves that should not have associated obsmarkers.
653 additional bookmark moves that should not have associated obsmarkers.
654
654
655 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
655 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
656 have replacements. operation is a string, like "rebase".
656 have replacements. operation is a string, like "rebase".
657
657
658 metadata is a dictionary containing metadata to be stored in the obsmarker if
658 metadata is a dictionary containing metadata to be stored in the obsmarker if
659 obsolescence is enabled.
659 obsolescence is enabled.
660 """
660 """
661 if not replacements and not moves:
661 if not replacements and not moves:
662 return
662 return
663
663
664 # translate mapping's other forms
664 # translate mapping's other forms
665 if not util.safehasattr(replacements, 'items'):
665 if not util.safehasattr(replacements, 'items'):
666 replacements = {n: () for n in replacements}
666 replacements = {n: () for n in replacements}
667
667
668 # Calculate bookmark movements
668 # Calculate bookmark movements
669 if moves is None:
669 if moves is None:
670 moves = {}
670 moves = {}
671 # Unfiltered repo is needed since nodes in replacements might be hidden.
671 # Unfiltered repo is needed since nodes in replacements might be hidden.
672 unfi = repo.unfiltered()
672 unfi = repo.unfiltered()
673 for oldnode, newnodes in replacements.items():
673 for oldnode, newnodes in replacements.items():
674 if oldnode in moves:
674 if oldnode in moves:
675 continue
675 continue
676 if len(newnodes) > 1:
676 if len(newnodes) > 1:
677 # usually a split, take the one with biggest rev number
677 # usually a split, take the one with biggest rev number
678 newnode = next(unfi.set('max(%ln)', newnodes)).node()
678 newnode = next(unfi.set('max(%ln)', newnodes)).node()
679 elif len(newnodes) == 0:
679 elif len(newnodes) == 0:
680 # move bookmark backwards
680 # move bookmark backwards
681 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
681 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
682 list(replacements)))
682 list(replacements)))
683 if roots:
683 if roots:
684 newnode = roots[0].node()
684 newnode = roots[0].node()
685 else:
685 else:
686 newnode = nullid
686 newnode = nullid
687 else:
687 else:
688 newnode = newnodes[0]
688 newnode = newnodes[0]
689 moves[oldnode] = newnode
689 moves[oldnode] = newnode
690
690
691 with repo.transaction('cleanup') as tr:
691 with repo.transaction('cleanup') as tr:
692 # Move bookmarks
692 # Move bookmarks
693 bmarks = repo._bookmarks
693 bmarks = repo._bookmarks
694 bmarkchanges = []
694 bmarkchanges = []
695 allnewnodes = [n for ns in replacements.values() for n in ns]
695 allnewnodes = [n for ns in replacements.values() for n in ns]
696 for oldnode, newnode in moves.items():
696 for oldnode, newnode in moves.items():
697 oldbmarks = repo.nodebookmarks(oldnode)
697 oldbmarks = repo.nodebookmarks(oldnode)
698 if not oldbmarks:
698 if not oldbmarks:
699 continue
699 continue
700 from . import bookmarks # avoid import cycle
700 from . import bookmarks # avoid import cycle
701 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
701 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
702 (util.rapply(pycompat.maybebytestr, oldbmarks),
702 (util.rapply(pycompat.maybebytestr, oldbmarks),
703 hex(oldnode), hex(newnode)))
703 hex(oldnode), hex(newnode)))
704 # Delete divergent bookmarks being parents of related newnodes
704 # Delete divergent bookmarks being parents of related newnodes
705 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
705 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
706 allnewnodes, newnode, oldnode)
706 allnewnodes, newnode, oldnode)
707 deletenodes = _containsnode(repo, deleterevs)
707 deletenodes = _containsnode(repo, deleterevs)
708 for name in oldbmarks:
708 for name in oldbmarks:
709 bmarkchanges.append((name, newnode))
709 bmarkchanges.append((name, newnode))
710 for b in bookmarks.divergent2delete(repo, deletenodes, name):
710 for b in bookmarks.divergent2delete(repo, deletenodes, name):
711 bmarkchanges.append((b, None))
711 bmarkchanges.append((b, None))
712
712
713 if bmarkchanges:
713 if bmarkchanges:
714 bmarks.applychanges(repo, tr, bmarkchanges)
714 bmarks.applychanges(repo, tr, bmarkchanges)
715
715
716 # Obsolete or strip nodes
716 # Obsolete or strip nodes
717 if obsolete.isenabled(repo, obsolete.createmarkersopt):
717 if obsolete.isenabled(repo, obsolete.createmarkersopt):
718 # If a node is already obsoleted, and we want to obsolete it
718 # If a node is already obsoleted, and we want to obsolete it
719 # without a successor, skip that obsolete request since it's
719 # without a successor, skip that obsolete request since it's
720 # unnecessary. That's the "if s or not isobs(n)" check below.
720 # unnecessary. That's the "if s or not isobs(n)" check below.
721 # Also sort the nodes in topological order; that might be useful for
721 # Also sort the nodes in topological order; that might be useful for
722 # some obsstore logic.
722 # some obsstore logic.
723 # NOTE: the filtering and sorting might belong to createmarkers.
723 # NOTE: the filtering and sorting might belong to createmarkers.
724 isobs = unfi.obsstore.successors.__contains__
724 isobs = unfi.obsstore.successors.__contains__
725 torev = unfi.changelog.rev
725 torev = unfi.changelog.rev
726 sortfunc = lambda ns: torev(ns[0])
726 sortfunc = lambda ns: torev(ns[0])
727 rels = [(unfi[n], tuple(unfi[m] for m in s))
727 rels = [(unfi[n], tuple(unfi[m] for m in s))
728 for n, s in sorted(replacements.items(), key=sortfunc)
728 for n, s in sorted(replacements.items(), key=sortfunc)
729 if s or not isobs(n)]
729 if s or not isobs(n)]
730 if rels:
730 if rels:
731 obsolete.createmarkers(repo, rels, operation=operation,
731 obsolete.createmarkers(repo, rels, operation=operation,
732 metadata=metadata)
732 metadata=metadata)
733 else:
733 else:
734 from . import repair # avoid import cycle
734 from . import repair # avoid import cycle
735 tostrip = list(replacements)
735 tostrip = list(replacements)
736 if tostrip:
736 if tostrip:
737 repair.delayedstrip(repo.ui, repo, tostrip, operation)
737 repair.delayedstrip(repo.ui, repo, tostrip, operation)
738
738
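A sketch of how a history-rewriting command might call the helper above once it has computed its mapping; oldnode and newnode are assumed to be binary node ids and the operation name is illustrative:

    replacements = {oldnode: [newnode]}
    cleanupnodes(repo, replacements, 'amend')   # moves bookmarks, obsoletes or strips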
739 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
739 def addremove(repo, matcher, prefix, opts=None):
740 if opts is None:
740 if opts is None:
741 opts = {}
741 opts = {}
742 m = matcher
742 m = matcher
743 if dry_run is None:
744 dry_run = opts.get('dry_run')
743 dry_run = opts.get('dry_run')
745 if similarity is None:
746 similarity = float(opts.get('similarity') or 0)
744 similarity = float(opts.get('similarity') or 0)
747
745
748 ret = 0
746 ret = 0
749 join = lambda f: os.path.join(prefix, f)
747 join = lambda f: os.path.join(prefix, f)
750
748
751 wctx = repo[None]
749 wctx = repo[None]
752 for subpath in sorted(wctx.substate):
750 for subpath in sorted(wctx.substate):
753 submatch = matchmod.subdirmatcher(subpath, m)
751 submatch = matchmod.subdirmatcher(subpath, m)
754 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
752 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
755 sub = wctx.sub(subpath)
753 sub = wctx.sub(subpath)
756 try:
754 try:
757 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
755 if sub.addremove(submatch, prefix, opts):
758 ret = 1
756 ret = 1
759 except error.LookupError:
757 except error.LookupError:
760 repo.ui.status(_("skipping missing subrepository: %s\n")
758 repo.ui.status(_("skipping missing subrepository: %s\n")
761 % join(subpath))
759 % join(subpath))
762
760
763 rejected = []
761 rejected = []
764 def badfn(f, msg):
762 def badfn(f, msg):
765 if f in m.files():
763 if f in m.files():
766 m.bad(f, msg)
764 m.bad(f, msg)
767 rejected.append(f)
765 rejected.append(f)
768
766
769 badmatch = matchmod.badmatch(m, badfn)
767 badmatch = matchmod.badmatch(m, badfn)
770 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
768 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
771 badmatch)
769 badmatch)
772
770
773 unknownset = set(unknown + forgotten)
771 unknownset = set(unknown + forgotten)
774 toprint = unknownset.copy()
772 toprint = unknownset.copy()
775 toprint.update(deleted)
773 toprint.update(deleted)
776 for abs in sorted(toprint):
774 for abs in sorted(toprint):
777 if repo.ui.verbose or not m.exact(abs):
775 if repo.ui.verbose or not m.exact(abs):
778 if abs in unknownset:
776 if abs in unknownset:
779 status = _('adding %s\n') % m.uipath(abs)
777 status = _('adding %s\n') % m.uipath(abs)
780 else:
778 else:
781 status = _('removing %s\n') % m.uipath(abs)
779 status = _('removing %s\n') % m.uipath(abs)
782 repo.ui.status(status)
780 repo.ui.status(status)
783
781
784 renames = _findrenames(repo, m, added + unknown, removed + deleted,
782 renames = _findrenames(repo, m, added + unknown, removed + deleted,
785 similarity)
783 similarity)
786
784
787 if not dry_run:
785 if not dry_run:
788 _markchanges(repo, unknown + forgotten, deleted, renames)
786 _markchanges(repo, unknown + forgotten, deleted, renames)
789
787
790 for f in rejected:
788 for f in rejected:
791 if f in m.files():
789 if f in m.files():
792 return 1
790 return 1
793 return ret
791 return ret
794
792
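With the signature change in this revision, callers hand dry-run and similarity to addremove() through opts (keys 'dry_run' and 'similarity', exactly as read near the top of the function) instead of as separate arguments; an illustrative call:

    addremove(repo, matcher, '', opts={'dry_run': True})   # report only, change nothing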
795 def marktouched(repo, files, similarity=0.0):
793 def marktouched(repo, files, similarity=0.0):
796 '''Assert that files have somehow been operated upon. files are relative to
794 '''Assert that files have somehow been operated upon. files are relative to
797 the repo root.'''
795 the repo root.'''
798 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
796 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
799 rejected = []
797 rejected = []
800
798
801 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
799 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
802
800
803 if repo.ui.verbose:
801 if repo.ui.verbose:
804 unknownset = set(unknown + forgotten)
802 unknownset = set(unknown + forgotten)
805 toprint = unknownset.copy()
803 toprint = unknownset.copy()
806 toprint.update(deleted)
804 toprint.update(deleted)
807 for abs in sorted(toprint):
805 for abs in sorted(toprint):
808 if abs in unknownset:
806 if abs in unknownset:
809 status = _('adding %s\n') % abs
807 status = _('adding %s\n') % abs
810 else:
808 else:
811 status = _('removing %s\n') % abs
809 status = _('removing %s\n') % abs
812 repo.ui.status(status)
810 repo.ui.status(status)
813
811
814 renames = _findrenames(repo, m, added + unknown, removed + deleted,
812 renames = _findrenames(repo, m, added + unknown, removed + deleted,
815 similarity)
813 similarity)
816
814
817 _markchanges(repo, unknown + forgotten, deleted, renames)
815 _markchanges(repo, unknown + forgotten, deleted, renames)
818
816
819 for f in rejected:
817 for f in rejected:
820 if f in m.files():
818 if f in m.files():
821 return 1
819 return 1
822 return 0
820 return 0
823
821
824 def _interestingfiles(repo, matcher):
822 def _interestingfiles(repo, matcher):
825 '''Walk dirstate with matcher, looking for files that addremove would care
823 '''Walk dirstate with matcher, looking for files that addremove would care
826 about.
824 about.
827
825
828 This is different from dirstate.status because it doesn't care about
826 This is different from dirstate.status because it doesn't care about
829 whether files are modified or clean.'''
827 whether files are modified or clean.'''
830 added, unknown, deleted, removed, forgotten = [], [], [], [], []
828 added, unknown, deleted, removed, forgotten = [], [], [], [], []
831 audit_path = pathutil.pathauditor(repo.root, cached=True)
829 audit_path = pathutil.pathauditor(repo.root, cached=True)
832
830
833 ctx = repo[None]
831 ctx = repo[None]
834 dirstate = repo.dirstate
832 dirstate = repo.dirstate
835 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
833 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
836 unknown=True, ignored=False, full=False)
834 unknown=True, ignored=False, full=False)
837 for abs, st in walkresults.iteritems():
835 for abs, st in walkresults.iteritems():
838 dstate = dirstate[abs]
836 dstate = dirstate[abs]
839 if dstate == '?' and audit_path.check(abs):
837 if dstate == '?' and audit_path.check(abs):
840 unknown.append(abs)
838 unknown.append(abs)
841 elif dstate != 'r' and not st:
839 elif dstate != 'r' and not st:
842 deleted.append(abs)
840 deleted.append(abs)
843 elif dstate == 'r' and st:
841 elif dstate == 'r' and st:
844 forgotten.append(abs)
842 forgotten.append(abs)
845 # for finding renames
843 # for finding renames
846 elif dstate == 'r' and not st:
844 elif dstate == 'r' and not st:
847 removed.append(abs)
845 removed.append(abs)
848 elif dstate == 'a':
846 elif dstate == 'a':
849 added.append(abs)
847 added.append(abs)
850
848
851 return added, unknown, deleted, removed, forgotten
849 return added, unknown, deleted, removed, forgotten
852
850
853 def _findrenames(repo, matcher, added, removed, similarity):
851 def _findrenames(repo, matcher, added, removed, similarity):
854 '''Find renames from removed files to added ones.'''
852 '''Find renames from removed files to added ones.'''
855 renames = {}
853 renames = {}
856 if similarity > 0:
854 if similarity > 0:
857 for old, new, score in similar.findrenames(repo, added, removed,
855 for old, new, score in similar.findrenames(repo, added, removed,
858 similarity):
856 similarity):
859 if (repo.ui.verbose or not matcher.exact(old)
857 if (repo.ui.verbose or not matcher.exact(old)
860 or not matcher.exact(new)):
858 or not matcher.exact(new)):
861 repo.ui.status(_('recording removal of %s as rename to %s '
859 repo.ui.status(_('recording removal of %s as rename to %s '
862 '(%d%% similar)\n') %
860 '(%d%% similar)\n') %
863 (matcher.rel(old), matcher.rel(new),
861 (matcher.rel(old), matcher.rel(new),
864 score * 100))
862 score * 100))
865 renames[new] = old
863 renames[new] = old
866 return renames
864 return renames
867
865
868 def _markchanges(repo, unknown, deleted, renames):
866 def _markchanges(repo, unknown, deleted, renames):
869 '''Marks the files in unknown as added, the files in deleted as removed,
867 '''Marks the files in unknown as added, the files in deleted as removed,
870 and the files in renames as copied.'''
868 and the files in renames as copied.'''
871 wctx = repo[None]
869 wctx = repo[None]
872 with repo.wlock():
870 with repo.wlock():
873 wctx.forget(deleted)
871 wctx.forget(deleted)
874 wctx.add(unknown)
872 wctx.add(unknown)
875 for new, old in renames.iteritems():
873 for new, old in renames.iteritems():
876 wctx.copy(old, new)
874 wctx.copy(old, new)
877
875
878 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
876 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
879 """Update the dirstate to reflect the intent of copying src to dst. For
877 """Update the dirstate to reflect the intent of copying src to dst. For
880 different reasons it might not end with dst being marked as copied from src.
878 different reasons it might not end with dst being marked as copied from src.
881 """
879 """
882 origsrc = repo.dirstate.copied(src) or src
880 origsrc = repo.dirstate.copied(src) or src
883 if dst == origsrc: # copying back a copy?
881 if dst == origsrc: # copying back a copy?
884 if repo.dirstate[dst] not in 'mn' and not dryrun:
882 if repo.dirstate[dst] not in 'mn' and not dryrun:
885 repo.dirstate.normallookup(dst)
883 repo.dirstate.normallookup(dst)
886 else:
884 else:
887 if repo.dirstate[origsrc] == 'a' and origsrc == src:
885 if repo.dirstate[origsrc] == 'a' and origsrc == src:
888 if not ui.quiet:
886 if not ui.quiet:
889 ui.warn(_("%s has not been committed yet, so no copy "
887 ui.warn(_("%s has not been committed yet, so no copy "
890 "data will be stored for %s.\n")
888 "data will be stored for %s.\n")
891 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
889 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
892 if repo.dirstate[dst] in '?r' and not dryrun:
890 if repo.dirstate[dst] in '?r' and not dryrun:
893 wctx.add([dst])
891 wctx.add([dst])
894 elif not dryrun:
892 elif not dryrun:
895 wctx.copy(origsrc, dst)
893 wctx.copy(origsrc, dst)
896
894
897 def readrequires(opener, supported):
895 def readrequires(opener, supported):
898 '''Reads and parses .hg/requires and checks if all entries found
896 '''Reads and parses .hg/requires and checks if all entries found
899 are in the list of supported features.'''
897 are in the list of supported features.'''
900 requirements = set(opener.read("requires").splitlines())
898 requirements = set(opener.read("requires").splitlines())
901 missings = []
899 missings = []
902 for r in requirements:
900 for r in requirements:
903 if r not in supported:
901 if r not in supported:
904 if not r or not r[0:1].isalnum():
902 if not r or not r[0:1].isalnum():
905 raise error.RequirementError(_(".hg/requires file is corrupt"))
903 raise error.RequirementError(_(".hg/requires file is corrupt"))
906 missings.append(r)
904 missings.append(r)
907 missings.sort()
905 missings.sort()
908 if missings:
906 if missings:
909 raise error.RequirementError(
907 raise error.RequirementError(
910 _("repository requires features unknown to this Mercurial: %s")
908 _("repository requires features unknown to this Mercurial: %s")
911 % " ".join(missings),
909 % " ".join(missings),
912 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
910 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
913 " for more information"))
911 " for more information"))
914 return requirements
912 return requirements
915
913
916 def writerequires(opener, requirements):
914 def writerequires(opener, requirements):
917 with opener('requires', 'w') as fp:
915 with opener('requires', 'w') as fp:
918 for r in sorted(requirements):
916 for r in sorted(requirements):
919 fp.write("%s\n" % r)
917 fp.write("%s\n" % r)
920
918
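For reference, the file these two helpers read and write is a plain list of feature names, one per line; the set below shows typical entries, written here through writerequires() (the exact set depends on the repository):

    writerequires(repo.vfs, {'dotencode', 'fncache', 'generaldelta', 'revlogv1', 'store'})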
921 class filecachesubentry(object):
919 class filecachesubentry(object):
922 def __init__(self, path, stat):
920 def __init__(self, path, stat):
923 self.path = path
921 self.path = path
924 self.cachestat = None
922 self.cachestat = None
925 self._cacheable = None
923 self._cacheable = None
926
924
927 if stat:
925 if stat:
928 self.cachestat = filecachesubentry.stat(self.path)
926 self.cachestat = filecachesubentry.stat(self.path)
929
927
930 if self.cachestat:
928 if self.cachestat:
931 self._cacheable = self.cachestat.cacheable()
929 self._cacheable = self.cachestat.cacheable()
932 else:
930 else:
933 # None means we don't know yet
931 # None means we don't know yet
934 self._cacheable = None
932 self._cacheable = None
935
933
936 def refresh(self):
934 def refresh(self):
937 if self.cacheable():
935 if self.cacheable():
938 self.cachestat = filecachesubentry.stat(self.path)
936 self.cachestat = filecachesubentry.stat(self.path)
939
937
940 def cacheable(self):
938 def cacheable(self):
941 if self._cacheable is not None:
939 if self._cacheable is not None:
942 return self._cacheable
940 return self._cacheable
943
941
944 # we don't know yet, assume it is for now
942 # we don't know yet, assume it is for now
945 return True
943 return True
946
944
947 def changed(self):
945 def changed(self):
948 # no point in going further if we can't cache it
946 # no point in going further if we can't cache it
949 if not self.cacheable():
947 if not self.cacheable():
950 return True
948 return True
951
949
952 newstat = filecachesubentry.stat(self.path)
950 newstat = filecachesubentry.stat(self.path)
953
951
954 # we may not know if it's cacheable yet, check again now
952 # we may not know if it's cacheable yet, check again now
955 if newstat and self._cacheable is None:
953 if newstat and self._cacheable is None:
956 self._cacheable = newstat.cacheable()
954 self._cacheable = newstat.cacheable()
957
955
958 # check again
956 # check again
959 if not self._cacheable:
957 if not self._cacheable:
960 return True
958 return True
961
959
962 if self.cachestat != newstat:
960 if self.cachestat != newstat:
963 self.cachestat = newstat
961 self.cachestat = newstat
964 return True
962 return True
965 else:
963 else:
966 return False
964 return False
967
965
968 @staticmethod
966 @staticmethod
969 def stat(path):
967 def stat(path):
970 try:
968 try:
971 return util.cachestat(path)
969 return util.cachestat(path)
972 except OSError as e:
970 except OSError as e:
973 if e.errno != errno.ENOENT:
971 if e.errno != errno.ENOENT:
974 raise
972 raise
975
973
976 class filecacheentry(object):
974 class filecacheentry(object):
977 def __init__(self, paths, stat=True):
975 def __init__(self, paths, stat=True):
978 self._entries = []
976 self._entries = []
979 for path in paths:
977 for path in paths:
980 self._entries.append(filecachesubentry(path, stat))
978 self._entries.append(filecachesubentry(path, stat))
981
979
982 def changed(self):
980 def changed(self):
983 '''true if any entry has changed'''
981 '''true if any entry has changed'''
984 for entry in self._entries:
982 for entry in self._entries:
985 if entry.changed():
983 if entry.changed():
986 return True
984 return True
987 return False
985 return False
988
986
989 def refresh(self):
987 def refresh(self):
990 for entry in self._entries:
988 for entry in self._entries:
991 entry.refresh()
989 entry.refresh()
992
990
993 class filecache(object):
991 class filecache(object):
994 '''A property-like decorator that tracks files under .hg/ for updates.
992 '''A property-like decorator that tracks files under .hg/ for updates.
995
993
996 Records stat info when called in _filecache.
994 Records stat info when called in _filecache.
997
995
998 On subsequent calls, compares old stat info with new info, and recreates the
996 On subsequent calls, compares old stat info with new info, and recreates the
999 object when any of the files changes, updating the new stat info in
997 object when any of the files changes, updating the new stat info in
1000 _filecache.
998 _filecache.
1001
999
1002 Mercurial uses either atomic renames or appends for files under .hg,
1000 Mercurial uses either atomic renames or appends for files under .hg,
1003 so to ensure the cache is reliable we need the filesystem to be able
1001 so to ensure the cache is reliable we need the filesystem to be able
1004 to tell us if a file has been replaced. If it can't, we fall back to
1002 to tell us if a file has been replaced. If it can't, we fall back to
1005 recreating the object on every call (essentially the same behavior as
1003 recreating the object on every call (essentially the same behavior as
1006 propertycache).
1004 propertycache).
1007
1005
1008 '''
1006 '''
1009 def __init__(self, *paths):
1007 def __init__(self, *paths):
1010 self.paths = paths
1008 self.paths = paths
1011
1009
1012 def join(self, obj, fname):
1010 def join(self, obj, fname):
1013 """Used to compute the runtime path of a cached file.
1011 """Used to compute the runtime path of a cached file.
1014
1012
1015 Users should subclass filecache and provide their own version of this
1013 Users should subclass filecache and provide their own version of this
1016 function to call the appropriate join function on 'obj' (an instance
1014 function to call the appropriate join function on 'obj' (an instance
1017 of the class that its member function was decorated).
1015 of the class that its member function was decorated).
1018 """
1016 """
1019 raise NotImplementedError
1017 raise NotImplementedError
1020
1018
1021 def __call__(self, func):
1019 def __call__(self, func):
1022 self.func = func
1020 self.func = func
1023 self.name = func.__name__.encode('ascii')
1021 self.name = func.__name__.encode('ascii')
1024 return self
1022 return self
1025
1023
1026 def __get__(self, obj, type=None):
1024 def __get__(self, obj, type=None):
1027 # if accessed on the class, return the descriptor itself.
1025 # if accessed on the class, return the descriptor itself.
1028 if obj is None:
1026 if obj is None:
1029 return self
1027 return self
1030 # do we need to check if the file changed?
1028 # do we need to check if the file changed?
1031 if self.name in obj.__dict__:
1029 if self.name in obj.__dict__:
1032 assert self.name in obj._filecache, self.name
1030 assert self.name in obj._filecache, self.name
1033 return obj.__dict__[self.name]
1031 return obj.__dict__[self.name]
1034
1032
1035 entry = obj._filecache.get(self.name)
1033 entry = obj._filecache.get(self.name)
1036
1034
1037 if entry:
1035 if entry:
1038 if entry.changed():
1036 if entry.changed():
1039 entry.obj = self.func(obj)
1037 entry.obj = self.func(obj)
1040 else:
1038 else:
1041 paths = [self.join(obj, path) for path in self.paths]
1039 paths = [self.join(obj, path) for path in self.paths]
1042
1040
1043 # We stat -before- creating the object so our cache doesn't lie if
1041 # We stat -before- creating the object so our cache doesn't lie if
1044 # a writer modified between the time we read and stat
1042 # a writer modified between the time we read and stat
1045 entry = filecacheentry(paths, True)
1043 entry = filecacheentry(paths, True)
1046 entry.obj = self.func(obj)
1044 entry.obj = self.func(obj)
1047
1045
1048 obj._filecache[self.name] = entry
1046 obj._filecache[self.name] = entry
1049
1047
1050 obj.__dict__[self.name] = entry.obj
1048 obj.__dict__[self.name] = entry.obj
1051 return entry.obj
1049 return entry.obj
1052
1050
1053 def __set__(self, obj, value):
1051 def __set__(self, obj, value):
1054 if self.name not in obj._filecache:
1052 if self.name not in obj._filecache:
1055 # we add an entry for the missing value because X in __dict__
1053 # we add an entry for the missing value because X in __dict__
1056 # implies X in _filecache
1054 # implies X in _filecache
1057 paths = [self.join(obj, path) for path in self.paths]
1055 paths = [self.join(obj, path) for path in self.paths]
1058 ce = filecacheentry(paths, False)
1056 ce = filecacheentry(paths, False)
1059 obj._filecache[self.name] = ce
1057 obj._filecache[self.name] = ce
1060 else:
1058 else:
1061 ce = obj._filecache[self.name]
1059 ce = obj._filecache[self.name]
1062
1060
1063 ce.obj = value # update cached copy
1061 ce.obj = value # update cached copy
1064 obj.__dict__[self.name] = value # update copy returned by obj.x
1062 obj.__dict__[self.name] = value # update copy returned by obj.x
1065
1063
1066 def __delete__(self, obj):
1064 def __delete__(self, obj):
1067 try:
1065 try:
1068 del obj.__dict__[self.name]
1066 del obj.__dict__[self.name]
1069 except KeyError:
1067 except KeyError:
1070 raise AttributeError(self.name)
1068 raise AttributeError(self.name)
1071
1069
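The decorator above is used by subclassing it with a concrete join() and decorating properties of an object that carries a _filecache dict; a minimal sketch of that pattern (class and helper names are illustrative, loosely modeled on how localrepo wires it up):

    class repofilecache(filecache):
        def join(self, obj, fname):
            return obj.vfs.join(fname)

    class repoobject(object):
        def __init__(self, vfs):
            self.vfs = vfs
            self._filecache = {}

        @repofilecache('bookmarks')
        def _bookmarks(self):
            return loadbookmarks(self)   # illustrative loader, re-run when the file changes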
1072 def extdatasource(repo, source):
1070 def extdatasource(repo, source):
1073 """Gather a map of rev -> value dict from the specified source
1071 """Gather a map of rev -> value dict from the specified source
1074
1072
1075 A source spec is treated as a URL, with a special case shell: type
1073 A source spec is treated as a URL, with a special case shell: type
1076 for parsing the output from a shell command.
1074 for parsing the output from a shell command.
1077
1075
1078 The data is parsed as a series of newline-separated records where
1076 The data is parsed as a series of newline-separated records where
1079 each record is a revision specifier optionally followed by a space
1077 each record is a revision specifier optionally followed by a space
1080 and a freeform string value. If the revision is known locally, it
1078 and a freeform string value. If the revision is known locally, it
1081 is converted to a rev, otherwise the record is skipped.
1079 is converted to a rev, otherwise the record is skipped.
1082
1080
1083 Note that both key and value are treated as UTF-8 and converted to
1081 Note that both key and value are treated as UTF-8 and converted to
1084 the local encoding. This allows uniformity between local and
1082 the local encoding. This allows uniformity between local and
1085 remote data sources.
1083 remote data sources.
1086 """
1084 """
1087
1085
1088 spec = repo.ui.config("extdata", source)
1086 spec = repo.ui.config("extdata", source)
1089 if not spec:
1087 if not spec:
1090 raise error.Abort(_("unknown extdata source '%s'") % source)
1088 raise error.Abort(_("unknown extdata source '%s'") % source)
1091
1089
1092 data = {}
1090 data = {}
1093 src = proc = None
1091 src = proc = None
1094 try:
1092 try:
1095 if spec.startswith("shell:"):
1093 if spec.startswith("shell:"):
1096 # external commands should be run relative to the repo root
1094 # external commands should be run relative to the repo root
1097 cmd = spec[6:]
1095 cmd = spec[6:]
1098 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1096 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1099 close_fds=procutil.closefds,
1097 close_fds=procutil.closefds,
1100 stdout=subprocess.PIPE, cwd=repo.root)
1098 stdout=subprocess.PIPE, cwd=repo.root)
1101 src = proc.stdout
1099 src = proc.stdout
1102 else:
1100 else:
1103 # treat as a URL or file
1101 # treat as a URL or file
1104 src = url.open(repo.ui, spec)
1102 src = url.open(repo.ui, spec)
1105 for l in src:
1103 for l in src:
1106 if " " in l:
1104 if " " in l:
1107 k, v = l.strip().split(" ", 1)
1105 k, v = l.strip().split(" ", 1)
1108 else:
1106 else:
1109 k, v = l.strip(), ""
1107 k, v = l.strip(), ""
1110
1108
1111 k = encoding.tolocal(k)
1109 k = encoding.tolocal(k)
1112 try:
1110 try:
1113 data[repo[k].rev()] = encoding.tolocal(v)
1111 data[repo[k].rev()] = encoding.tolocal(v)
1114 except (error.LookupError, error.RepoLookupError):
1112 except (error.LookupError, error.RepoLookupError):
1115 pass # we ignore data for nodes that don't exist locally
1113 pass # we ignore data for nodes that don't exist locally
1116 finally:
1114 finally:
1117 if proc:
1115 if proc:
1118 proc.communicate()
1116 proc.communicate()
1119 if src:
1117 if src:
1120 src.close()
1118 src.close()
1121 if proc and proc.returncode != 0:
1119 if proc and proc.returncode != 0:
1122 raise error.Abort(_("extdata command '%s' failed: %s")
1120 raise error.Abort(_("extdata command '%s' failed: %s")
1123 % (cmd, procutil.explainexit(proc.returncode)[0]))
1121 % (cmd, procutil.explainexit(proc.returncode)[0]))
1124
1122
1125 return data
1123 return data
1126
1124
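As described in the docstring above, the source name is looked up in the [extdata] config section; a sketch in which the section entry and shell command are illustrative:

    # hgrc:
    #   [extdata]
    #   bugrefs = shell: cat .hg/bugref-map
    data = extdatasource(repo, 'bugrefs')   # {rev: 'BUG-123', ...} for locally known revisions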
1127 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1125 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1128 if lock is None:
1126 if lock is None:
1129 raise error.LockInheritanceContractViolation(
1127 raise error.LockInheritanceContractViolation(
1130 'lock can only be inherited while held')
1128 'lock can only be inherited while held')
1131 if environ is None:
1129 if environ is None:
1132 environ = {}
1130 environ = {}
1133 with lock.inherit() as locker:
1131 with lock.inherit() as locker:
1134 environ[envvar] = locker
1132 environ[envvar] = locker
1135 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1133 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1136
1134
1137 def wlocksub(repo, cmd, *args, **kwargs):
1135 def wlocksub(repo, cmd, *args, **kwargs):
1138 """run cmd as a subprocess that allows inheriting repo's wlock
1136 """run cmd as a subprocess that allows inheriting repo's wlock
1139
1137
1140 This can only be called while the wlock is held. This takes all the
1138 This can only be called while the wlock is held. This takes all the
1141 arguments that ui.system does, and returns the exit code of the
1139 arguments that ui.system does, and returns the exit code of the
1142 subprocess."""
1140 subprocess."""
1143 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1141 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1144 **kwargs)
1142 **kwargs)
1145
1143
1146 def gdinitconfig(ui):
1144 def gdinitconfig(ui):
1147 """helper function to know if a repo should be created as general delta
1145 """helper function to know if a repo should be created as general delta
1148 """
1146 """
1149 # experimental config: format.generaldelta
1147 # experimental config: format.generaldelta
1150 return (ui.configbool('format', 'generaldelta')
1148 return (ui.configbool('format', 'generaldelta')
1151 or ui.configbool('format', 'usegeneraldelta'))
1149 or ui.configbool('format', 'usegeneraldelta'))
1152
1150
1153 def gddeltaconfig(ui):
1151 def gddeltaconfig(ui):
1154 """helper function to know if incoming delta should be optimised
1152 """helper function to know if incoming delta should be optimised
1155 """
1153 """
1156 # experimental config: format.generaldelta
1154 # experimental config: format.generaldelta
1157 return ui.configbool('format', 'generaldelta')
1155 return ui.configbool('format', 'generaldelta')
1158
1156
1159 class simplekeyvaluefile(object):
1157 class simplekeyvaluefile(object):
1160 """A simple file with key=value lines
1158 """A simple file with key=value lines
1161
1159
1162 Keys must be alphanumeric and start with a letter; values must not
1160 Keys must be alphanumeric and start with a letter; values must not
1163 contain '\n' characters"""
1161 contain '\n' characters"""
1164 firstlinekey = '__firstline'
1162 firstlinekey = '__firstline'
1165
1163
1166 def __init__(self, vfs, path, keys=None):
1164 def __init__(self, vfs, path, keys=None):
1167 self.vfs = vfs
1165 self.vfs = vfs
1168 self.path = path
1166 self.path = path
1169
1167
1170 def read(self, firstlinenonkeyval=False):
1168 def read(self, firstlinenonkeyval=False):
1171 """Read the contents of a simple key-value file
1169 """Read the contents of a simple key-value file
1172
1170
1173 'firstlinenonkeyval' indicates whether the first line of file should
1171 'firstlinenonkeyval' indicates whether the first line of file should
1174 be treated as a key-value pair or returned fully under the
1172 be treated as a key-value pair or returned fully under the
1175 __firstline key."""
1173 __firstline key."""
1176 lines = self.vfs.readlines(self.path)
1174 lines = self.vfs.readlines(self.path)
1177 d = {}
1175 d = {}
1178 if firstlinenonkeyval:
1176 if firstlinenonkeyval:
1179 if not lines:
1177 if not lines:
1180 e = _("empty simplekeyvalue file")
1178 e = _("empty simplekeyvalue file")
1181 raise error.CorruptedState(e)
1179 raise error.CorruptedState(e)
1182 # we don't want to include '\n' in the __firstline
1180 # we don't want to include '\n' in the __firstline
1183 d[self.firstlinekey] = lines[0][:-1]
1181 d[self.firstlinekey] = lines[0][:-1]
1184 del lines[0]
1182 del lines[0]
1185
1183
1186 try:
1184 try:
1187 # the 'if line.strip()' part prevents us from failing on empty
1185 # the 'if line.strip()' part prevents us from failing on empty
1188 # lines which only contain '\n' therefore are not skipped
1186 # lines which only contain '\n' therefore are not skipped
1189 # by 'if line'
1187 # by 'if line'
1190 updatedict = dict(line[:-1].split('=', 1) for line in lines
1188 updatedict = dict(line[:-1].split('=', 1) for line in lines
1191 if line.strip())
1189 if line.strip())
1192 if self.firstlinekey in updatedict:
1190 if self.firstlinekey in updatedict:
1193 e = _("%r can't be used as a key")
1191 e = _("%r can't be used as a key")
1194 raise error.CorruptedState(e % self.firstlinekey)
1192 raise error.CorruptedState(e % self.firstlinekey)
1195 d.update(updatedict)
1193 d.update(updatedict)
1196 except ValueError as e:
1194 except ValueError as e:
1197 raise error.CorruptedState(str(e))
1195 raise error.CorruptedState(str(e))
1198 return d
1196 return d
1199
1197
1200 def write(self, data, firstline=None):
1198 def write(self, data, firstline=None):
1201 """Write key=>value mapping to a file
1199 """Write key=>value mapping to a file
1202 data is a dict. Keys must be alphanumerical and start with a letter.
1200 data is a dict. Keys must be alphanumerical and start with a letter.
1203 Values must not contain newline characters.
1201 Values must not contain newline characters.
1204
1202
1205 If 'firstline' is not None, it is written to file before
1203 If 'firstline' is not None, it is written to file before
1206 everything else, as it is, not in a key=value form"""
1204 everything else, as it is, not in a key=value form"""
1207 lines = []
1205 lines = []
1208 if firstline is not None:
1206 if firstline is not None:
1209 lines.append('%s\n' % firstline)
1207 lines.append('%s\n' % firstline)
1210
1208
1211 for k, v in data.items():
1209 for k, v in data.items():
1212 if k == self.firstlinekey:
1210 if k == self.firstlinekey:
1213 e = "key name '%s' is reserved" % self.firstlinekey
1211 e = "key name '%s' is reserved" % self.firstlinekey
1214 raise error.ProgrammingError(e)
1212 raise error.ProgrammingError(e)
1215 if not k[0:1].isalpha():
1213 if not k[0:1].isalpha():
1216 e = "keys must start with a letter in a key-value file"
1214 e = "keys must start with a letter in a key-value file"
1217 raise error.ProgrammingError(e)
1215 raise error.ProgrammingError(e)
1218 if not k.isalnum():
1216 if not k.isalnum():
1219 e = "invalid key name in a simple key-value file"
1217 e = "invalid key name in a simple key-value file"
1220 raise error.ProgrammingError(e)
1218 raise error.ProgrammingError(e)
1221 if '\n' in v:
1219 if '\n' in v:
1222 e = "invalid value in a simple key-value file"
1220 e = "invalid value in a simple key-value file"
1223 raise error.ProgrammingError(e)
1221 raise error.ProgrammingError(e)
1224 lines.append("%s=%s\n" % (k, v))
1222 lines.append("%s=%s\n" % (k, v))
1225 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1223 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1226 fp.write(''.join(lines))
1224 fp.write(''.join(lines))
1227
1225
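The read/write pair above defines a deliberately tiny on-disk format: one 'key=value' pair per line, with an optional free-form first line surfaced under the reserved '__firstline' key, and the same key/value restrictions enforced on write. A minimal standalone sketch of that round trip (hypothetical helper names, not part of Mercurial's API):

def encode_kv(data, firstline=None):
    # one 'key=value' per line, optionally preceded by a free-form first line
    lines = []
    if firstline is not None:
        lines.append('%s\n' % firstline)
    for k, v in sorted(data.items()):
        assert k[0:1].isalpha() and k.isalnum(), 'invalid key: %r' % k
        assert '\n' not in v, 'values must not contain newlines'
        lines.append('%s=%s\n' % (k, v))
    return ''.join(lines)

def decode_kv(text, firstlinenonkeyval=False):
    # the first line lands under '__firstline' when firstlinenonkeyval is set
    lines = text.splitlines(True)
    d = {}
    if firstlinenonkeyval:
        d['__firstline'] = lines[0][:-1]
        del lines[0]
    d.update(line[:-1].split('=', 1) for line in lines if line.strip())
    return d

blob = encode_kv({'state': 'pending', 'node': 'abc123'}, firstline='v1')
parsed = decode_kv(blob, firstlinenonkeyval=True)
assert parsed['__firstline'] == 'v1' and parsed['node'] == 'abc123'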
1228 _reportobsoletedsource = [
1226 _reportobsoletedsource = [
1229 'debugobsolete',
1227 'debugobsolete',
1230 'pull',
1228 'pull',
1231 'push',
1229 'push',
1232 'serve',
1230 'serve',
1233 'unbundle',
1231 'unbundle',
1234 ]
1232 ]
1235
1233
1236 _reportnewcssource = [
1234 _reportnewcssource = [
1237 'pull',
1235 'pull',
1238 'unbundle',
1236 'unbundle',
1239 ]
1237 ]
1240
1238
1241 # a list of (repo, ctx, files) functions called by various commands to allow
1239 # a list of (repo, ctx, files) functions called by various commands to allow
1242 # extensions to ensure the corresponding files are available locally, before the
1240 # extensions to ensure the corresponding files are available locally, before the
1243 # command uses them.
1241 # command uses them.
1244 fileprefetchhooks = util.hooks()
1242 fileprefetchhooks = util.hooks()
1245
1243
1246 # A marker that tells the evolve extension to suppress its own reporting
1244 # A marker that tells the evolve extension to suppress its own reporting
1247 _reportstroubledchangesets = True
1245 _reportstroubledchangesets = True
1248
1246
1249 def registersummarycallback(repo, otr, txnname=''):
1247 def registersummarycallback(repo, otr, txnname=''):
1250 """register a callback to issue a summary after the transaction is closed
1248 """register a callback to issue a summary after the transaction is closed
1251 """
1249 """
1252 def txmatch(sources):
1250 def txmatch(sources):
1253 return any(txnname.startswith(source) for source in sources)
1251 return any(txnname.startswith(source) for source in sources)
1254
1252
1255 categories = []
1253 categories = []
1256
1254
1257 def reportsummary(func):
1255 def reportsummary(func):
1258 """decorator for report callbacks."""
1256 """decorator for report callbacks."""
1259 # The repoview life cycle is shorter than the one of the actual
1257 # The repoview life cycle is shorter than the one of the actual
1260 # underlying repository. So the filtered object can die before the
1258 # underlying repository. So the filtered object can die before the
1261 # weakref is used, leading to trouble. We keep a reference to the
1259 # weakref is used, leading to trouble. We keep a reference to the
1262 # unfiltered object and restore the filtering when retrieving the
1260 # unfiltered object and restore the filtering when retrieving the
1263 # repository through the weakref.
1261 # repository through the weakref.
1264 filtername = repo.filtername
1262 filtername = repo.filtername
1265 reporef = weakref.ref(repo.unfiltered())
1263 reporef = weakref.ref(repo.unfiltered())
1266 def wrapped(tr):
1264 def wrapped(tr):
1267 repo = reporef()
1265 repo = reporef()
1268 if filtername:
1266 if filtername:
1269 repo = repo.filtered(filtername)
1267 repo = repo.filtered(filtername)
1270 func(repo, tr)
1268 func(repo, tr)
1271 newcat = '%02i-txnreport' % len(categories)
1269 newcat = '%02i-txnreport' % len(categories)
1272 otr.addpostclose(newcat, wrapped)
1270 otr.addpostclose(newcat, wrapped)
1273 categories.append(newcat)
1271 categories.append(newcat)
1274 return wrapped
1272 return wrapped
1275
1273
1276 if txmatch(_reportobsoletedsource):
1274 if txmatch(_reportobsoletedsource):
1277 @reportsummary
1275 @reportsummary
1278 def reportobsoleted(repo, tr):
1276 def reportobsoleted(repo, tr):
1279 obsoleted = obsutil.getobsoleted(repo, tr)
1277 obsoleted = obsutil.getobsoleted(repo, tr)
1280 if obsoleted:
1278 if obsoleted:
1281 repo.ui.status(_('obsoleted %i changesets\n')
1279 repo.ui.status(_('obsoleted %i changesets\n')
1282 % len(obsoleted))
1280 % len(obsoleted))
1283
1281
1284 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1282 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1285 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1283 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1286 instabilitytypes = [
1284 instabilitytypes = [
1287 ('orphan', 'orphan'),
1285 ('orphan', 'orphan'),
1288 ('phase-divergent', 'phasedivergent'),
1286 ('phase-divergent', 'phasedivergent'),
1289 ('content-divergent', 'contentdivergent'),
1287 ('content-divergent', 'contentdivergent'),
1290 ]
1288 ]
1291
1289
1292 def getinstabilitycounts(repo):
1290 def getinstabilitycounts(repo):
1293 filtered = repo.changelog.filteredrevs
1291 filtered = repo.changelog.filteredrevs
1294 counts = {}
1292 counts = {}
1295 for instability, revset in instabilitytypes:
1293 for instability, revset in instabilitytypes:
1296 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1294 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1297 filtered)
1295 filtered)
1298 return counts
1296 return counts
1299
1297
1300 oldinstabilitycounts = getinstabilitycounts(repo)
1298 oldinstabilitycounts = getinstabilitycounts(repo)
1301 @reportsummary
1299 @reportsummary
1302 def reportnewinstabilities(repo, tr):
1300 def reportnewinstabilities(repo, tr):
1303 newinstabilitycounts = getinstabilitycounts(repo)
1301 newinstabilitycounts = getinstabilitycounts(repo)
1304 for instability, revset in instabilitytypes:
1302 for instability, revset in instabilitytypes:
1305 delta = (newinstabilitycounts[instability] -
1303 delta = (newinstabilitycounts[instability] -
1306 oldinstabilitycounts[instability])
1304 oldinstabilitycounts[instability])
1307 if delta > 0:
1305 if delta > 0:
1308 repo.ui.warn(_('%i new %s changesets\n') %
1306 repo.ui.warn(_('%i new %s changesets\n') %
1309 (delta, instability))
1307 (delta, instability))
1310
1308
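The pre/post counting around this hunk only warns when a category actually grows during the transaction; pre-existing orphans and divergences stay silent. The same delta rule as a standalone sketch, with plain dicts standing in for the revset-backed counts:

def newinstabilities(old, new):
    # report only the categories whose count increased
    return {kind: new[kind] - old.get(kind, 0)
            for kind in new if new[kind] > old.get(kind, 0)}

assert newinstabilities({'orphan': 1, 'phase-divergent': 0},
                        {'orphan': 3, 'phase-divergent': 0}) == {'orphan': 2}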
1311 if txmatch(_reportnewcssource):
1309 if txmatch(_reportnewcssource):
1312 @reportsummary
1310 @reportsummary
1313 def reportnewcs(repo, tr):
1311 def reportnewcs(repo, tr):
1314 """Report the range of new revisions pulled/unbundled."""
1312 """Report the range of new revisions pulled/unbundled."""
1315 newrevs = tr.changes.get('revs', xrange(0, 0))
1313 newrevs = tr.changes.get('revs', xrange(0, 0))
1316 if not newrevs:
1314 if not newrevs:
1317 return
1315 return
1318
1316
1319 # Compute the bounds of new revisions' range, excluding obsoletes.
1317 # Compute the bounds of new revisions' range, excluding obsoletes.
1320 unfi = repo.unfiltered()
1318 unfi = repo.unfiltered()
1321 revs = unfi.revs('%ld and not obsolete()', newrevs)
1319 revs = unfi.revs('%ld and not obsolete()', newrevs)
1322 if not revs:
1320 if not revs:
1323 # Got only obsoletes.
1321 # Got only obsoletes.
1324 return
1322 return
1325 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1323 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1326
1324
1327 if minrev == maxrev:
1325 if minrev == maxrev:
1328 revrange = minrev
1326 revrange = minrev
1329 else:
1327 else:
1330 revrange = '%s:%s' % (minrev, maxrev)
1328 revrange = '%s:%s' % (minrev, maxrev)
1331 repo.ui.status(_('new changesets %s\n') % revrange)
1329 repo.ui.status(_('new changesets %s\n') % revrange)
1332
1330
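The comment inside the reportsummary decorator above is the subtle part: holding a hard reference to the filtered repoview would be fragile because its life cycle is shorter than the repository's, so each callback keeps a weak reference to the unfiltered repository and re-applies the filter at call time. A standalone sketch of that pattern with toy stand-ins (these are not the real repo or repoview classes):

import weakref

class toyrepo(object):
    filtername = 'visible'
    def unfiltered(self):
        return self
    def filtered(self, name):
        view = toyrepo()           # the real code returns a fresh repoview
        view.filtername = name
        return view

def makecallback(repo, func):
    filtername = repo.filtername
    reporef = weakref.ref(repo.unfiltered())    # don't keep the repo alive
    def wrapped(tr):
        repo = reporef()
        if repo is None:                        # repo already collected
            return
        if filtername:
            repo = repo.filtered(filtername)    # restore the filtering
        func(repo, tr)
    return wrapped

seen = []
r = toyrepo()
cb = makecallback(r, lambda repo, tr: seen.append(repo.filtername))
cb(tr=None)
assert seen == ['visible']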
1333 def nodesummaries(repo, nodes, maxnumnodes=4):
1331 def nodesummaries(repo, nodes, maxnumnodes=4):
1334 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1332 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1335 return ' '.join(short(h) for h in nodes)
1333 return ' '.join(short(h) for h in nodes)
1336 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1334 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1337 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1335 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1338
1336
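nodesummaries() keeps hints readable: at most maxnumnodes short hashes are printed unless the user asked for verbose output, with the remainder collapsed into "and N others". The same truncation rule as a standalone sketch (fake hashes, with short() approximated as the first 12 hex digits):

def summarize(nodes, maxnumnodes=4, verbose=False):
    short = [n[:12] for n in nodes]
    if len(nodes) <= maxnumnodes or verbose:
        return ' '.join(short)
    return '%s and %d others' % (' '.join(short[:maxnumnodes]),
                                 len(nodes) - maxnumnodes)

heads = ['%040x' % i for i in range(6)]   # six fake 40-character hashes
assert summarize(heads).endswith('and 2 others')
assert summarize(heads, verbose=True).endswith(heads[-1][:12])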
1339 def enforcesinglehead(repo, tr, desc):
1337 def enforcesinglehead(repo, tr, desc):
1340 """check that no named branch has multiple heads"""
1338 """check that no named branch has multiple heads"""
1341 if desc in ('strip', 'repair'):
1339 if desc in ('strip', 'repair'):
1342 # skip the logic during strip
1340 # skip the logic during strip
1343 return
1341 return
1344 visible = repo.filtered('visible')
1342 visible = repo.filtered('visible')
1345 # possible improvement: we could restrict the check to affected branch
1343 # possible improvement: we could restrict the check to affected branch
1346 for name, heads in visible.branchmap().iteritems():
1344 for name, heads in visible.branchmap().iteritems():
1347 if len(heads) > 1:
1345 if len(heads) > 1:
1348 msg = _('rejecting multiple heads on branch "%s"')
1346 msg = _('rejecting multiple heads on branch "%s"')
1349 msg %= name
1347 msg %= name
1350 hint = _('%d heads: %s')
1348 hint = _('%d heads: %s')
1351 hint %= (len(heads), nodesummaries(repo, heads))
1349 hint %= (len(heads), nodesummaries(repo, heads))
1352 raise error.Abort(msg, hint=hint)
1350 raise error.Abort(msg, hint=hint)
1353
1351
1354 def wrapconvertsink(sink):
1352 def wrapconvertsink(sink):
1355 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1353 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1356 before it is used, whether or not the convert extension was formally loaded.
1354 before it is used, whether or not the convert extension was formally loaded.
1357 """
1355 """
1358 return sink
1356 return sink
1359
1357
1360 def unhidehashlikerevs(repo, specs, hiddentype):
1358 def unhidehashlikerevs(repo, specs, hiddentype):
1361 """parse the user specs and unhide changesets whose hash or revision number
1359 """parse the user specs and unhide changesets whose hash or revision number
1362 is passed.
1360 is passed.
1363
1361
1364 hiddentype can be: 1) 'warn': warn while unhiding changesets
1362 hiddentype can be: 1) 'warn': warn while unhiding changesets
1365 2) 'nowarn': don't warn while unhiding changesets
1363 2) 'nowarn': don't warn while unhiding changesets
1366
1364
1367 returns a repo object with the required changesets unhidden
1365 returns a repo object with the required changesets unhidden
1368 """
1366 """
1369 if not repo.filtername or not repo.ui.configbool('experimental',
1367 if not repo.filtername or not repo.ui.configbool('experimental',
1370 'directaccess'):
1368 'directaccess'):
1371 return repo
1369 return repo
1372
1370
1373 if repo.filtername not in ('visible', 'visible-hidden'):
1371 if repo.filtername not in ('visible', 'visible-hidden'):
1374 return repo
1372 return repo
1375
1373
1376 symbols = set()
1374 symbols = set()
1377 for spec in specs:
1375 for spec in specs:
1378 try:
1376 try:
1379 tree = revsetlang.parse(spec)
1377 tree = revsetlang.parse(spec)
1380 except error.ParseError: # will be reported by scmutil.revrange()
1378 except error.ParseError: # will be reported by scmutil.revrange()
1381 continue
1379 continue
1382
1380
1383 symbols.update(revsetlang.gethashlikesymbols(tree))
1381 symbols.update(revsetlang.gethashlikesymbols(tree))
1384
1382
1385 if not symbols:
1383 if not symbols:
1386 return repo
1384 return repo
1387
1385
1388 revs = _getrevsfromsymbols(repo, symbols)
1386 revs = _getrevsfromsymbols(repo, symbols)
1389
1387
1390 if not revs:
1388 if not revs:
1391 return repo
1389 return repo
1392
1390
1393 if hiddentype == 'warn':
1391 if hiddentype == 'warn':
1394 unfi = repo.unfiltered()
1392 unfi = repo.unfiltered()
1395 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1393 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1396 repo.ui.warn(_("warning: accessing hidden changesets for write "
1394 repo.ui.warn(_("warning: accessing hidden changesets for write "
1397 "operation: %s\n") % revstr)
1395 "operation: %s\n") % revstr)
1398
1396
1399 # we have to use a new filtername to separate branch/tags caches until we can
1397 # we have to use a new filtername to separate branch/tags caches until we can
1400 # disable these caches when revisions are dynamically pinned.
1398 # disable these caches when revisions are dynamically pinned.
1401 return repo.filtered('visible-hidden', revs)
1399 return repo.filtered('visible-hidden', revs)
1402
1400
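Together with _getrevsfromsymbols() below, the function above boils down to: pull hash-like symbols out of the user's revset specs, resolve them against the unfiltered history, and pin only the ones that are currently hidden for the duration of the command. A rough standalone sketch of that flow, with a deliberately crude tokenizer and hypothetical callables standing in for revsetlang parsing and changelog lookup:

def hiddenrevsfromspecs(specs, resolve, visible):
    # resolve(symbol) -> rev or None; visible is the set of visible revs
    revs = set()
    for spec in specs:
        for sym in spec.replace('::', ' ').split():   # crude tokenizer
            rev = resolve(sym)
            if rev is not None and rev not in visible:
                revs.add(rev)
    return revs

# e.g. 'abc123' names a hidden rev 7 while revs 0-2 are visible:
assert hiddenrevsfromspecs(['abc123::tip'],
                           {'abc123': 7}.get, {0, 1, 2}) == {7}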
1403 def _getrevsfromsymbols(repo, symbols):
1401 def _getrevsfromsymbols(repo, symbols):
1404 """parse the list of symbols and returns a set of revision numbers of hidden
1402 """parse the list of symbols and returns a set of revision numbers of hidden
1405 changesets present in symbols"""
1403 changesets present in symbols"""
1406 revs = set()
1404 revs = set()
1407 unfi = repo.unfiltered()
1405 unfi = repo.unfiltered()
1408 unficl = unfi.changelog
1406 unficl = unfi.changelog
1409 cl = repo.changelog
1407 cl = repo.changelog
1410 tiprev = len(unficl)
1408 tiprev = len(unficl)
1411 pmatch = unficl._partialmatch
1409 pmatch = unficl._partialmatch
1412 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1410 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1413 for s in symbols:
1411 for s in symbols:
1414 try:
1412 try:
1415 n = int(s)
1413 n = int(s)
1416 if n <= tiprev:
1414 if n <= tiprev:
1417 if not allowrevnums:
1415 if not allowrevnums:
1418 continue
1416 continue
1419 else:
1417 else:
1420 if n not in cl:
1418 if n not in cl:
1421 revs.add(n)
1419 revs.add(n)
1422 continue
1420 continue
1423 except ValueError:
1421 except ValueError:
1424 pass
1422 pass
1425
1423
1426 try:
1424 try:
1427 s = pmatch(s)
1425 s = pmatch(s)
1428 except (error.LookupError, error.WdirUnsupported):
1426 except (error.LookupError, error.WdirUnsupported):
1429 s = None
1427 s = None
1430
1428
1431 if s is not None:
1429 if s is not None:
1432 rev = unficl.rev(s)
1430 rev = unficl.rev(s)
1433 if rev not in cl:
1431 if rev not in cl:
1434 revs.add(rev)
1432 revs.add(rev)
1435
1433
1436 return revs
1434 return revs
@@ -1,1815 +1,1814 b''
1 # subrepo.py - sub-repository classes and factory
1 # subrepo.py - sub-repository classes and factory
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22 from .i18n import _
22 from .i18n import _
23 from . import (
23 from . import (
24 cmdutil,
24 cmdutil,
25 encoding,
25 encoding,
26 error,
26 error,
27 exchange,
27 exchange,
28 logcmdutil,
28 logcmdutil,
29 match as matchmod,
29 match as matchmod,
30 node,
30 node,
31 pathutil,
31 pathutil,
32 phases,
32 phases,
33 pycompat,
33 pycompat,
34 scmutil,
34 scmutil,
35 subrepoutil,
35 subrepoutil,
36 util,
36 util,
37 vfs as vfsmod,
37 vfs as vfsmod,
38 )
38 )
39 from .utils import (
39 from .utils import (
40 dateutil,
40 dateutil,
41 procutil,
41 procutil,
42 stringutil,
42 stringutil,
43 )
43 )
44
44
45 hg = None
45 hg = None
46 reporelpath = subrepoutil.reporelpath
46 reporelpath = subrepoutil.reporelpath
47 subrelpath = subrepoutil.subrelpath
47 subrelpath = subrepoutil.subrelpath
48 _abssource = subrepoutil._abssource
48 _abssource = subrepoutil._abssource
49 propertycache = util.propertycache
49 propertycache = util.propertycache
50
50
51 def _expandedabspath(path):
51 def _expandedabspath(path):
52 '''
52 '''
53 get a path or URL and, if it is a path, expand it and return an absolute path
53 get a path or URL and, if it is a path, expand it and return an absolute path
54 '''
54 '''
55 expandedpath = util.urllocalpath(util.expandpath(path))
55 expandedpath = util.urllocalpath(util.expandpath(path))
56 u = util.url(expandedpath)
56 u = util.url(expandedpath)
57 if not u.scheme:
57 if not u.scheme:
58 path = util.normpath(os.path.abspath(u.path))
58 path = util.normpath(os.path.abspath(u.path))
59 return path
59 return path
60
60
61 def _getstorehashcachename(remotepath):
61 def _getstorehashcachename(remotepath):
62 '''get a unique filename for the store hash cache of a remote repository'''
62 '''get a unique filename for the store hash cache of a remote repository'''
63 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
63 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
64
64
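_getstorehashcachename() above only needs a stable, filesystem-safe identifier per remote, so it hashes the normalized path and keeps the first 12 hex digits. A standalone illustration of the idea, using plain os.path helpers where the real code goes through util.expandpath and util.urllocalpath:

import hashlib
import os

def storehashcachename(remotepath):
    expanded = os.path.abspath(os.path.expanduser(remotepath))
    return hashlib.sha1(expanded.encode('utf-8')).hexdigest()[:12]

# two spellings of the same local path map to the same cache file name
assert (storehashcachename('~/src/repo') ==
        storehashcachename(os.path.expanduser('~/src/repo')))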
65 class SubrepoAbort(error.Abort):
65 class SubrepoAbort(error.Abort):
66 """Exception class used to avoid handling a subrepo error more than once"""
66 """Exception class used to avoid handling a subrepo error more than once"""
67 def __init__(self, *args, **kw):
67 def __init__(self, *args, **kw):
68 self.subrepo = kw.pop(r'subrepo', None)
68 self.subrepo = kw.pop(r'subrepo', None)
69 self.cause = kw.pop(r'cause', None)
69 self.cause = kw.pop(r'cause', None)
70 error.Abort.__init__(self, *args, **kw)
70 error.Abort.__init__(self, *args, **kw)
71
71
72 def annotatesubrepoerror(func):
72 def annotatesubrepoerror(func):
73 def decoratedmethod(self, *args, **kargs):
73 def decoratedmethod(self, *args, **kargs):
74 try:
74 try:
75 res = func(self, *args, **kargs)
75 res = func(self, *args, **kargs)
76 except SubrepoAbort as ex:
76 except SubrepoAbort as ex:
77 # This exception has already been handled
77 # This exception has already been handled
78 raise ex
78 raise ex
79 except error.Abort as ex:
79 except error.Abort as ex:
80 subrepo = subrelpath(self)
80 subrepo = subrelpath(self)
81 errormsg = (stringutil.forcebytestr(ex) + ' '
81 errormsg = (stringutil.forcebytestr(ex) + ' '
82 + _('(in subrepository "%s")') % subrepo)
82 + _('(in subrepository "%s")') % subrepo)
83 # avoid handling this exception by raising a SubrepoAbort exception
83 # avoid handling this exception by raising a SubrepoAbort exception
84 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
84 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
85 cause=sys.exc_info())
85 cause=sys.exc_info())
86 return res
86 return res
87 return decoratedmethod
87 return decoratedmethod
88
88
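annotatesubrepoerror above exists so an Abort gets the offending subrepo path appended exactly once: the SubrepoAbort re-raise is what stops outer layers from annotating it again. The same one-shot decorator pattern as a standalone sketch (generic exception types, not Mercurial's error classes):

class AnnotatedError(Exception):
    pass

def annotate_errors(context):
    def deco(func):
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except AnnotatedError:
                raise                      # already annotated further down
            except Exception as ex:
                raise AnnotatedError('%s (in %s)' % (ex, context))
        return wrapper
    return deco

@annotate_errors('subrepository "lib"')
def failing():
    raise RuntimeError('push failed')

try:
    failing()
except AnnotatedError as ex:
    assert 'subrepository "lib"' in str(ex)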
89 def _updateprompt(ui, sub, dirty, local, remote):
89 def _updateprompt(ui, sub, dirty, local, remote):
90 if dirty:
90 if dirty:
91 msg = (_(' subrepository sources for %s differ\n'
91 msg = (_(' subrepository sources for %s differ\n'
92 'use (l)ocal source (%s) or (r)emote source (%s)?'
92 'use (l)ocal source (%s) or (r)emote source (%s)?'
93 '$$ &Local $$ &Remote')
93 '$$ &Local $$ &Remote')
94 % (subrelpath(sub), local, remote))
94 % (subrelpath(sub), local, remote))
95 else:
95 else:
96 msg = (_(' subrepository sources for %s differ (in checked out '
96 msg = (_(' subrepository sources for %s differ (in checked out '
97 'version)\n'
97 'version)\n'
98 'use (l)ocal source (%s) or (r)emote source (%s)?'
98 'use (l)ocal source (%s) or (r)emote source (%s)?'
99 '$$ &Local $$ &Remote')
99 '$$ &Local $$ &Remote')
100 % (subrelpath(sub), local, remote))
100 % (subrelpath(sub), local, remote))
101 return ui.promptchoice(msg, 0)
101 return ui.promptchoice(msg, 0)
102
102
103 def _sanitize(ui, vfs, ignore):
103 def _sanitize(ui, vfs, ignore):
104 for dirname, dirs, names in vfs.walk():
104 for dirname, dirs, names in vfs.walk():
105 for i, d in enumerate(dirs):
105 for i, d in enumerate(dirs):
106 if d.lower() == ignore:
106 if d.lower() == ignore:
107 del dirs[i]
107 del dirs[i]
108 break
108 break
109 if vfs.basename(dirname).lower() != '.hg':
109 if vfs.basename(dirname).lower() != '.hg':
110 continue
110 continue
111 for f in names:
111 for f in names:
112 if f.lower() == 'hgrc':
112 if f.lower() == 'hgrc':
113 ui.warn(_("warning: removing potentially hostile 'hgrc' "
113 ui.warn(_("warning: removing potentially hostile 'hgrc' "
114 "in '%s'\n") % vfs.join(dirname))
114 "in '%s'\n") % vfs.join(dirname))
115 vfs.unlink(vfs.reljoin(dirname, f))
115 vfs.unlink(vfs.reljoin(dirname, f))
116
116
117 def _auditsubrepopath(repo, path):
117 def _auditsubrepopath(repo, path):
118 # auditor doesn't check if the path itself is a symlink
118 # auditor doesn't check if the path itself is a symlink
119 pathutil.pathauditor(repo.root)(path)
119 pathutil.pathauditor(repo.root)(path)
120 if repo.wvfs.islink(path):
120 if repo.wvfs.islink(path):
121 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
121 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
122
122
123 SUBREPO_ALLOWED_DEFAULTS = {
123 SUBREPO_ALLOWED_DEFAULTS = {
124 'hg': True,
124 'hg': True,
125 'git': False,
125 'git': False,
126 'svn': False,
126 'svn': False,
127 }
127 }
128
128
129 def _checktype(ui, kind):
129 def _checktype(ui, kind):
130 # subrepos.allowed is a master kill switch. If disabled, subrepos are
130 # subrepos.allowed is a master kill switch. If disabled, subrepos are
131 # disabled period.
131 # disabled period.
132 if not ui.configbool('subrepos', 'allowed', True):
132 if not ui.configbool('subrepos', 'allowed', True):
133 raise error.Abort(_('subrepos not enabled'),
133 raise error.Abort(_('subrepos not enabled'),
134 hint=_("see 'hg help config.subrepos' for details"))
134 hint=_("see 'hg help config.subrepos' for details"))
135
135
136 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
136 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
137 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
137 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
138 raise error.Abort(_('%s subrepos not allowed') % kind,
138 raise error.Abort(_('%s subrepos not allowed') % kind,
139 hint=_("see 'hg help config.subrepos' for details"))
139 hint=_("see 'hg help config.subrepos' for details"))
140
140
141 if kind not in types:
141 if kind not in types:
142 raise error.Abort(_('unknown subrepo type %s') % kind)
142 raise error.Abort(_('unknown subrepo type %s') % kind)
143
143
144 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
144 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
145 """return instance of the right subrepo class for subrepo in path"""
145 """return instance of the right subrepo class for subrepo in path"""
146 # subrepo inherently violates our import layering rules
146 # subrepo inherently violates our import layering rules
147 # because it wants to make repo objects from deep inside the stack
147 # because it wants to make repo objects from deep inside the stack
148 # so we manually delay the circular imports to not break
148 # so we manually delay the circular imports to not break
149 # scripts that don't use our demand-loading
149 # scripts that don't use our demand-loading
150 global hg
150 global hg
151 from . import hg as h
151 from . import hg as h
152 hg = h
152 hg = h
153
153
154 repo = ctx.repo()
154 repo = ctx.repo()
155 _auditsubrepopath(repo, path)
155 _auditsubrepopath(repo, path)
156 state = ctx.substate[path]
156 state = ctx.substate[path]
157 _checktype(repo.ui, state[2])
157 _checktype(repo.ui, state[2])
158 if allowwdir:
158 if allowwdir:
159 state = (state[0], ctx.subrev(path), state[2])
159 state = (state[0], ctx.subrev(path), state[2])
160 return types[state[2]](ctx, path, state[:2], allowcreate)
160 return types[state[2]](ctx, path, state[:2], allowcreate)
161
161
162 def nullsubrepo(ctx, path, pctx):
162 def nullsubrepo(ctx, path, pctx):
163 """return an empty subrepo in pctx for the extant subrepo in ctx"""
163 """return an empty subrepo in pctx for the extant subrepo in ctx"""
164 # subrepo inherently violates our import layering rules
164 # subrepo inherently violates our import layering rules
165 # because it wants to make repo objects from deep inside the stack
165 # because it wants to make repo objects from deep inside the stack
166 # so we manually delay the circular imports to not break
166 # so we manually delay the circular imports to not break
167 # scripts that don't use our demand-loading
167 # scripts that don't use our demand-loading
168 global hg
168 global hg
169 from . import hg as h
169 from . import hg as h
170 hg = h
170 hg = h
171
171
172 repo = ctx.repo()
172 repo = ctx.repo()
173 _auditsubrepopath(repo, path)
173 _auditsubrepopath(repo, path)
174 state = ctx.substate[path]
174 state = ctx.substate[path]
175 _checktype(repo.ui, state[2])
175 _checktype(repo.ui, state[2])
176 subrev = ''
176 subrev = ''
177 if state[2] == 'hg':
177 if state[2] == 'hg':
178 subrev = "0" * 40
178 subrev = "0" * 40
179 return types[state[2]](pctx, path, (state[0], subrev), True)
179 return types[state[2]](pctx, path, (state[0], subrev), True)
180
180
181 # subrepo classes need to implement the following abstract class:
181 # subrepo classes need to implement the following abstract class:
182
182
183 class abstractsubrepo(object):
183 class abstractsubrepo(object):
184
184
185 def __init__(self, ctx, path):
185 def __init__(self, ctx, path):
186 """Initialize abstractsubrepo part
186 """Initialize abstractsubrepo part
187
187
188 ``ctx`` is the context referring to this subrepository in the
188 ``ctx`` is the context referring to this subrepository in the
189 parent repository.
189 parent repository.
190
190
191 ``path`` is the path to this subrepository as seen from
191 ``path`` is the path to this subrepository as seen from
192 innermost repository.
192 innermost repository.
193 """
193 """
194 self.ui = ctx.repo().ui
194 self.ui = ctx.repo().ui
195 self._ctx = ctx
195 self._ctx = ctx
196 self._path = path
196 self._path = path
197
197
198 def addwebdirpath(self, serverpath, webconf):
198 def addwebdirpath(self, serverpath, webconf):
199 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
199 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
200
200
201 ``serverpath`` is the path component of the URL for this repo.
201 ``serverpath`` is the path component of the URL for this repo.
202
202
203 ``webconf`` is the dictionary of hgwebdir entries.
203 ``webconf`` is the dictionary of hgwebdir entries.
204 """
204 """
205 pass
205 pass
206
206
207 def storeclean(self, path):
207 def storeclean(self, path):
208 """
208 """
209 returns true if the repository has not changed since it was last
209 returns true if the repository has not changed since it was last
210 cloned from or pushed to a given repository.
210 cloned from or pushed to a given repository.
211 """
211 """
212 return False
212 return False
213
213
214 def dirty(self, ignoreupdate=False, missing=False):
214 def dirty(self, ignoreupdate=False, missing=False):
215 """returns true if the dirstate of the subrepo is dirty or does not
215 """returns true if the dirstate of the subrepo is dirty or does not
216 match current stored state. If ignoreupdate is true, only check
216 match current stored state. If ignoreupdate is true, only check
217 whether the subrepo has uncommitted changes in its dirstate. If missing
217 whether the subrepo has uncommitted changes in its dirstate. If missing
218 is true, check for deleted files.
218 is true, check for deleted files.
219 """
219 """
220 raise NotImplementedError
220 raise NotImplementedError
221
221
222 def dirtyreason(self, ignoreupdate=False, missing=False):
222 def dirtyreason(self, ignoreupdate=False, missing=False):
223 """return reason string if it is ``dirty()``
223 """return reason string if it is ``dirty()``
224
224
225 The returned string should carry enough information to be used in
225 The returned string should carry enough information to be used in
226 an exception message.
226 an exception message.
227
227
228 Otherwise, this returns None.
228 Otherwise, this returns None.
229 """
229 """
230 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
230 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
231 return _('uncommitted changes in subrepository "%s"'
231 return _('uncommitted changes in subrepository "%s"'
232 ) % subrelpath(self)
232 ) % subrelpath(self)
233
233
234 def bailifchanged(self, ignoreupdate=False, hint=None):
234 def bailifchanged(self, ignoreupdate=False, hint=None):
235 """raise Abort if subrepository is ``dirty()``
235 """raise Abort if subrepository is ``dirty()``
236 """
236 """
237 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
237 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
238 missing=True)
238 missing=True)
239 if dirtyreason:
239 if dirtyreason:
240 raise error.Abort(dirtyreason, hint=hint)
240 raise error.Abort(dirtyreason, hint=hint)
241
241
242 def basestate(self):
242 def basestate(self):
243 """current working directory base state, disregarding .hgsubstate
243 """current working directory base state, disregarding .hgsubstate
244 state and working directory modifications"""
244 state and working directory modifications"""
245 raise NotImplementedError
245 raise NotImplementedError
246
246
247 def checknested(self, path):
247 def checknested(self, path):
248 """check if path is a subrepository within this repository"""
248 """check if path is a subrepository within this repository"""
249 return False
249 return False
250
250
251 def commit(self, text, user, date):
251 def commit(self, text, user, date):
252 """commit the current changes to the subrepo with the given
252 """commit the current changes to the subrepo with the given
253 log message. Use given user and date if possible. Return the
253 log message. Use given user and date if possible. Return the
254 new state of the subrepo.
254 new state of the subrepo.
255 """
255 """
256 raise NotImplementedError
256 raise NotImplementedError
257
257
258 def phase(self, state):
258 def phase(self, state):
259 """returns phase of specified state in the subrepository.
259 """returns phase of specified state in the subrepository.
260 """
260 """
261 return phases.public
261 return phases.public
262
262
263 def remove(self):
263 def remove(self):
264 """remove the subrepo
264 """remove the subrepo
265
265
266 (should verify the dirstate is not dirty first)
266 (should verify the dirstate is not dirty first)
267 """
267 """
268 raise NotImplementedError
268 raise NotImplementedError
269
269
270 def get(self, state, overwrite=False):
270 def get(self, state, overwrite=False):
271 """run whatever commands are needed to put the subrepo into
271 """run whatever commands are needed to put the subrepo into
272 this state
272 this state
273 """
273 """
274 raise NotImplementedError
274 raise NotImplementedError
275
275
276 def merge(self, state):
276 def merge(self, state):
277 """merge currently-saved state with the new state."""
277 """merge currently-saved state with the new state."""
278 raise NotImplementedError
278 raise NotImplementedError
279
279
280 def push(self, opts):
280 def push(self, opts):
281 """perform whatever action is analogous to 'hg push'
281 """perform whatever action is analogous to 'hg push'
282
282
283 This may be a no-op on some systems.
283 This may be a no-op on some systems.
284 """
284 """
285 raise NotImplementedError
285 raise NotImplementedError
286
286
287 def add(self, ui, match, prefix, explicitonly, **opts):
287 def add(self, ui, match, prefix, explicitonly, **opts):
288 return []
288 return []
289
289
290 def addremove(self, matcher, prefix, opts, dry_run, similarity):
290 def addremove(self, matcher, prefix, opts):
291 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
291 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
292 return 1
292 return 1
293
293
294 def cat(self, match, fm, fntemplate, prefix, **opts):
294 def cat(self, match, fm, fntemplate, prefix, **opts):
295 return 1
295 return 1
296
296
297 def status(self, rev2, **opts):
297 def status(self, rev2, **opts):
298 return scmutil.status([], [], [], [], [], [], [])
298 return scmutil.status([], [], [], [], [], [], [])
299
299
300 def diff(self, ui, diffopts, node2, match, prefix, **opts):
300 def diff(self, ui, diffopts, node2, match, prefix, **opts):
301 pass
301 pass
302
302
303 def outgoing(self, ui, dest, opts):
303 def outgoing(self, ui, dest, opts):
304 return 1
304 return 1
305
305
306 def incoming(self, ui, source, opts):
306 def incoming(self, ui, source, opts):
307 return 1
307 return 1
308
308
309 def files(self):
309 def files(self):
310 """return filename iterator"""
310 """return filename iterator"""
311 raise NotImplementedError
311 raise NotImplementedError
312
312
313 def filedata(self, name, decode):
313 def filedata(self, name, decode):
314 """return file data, optionally passed through repo decoders"""
314 """return file data, optionally passed through repo decoders"""
315 raise NotImplementedError
315 raise NotImplementedError
316
316
317 def fileflags(self, name):
317 def fileflags(self, name):
318 """return file flags"""
318 """return file flags"""
319 return ''
319 return ''
320
320
321 def getfileset(self, expr):
321 def getfileset(self, expr):
322 """Resolve the fileset expression for this repo"""
322 """Resolve the fileset expression for this repo"""
323 return set()
323 return set()
324
324
325 def printfiles(self, ui, m, fm, fmt, subrepos):
325 def printfiles(self, ui, m, fm, fmt, subrepos):
326 """handle the files command for this subrepo"""
326 """handle the files command for this subrepo"""
327 return 1
327 return 1
328
328
329 def archive(self, archiver, prefix, match=None, decode=True):
329 def archive(self, archiver, prefix, match=None, decode=True):
330 if match is not None:
330 if match is not None:
331 files = [f for f in self.files() if match(f)]
331 files = [f for f in self.files() if match(f)]
332 else:
332 else:
333 files = self.files()
333 files = self.files()
334 total = len(files)
334 total = len(files)
335 relpath = subrelpath(self)
335 relpath = subrelpath(self)
336 self.ui.progress(_('archiving (%s)') % relpath, 0,
336 self.ui.progress(_('archiving (%s)') % relpath, 0,
337 unit=_('files'), total=total)
337 unit=_('files'), total=total)
338 for i, name in enumerate(files):
338 for i, name in enumerate(files):
339 flags = self.fileflags(name)
339 flags = self.fileflags(name)
340 mode = 'x' in flags and 0o755 or 0o644
340 mode = 'x' in flags and 0o755 or 0o644
341 symlink = 'l' in flags
341 symlink = 'l' in flags
342 archiver.addfile(prefix + self._path + '/' + name,
342 archiver.addfile(prefix + self._path + '/' + name,
343 mode, symlink, self.filedata(name, decode))
343 mode, symlink, self.filedata(name, decode))
344 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
344 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
345 unit=_('files'), total=total)
345 unit=_('files'), total=total)
346 self.ui.progress(_('archiving (%s)') % relpath, None)
346 self.ui.progress(_('archiving (%s)') % relpath, None)
347 return total
347 return total
348
348
349 def walk(self, match):
349 def walk(self, match):
350 '''
350 '''
351 walk recursively through the directory tree, finding all files
351 walk recursively through the directory tree, finding all files
352 matched by the match function
352 matched by the match function
353 '''
353 '''
354
354
355 def forget(self, match, prefix, dryrun):
355 def forget(self, match, prefix, dryrun):
356 return ([], [])
356 return ([], [])
357
357
358 def removefiles(self, matcher, prefix, after, force, subrepos,
358 def removefiles(self, matcher, prefix, after, force, subrepos,
359 dryrun, warnings):
359 dryrun, warnings):
360 """remove the matched files from the subrepository and the filesystem,
360 """remove the matched files from the subrepository and the filesystem,
361 possibly by force and/or after the file has been removed from the
361 possibly by force and/or after the file has been removed from the
362 filesystem. Return 0 on success, 1 on any warning.
362 filesystem. Return 0 on success, 1 on any warning.
363 """
363 """
364 warnings.append(_("warning: removefiles not implemented (%s)")
364 warnings.append(_("warning: removefiles not implemented (%s)")
365 % self._path)
365 % self._path)
366 return 1
366 return 1
367
367
368 def revert(self, substate, *pats, **opts):
368 def revert(self, substate, *pats, **opts):
369 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
369 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
370 % (substate[0], substate[2]))
370 % (substate[0], substate[2]))
371 return []
371 return []
372
372
373 def shortid(self, revid):
373 def shortid(self, revid):
374 return revid
374 return revid
375
375
376 def unshare(self):
376 def unshare(self):
377 '''
377 '''
378 convert this repository from shared to normal storage.
378 convert this repository from shared to normal storage.
379 '''
379 '''
380
380
381 def verify(self):
381 def verify(self):
382 '''verify the integrity of the repository. Return 0 on success or
382 '''verify the integrity of the repository. Return 0 on success or
383 warning, 1 on any error.
383 warning, 1 on any error.
384 '''
384 '''
385 return 0
385 return 0
386
386
387 @propertycache
387 @propertycache
388 def wvfs(self):
388 def wvfs(self):
389 """return vfs to access the working directory of this subrepository
389 """return vfs to access the working directory of this subrepository
390 """
390 """
391 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
391 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
392
392
393 @propertycache
393 @propertycache
394 def _relpath(self):
394 def _relpath(self):
395 """return path to this subrepository as seen from outermost repository
395 """return path to this subrepository as seen from outermost repository
396 """
396 """
397 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
397 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
398
398
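Most of abstractsubrepo above is optional hooks with safe defaults; the hard requirements for a new backend are the methods that raise NotImplementedError (dirty, basestate, commit, remove, get, merge, push, files, filedata). A toy standalone class sketching that minimum surface, deliberately not subclassing the real abstract class and not touching any repository:

class toysubrepo(object):
    def __init__(self, path, rev=''):
        self._path, self._rev, self.calls = path, rev, []

    def dirty(self, ignoreupdate=False, missing=False):
        return False                       # pretend the checkout is clean
    def basestate(self):
        return self._rev                   # revision currently checked out
    def commit(self, text, user, date):
        self.calls.append('commit')
        return self._rev
    def remove(self):
        self.calls.append('remove')
    def get(self, state, overwrite=False):
        self._rev = state[1]               # state looks like (source, revision, kind)
    def merge(self, state):
        self.get(state)
    def push(self, opts):
        self.calls.append('push')
        return True
    def files(self):
        return iter([])
    def filedata(self, name, decode):
        return b''

sub = toysubrepo('lib', rev='0' * 40)
sub.get(('../lib', 'deadbeef', 'toy'))
assert sub.basestate() == 'deadbeef'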
399 class hgsubrepo(abstractsubrepo):
399 class hgsubrepo(abstractsubrepo):
400 def __init__(self, ctx, path, state, allowcreate):
400 def __init__(self, ctx, path, state, allowcreate):
401 super(hgsubrepo, self).__init__(ctx, path)
401 super(hgsubrepo, self).__init__(ctx, path)
402 self._state = state
402 self._state = state
403 r = ctx.repo()
403 r = ctx.repo()
404 root = r.wjoin(path)
404 root = r.wjoin(path)
405 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
405 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
406 self._repo = hg.repository(r.baseui, root, create=create)
406 self._repo = hg.repository(r.baseui, root, create=create)
407
407
408 # Propagate the parent's --hidden option
408 # Propagate the parent's --hidden option
409 if r is r.unfiltered():
409 if r is r.unfiltered():
410 self._repo = self._repo.unfiltered()
410 self._repo = self._repo.unfiltered()
411
411
412 self.ui = self._repo.ui
412 self.ui = self._repo.ui
413 for s, k in [('ui', 'commitsubrepos')]:
413 for s, k in [('ui', 'commitsubrepos')]:
414 v = r.ui.config(s, k)
414 v = r.ui.config(s, k)
415 if v:
415 if v:
416 self.ui.setconfig(s, k, v, 'subrepo')
416 self.ui.setconfig(s, k, v, 'subrepo')
417 # internal config: ui._usedassubrepo
417 # internal config: ui._usedassubrepo
418 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
418 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
419 self._initrepo(r, state[0], create)
419 self._initrepo(r, state[0], create)
420
420
421 @annotatesubrepoerror
421 @annotatesubrepoerror
422 def addwebdirpath(self, serverpath, webconf):
422 def addwebdirpath(self, serverpath, webconf):
423 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
423 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
424
424
425 def storeclean(self, path):
425 def storeclean(self, path):
426 with self._repo.lock():
426 with self._repo.lock():
427 return self._storeclean(path)
427 return self._storeclean(path)
428
428
429 def _storeclean(self, path):
429 def _storeclean(self, path):
430 clean = True
430 clean = True
431 itercache = self._calcstorehash(path)
431 itercache = self._calcstorehash(path)
432 for filehash in self._readstorehashcache(path):
432 for filehash in self._readstorehashcache(path):
433 if filehash != next(itercache, None):
433 if filehash != next(itercache, None):
434 clean = False
434 clean = False
435 break
435 break
436 if clean:
436 if clean:
437 # if not empty:
437 # if not empty:
438 # the cached and current pull states have a different size
438 # the cached and current pull states have a different size
439 clean = next(itercache, None) is None
439 clean = next(itercache, None) is None
440 return clean
440 return clean
441
441
442 def _calcstorehash(self, remotepath):
442 def _calcstorehash(self, remotepath):
443 '''calculate a unique "store hash"
443 '''calculate a unique "store hash"
444
444
445 This method is used to detect when there are changes that may
445 This method is used to detect when there are changes that may
446 require a push to a given remote path.'''
446 require a push to a given remote path.'''
447 # sort the files that will be hashed in increasing (likely) file size
447 # sort the files that will be hashed in increasing (likely) file size
448 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
448 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
449 yield '# %s\n' % _expandedabspath(remotepath)
449 yield '# %s\n' % _expandedabspath(remotepath)
450 vfs = self._repo.vfs
450 vfs = self._repo.vfs
451 for relname in filelist:
451 for relname in filelist:
452 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
452 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
453 yield '%s = %s\n' % (relname, filehash)
453 yield '%s = %s\n' % (relname, filehash)
454
454
455 @propertycache
455 @propertycache
456 def _cachestorehashvfs(self):
456 def _cachestorehashvfs(self):
457 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
457 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
458
458
459 def _readstorehashcache(self, remotepath):
459 def _readstorehashcache(self, remotepath):
460 '''read the store hash cache for a given remote repository'''
460 '''read the store hash cache for a given remote repository'''
461 cachefile = _getstorehashcachename(remotepath)
461 cachefile = _getstorehashcachename(remotepath)
462 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
462 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
463
463
464 def _cachestorehash(self, remotepath):
464 def _cachestorehash(self, remotepath):
465 '''cache the current store hash
465 '''cache the current store hash
466
466
467 Each remote repo requires its own store hash cache, because a subrepo
467 Each remote repo requires its own store hash cache, because a subrepo
468 store may be "clean" versus a given remote repo, but not versus another
468 store may be "clean" versus a given remote repo, but not versus another
469 '''
469 '''
470 cachefile = _getstorehashcachename(remotepath)
470 cachefile = _getstorehashcachename(remotepath)
471 with self._repo.lock():
471 with self._repo.lock():
472 storehash = list(self._calcstorehash(remotepath))
472 storehash = list(self._calcstorehash(remotepath))
473 vfs = self._cachestorehashvfs
473 vfs = self._cachestorehashvfs
474 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
474 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
475
475
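_storeclean() above declares the store clean for a remote only when the cached hash lines and the freshly recomputed ones match line for line, with nothing left over on either side. The same comparison as a standalone sketch:

def storeclean(cachedlines, currentlines):
    it = iter(currentlines)
    for cached in cachedlines:
        if cached != next(it, None):
            return False
    return next(it, None) is None          # nothing extra on the current side

old = ['# /path/to/remote\n', '00changelog.i = abc\n']
assert storeclean(old, list(old))
assert not storeclean(old, old[:1])        # current side is missing a line
assert not storeclean(old[:1], old)        # current side has an extra line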
476 def _getctx(self):
476 def _getctx(self):
477 '''fetch the context for this subrepo revision, possibly a workingctx
477 '''fetch the context for this subrepo revision, possibly a workingctx
478 '''
478 '''
479 if self._ctx.rev() is None:
479 if self._ctx.rev() is None:
480 return self._repo[None] # workingctx if parent is workingctx
480 return self._repo[None] # workingctx if parent is workingctx
481 else:
481 else:
482 rev = self._state[1]
482 rev = self._state[1]
483 return self._repo[rev]
483 return self._repo[rev]
484
484
485 @annotatesubrepoerror
485 @annotatesubrepoerror
486 def _initrepo(self, parentrepo, source, create):
486 def _initrepo(self, parentrepo, source, create):
487 self._repo._subparent = parentrepo
487 self._repo._subparent = parentrepo
488 self._repo._subsource = source
488 self._repo._subsource = source
489
489
490 if create:
490 if create:
491 lines = ['[paths]\n']
491 lines = ['[paths]\n']
492
492
493 def addpathconfig(key, value):
493 def addpathconfig(key, value):
494 if value:
494 if value:
495 lines.append('%s = %s\n' % (key, value))
495 lines.append('%s = %s\n' % (key, value))
496 self.ui.setconfig('paths', key, value, 'subrepo')
496 self.ui.setconfig('paths', key, value, 'subrepo')
497
497
498 defpath = _abssource(self._repo, abort=False)
498 defpath = _abssource(self._repo, abort=False)
499 defpushpath = _abssource(self._repo, True, abort=False)
499 defpushpath = _abssource(self._repo, True, abort=False)
500 addpathconfig('default', defpath)
500 addpathconfig('default', defpath)
501 if defpath != defpushpath:
501 if defpath != defpushpath:
502 addpathconfig('default-push', defpushpath)
502 addpathconfig('default-push', defpushpath)
503
503
504 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
504 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
505
505
506 @annotatesubrepoerror
506 @annotatesubrepoerror
507 def add(self, ui, match, prefix, explicitonly, **opts):
507 def add(self, ui, match, prefix, explicitonly, **opts):
508 return cmdutil.add(ui, self._repo, match,
508 return cmdutil.add(ui, self._repo, match,
509 self.wvfs.reljoin(prefix, self._path),
509 self.wvfs.reljoin(prefix, self._path),
510 explicitonly, **opts)
510 explicitonly, **opts)
511
511
512 @annotatesubrepoerror
512 @annotatesubrepoerror
513 def addremove(self, m, prefix, opts, dry_run, similarity):
513 def addremove(self, m, prefix, opts):
514 # In the same way as subdirectories are processed, once in a subrepo,
514 # In the same way as subdirectories are processed, once in a subrepo,
515 # always enter any of its subrepos. Don't corrupt the options that will
515 # always enter any of its subrepos. Don't corrupt the options that will
516 # be used to process sibling subrepos, however.
516 # be used to process sibling subrepos, however.
517 opts = copy.copy(opts)
517 opts = copy.copy(opts)
518 opts['subrepos'] = True
518 opts['subrepos'] = True
519 return scmutil.addremove(self._repo, m,
519 return scmutil.addremove(self._repo, m,
520 self.wvfs.reljoin(prefix, self._path), opts,
520 self.wvfs.reljoin(prefix, self._path), opts)
521 dry_run, similarity)
522
521
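This hunk is the caller-side half of the change described in the commit message: dry_run and similarity drop out of the addremove signatures and out of the scmutil.addremove call. The scmutil side is not part of this file, so the sketch below is an assumption about the new shape rather than the real implementation; presumably the two values now travel inside the opts dict that was already being passed along.

def addremove_sketch(repo, matcher, prefix, opts):
    # hypothetical stand-in for the callee after the API change
    dry_run = opts.get('dry_run')          # previously an explicit parameter
    similarity = opts.get('similarity')    # previously an explicit parameter
    return dry_run, similarity

assert addremove_sketch(None, None, '', {'dry_run': True,
                                         'similarity': 50}) == (True, 50)

Collapsing the two values into opts matches how the rest of the command options already reach addremove, which is presumably why the extra positional parameters became redundant.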
523 @annotatesubrepoerror
522 @annotatesubrepoerror
524 def cat(self, match, fm, fntemplate, prefix, **opts):
523 def cat(self, match, fm, fntemplate, prefix, **opts):
525 rev = self._state[1]
524 rev = self._state[1]
526 ctx = self._repo[rev]
525 ctx = self._repo[rev]
527 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
526 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
528 prefix, **opts)
527 prefix, **opts)
529
528
530 @annotatesubrepoerror
529 @annotatesubrepoerror
531 def status(self, rev2, **opts):
530 def status(self, rev2, **opts):
532 try:
531 try:
533 rev1 = self._state[1]
532 rev1 = self._state[1]
534 ctx1 = self._repo[rev1]
533 ctx1 = self._repo[rev1]
535 ctx2 = self._repo[rev2]
534 ctx2 = self._repo[rev2]
536 return self._repo.status(ctx1, ctx2, **opts)
535 return self._repo.status(ctx1, ctx2, **opts)
537 except error.RepoLookupError as inst:
536 except error.RepoLookupError as inst:
538 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
537 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
539 % (inst, subrelpath(self)))
538 % (inst, subrelpath(self)))
540 return scmutil.status([], [], [], [], [], [], [])
539 return scmutil.status([], [], [], [], [], [], [])
541
540
542 @annotatesubrepoerror
541 @annotatesubrepoerror
543 def diff(self, ui, diffopts, node2, match, prefix, **opts):
542 def diff(self, ui, diffopts, node2, match, prefix, **opts):
544 try:
543 try:
545 node1 = node.bin(self._state[1])
544 node1 = node.bin(self._state[1])
546 # We currently expect node2 to come from substate and be
545 # We currently expect node2 to come from substate and be
547 # in hex format
546 # in hex format
548 if node2 is not None:
547 if node2 is not None:
549 node2 = node.bin(node2)
548 node2 = node.bin(node2)
550 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
549 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
551 node1, node2, match,
550 node1, node2, match,
552 prefix=posixpath.join(prefix, self._path),
551 prefix=posixpath.join(prefix, self._path),
553 listsubrepos=True, **opts)
552 listsubrepos=True, **opts)
554 except error.RepoLookupError as inst:
553 except error.RepoLookupError as inst:
555 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
554 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
556 % (inst, subrelpath(self)))
555 % (inst, subrelpath(self)))
557
556
558 @annotatesubrepoerror
557 @annotatesubrepoerror
559 def archive(self, archiver, prefix, match=None, decode=True):
558 def archive(self, archiver, prefix, match=None, decode=True):
560 self._get(self._state + ('hg',))
559 self._get(self._state + ('hg',))
561 files = self.files()
560 files = self.files()
562 if match:
561 if match:
563 files = [f for f in files if match(f)]
562 files = [f for f in files if match(f)]
564 rev = self._state[1]
563 rev = self._state[1]
565 ctx = self._repo[rev]
564 ctx = self._repo[rev]
566 scmutil.fileprefetchhooks(self._repo, ctx, files)
565 scmutil.fileprefetchhooks(self._repo, ctx, files)
567 total = abstractsubrepo.archive(self, archiver, prefix, match)
566 total = abstractsubrepo.archive(self, archiver, prefix, match)
568 for subpath in ctx.substate:
567 for subpath in ctx.substate:
569 s = subrepo(ctx, subpath, True)
568 s = subrepo(ctx, subpath, True)
570 submatch = matchmod.subdirmatcher(subpath, match)
569 submatch = matchmod.subdirmatcher(subpath, match)
571 total += s.archive(archiver, prefix + self._path + '/', submatch,
570 total += s.archive(archiver, prefix + self._path + '/', submatch,
572 decode)
571 decode)
573 return total
572 return total
574
573
575 @annotatesubrepoerror
574 @annotatesubrepoerror
576 def dirty(self, ignoreupdate=False, missing=False):
575 def dirty(self, ignoreupdate=False, missing=False):
577 r = self._state[1]
576 r = self._state[1]
578 if r == '' and not ignoreupdate: # no state recorded
577 if r == '' and not ignoreupdate: # no state recorded
579 return True
578 return True
580 w = self._repo[None]
579 w = self._repo[None]
581 if r != w.p1().hex() and not ignoreupdate:
580 if r != w.p1().hex() and not ignoreupdate:
582 # different version checked out
581 # different version checked out
583 return True
582 return True
584 return w.dirty(missing=missing) # working directory changed
583 return w.dirty(missing=missing) # working directory changed
585
584
586 def basestate(self):
585 def basestate(self):
587 return self._repo['.'].hex()
586 return self._repo['.'].hex()
588
587
589 def checknested(self, path):
588 def checknested(self, path):
590 return self._repo._checknested(self._repo.wjoin(path))
589 return self._repo._checknested(self._repo.wjoin(path))
591
590
592 @annotatesubrepoerror
591 @annotatesubrepoerror
593 def commit(self, text, user, date):
592 def commit(self, text, user, date):
594 # don't bother committing in the subrepo if it's only been
593 # don't bother committing in the subrepo if it's only been
595 # updated
594 # updated
596 if not self.dirty(True):
595 if not self.dirty(True):
597 return self._repo['.'].hex()
596 return self._repo['.'].hex()
598 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
597 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
599 n = self._repo.commit(text, user, date)
598 n = self._repo.commit(text, user, date)
600 if not n:
599 if not n:
601 return self._repo['.'].hex() # different version checked out
600 return self._repo['.'].hex() # different version checked out
602 return node.hex(n)
601 return node.hex(n)
603
602
604 @annotatesubrepoerror
603 @annotatesubrepoerror
605 def phase(self, state):
604 def phase(self, state):
606 return self._repo[state or '.'].phase()
605 return self._repo[state or '.'].phase()
607
606
608 @annotatesubrepoerror
607 @annotatesubrepoerror
609 def remove(self):
608 def remove(self):
610 # we can't fully delete the repository as it may contain
609 # we can't fully delete the repository as it may contain
611 # local-only history
610 # local-only history
612 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
611 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
613 hg.clean(self._repo, node.nullid, False)
612 hg.clean(self._repo, node.nullid, False)
614
613
615 def _get(self, state):
614 def _get(self, state):
616 source, revision, kind = state
615 source, revision, kind = state
617 parentrepo = self._repo._subparent
616 parentrepo = self._repo._subparent
618
617
619 if revision in self._repo.unfiltered():
618 if revision in self._repo.unfiltered():
620 # Allow shared subrepos tracked at null to setup the sharedpath
619 # Allow shared subrepos tracked at null to setup the sharedpath
621 if len(self._repo) != 0 or not parentrepo.shared():
620 if len(self._repo) != 0 or not parentrepo.shared():
622 return True
621 return True
623 self._repo._subsource = source
622 self._repo._subsource = source
624 srcurl = _abssource(self._repo)
623 srcurl = _abssource(self._repo)
625 other = hg.peer(self._repo, {}, srcurl)
624 other = hg.peer(self._repo, {}, srcurl)
626 if len(self._repo) == 0:
625 if len(self._repo) == 0:
627 # use self._repo.vfs instead of self.wvfs to remove .hg only
626 # use self._repo.vfs instead of self.wvfs to remove .hg only
628 self._repo.vfs.rmtree()
627 self._repo.vfs.rmtree()
629
628
630 # A remote subrepo could be shared if there is a local copy
629 # A remote subrepo could be shared if there is a local copy
631 # relative to the parent's share source. But clone pooling doesn't
630 # relative to the parent's share source. But clone pooling doesn't
632 # assemble the repos in a tree, so that can't be consistently done.
631 # assemble the repos in a tree, so that can't be consistently done.
633 # A simpler option is for the user to configure clone pooling, and
632 # A simpler option is for the user to configure clone pooling, and
634 # work with that.
633 # work with that.
635 if parentrepo.shared() and hg.islocal(srcurl):
634 if parentrepo.shared() and hg.islocal(srcurl):
636 self.ui.status(_('sharing subrepo %s from %s\n')
635 self.ui.status(_('sharing subrepo %s from %s\n')
637 % (subrelpath(self), srcurl))
636 % (subrelpath(self), srcurl))
638 shared = hg.share(self._repo._subparent.baseui,
637 shared = hg.share(self._repo._subparent.baseui,
639 other, self._repo.root,
638 other, self._repo.root,
640 update=False, bookmarks=False)
639 update=False, bookmarks=False)
641 self._repo = shared.local()
640 self._repo = shared.local()
642 else:
641 else:
643 # TODO: find a common place for this and this code in the
642 # TODO: find a common place for this and this code in the
644 # share.py wrap of the clone command.
643 # share.py wrap of the clone command.
645 if parentrepo.shared():
644 if parentrepo.shared():
646 pool = self.ui.config('share', 'pool')
645 pool = self.ui.config('share', 'pool')
647 if pool:
646 if pool:
648 pool = util.expandpath(pool)
647 pool = util.expandpath(pool)
649
648
650 shareopts = {
649 shareopts = {
651 'pool': pool,
650 'pool': pool,
652 'mode': self.ui.config('share', 'poolnaming'),
651 'mode': self.ui.config('share', 'poolnaming'),
653 }
652 }
654 else:
653 else:
655 shareopts = {}
654 shareopts = {}
656
655
657 self.ui.status(_('cloning subrepo %s from %s\n')
656 self.ui.status(_('cloning subrepo %s from %s\n')
658 % (subrelpath(self), srcurl))
657 % (subrelpath(self), srcurl))
659 other, cloned = hg.clone(self._repo._subparent.baseui, {},
658 other, cloned = hg.clone(self._repo._subparent.baseui, {},
660 other, self._repo.root,
659 other, self._repo.root,
661 update=False, shareopts=shareopts)
660 update=False, shareopts=shareopts)
662 self._repo = cloned.local()
661 self._repo = cloned.local()
663 self._initrepo(parentrepo, source, create=True)
662 self._initrepo(parentrepo, source, create=True)
664 self._cachestorehash(srcurl)
663 self._cachestorehash(srcurl)
665 else:
664 else:
666 self.ui.status(_('pulling subrepo %s from %s\n')
665 self.ui.status(_('pulling subrepo %s from %s\n')
667 % (subrelpath(self), srcurl))
666 % (subrelpath(self), srcurl))
668 cleansub = self.storeclean(srcurl)
667 cleansub = self.storeclean(srcurl)
669 exchange.pull(self._repo, other)
668 exchange.pull(self._repo, other)
670 if cleansub:
669 if cleansub:
671 # keep the repo clean after pull
670 # keep the repo clean after pull
672 self._cachestorehash(srcurl)
671 self._cachestorehash(srcurl)
673 return False
672 return False
674
673
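As a hedged illustration of the branching in _get() above (share vs. pool clone vs. plain clone vs. pull), here is a small standalone sketch; get_strategy() and its boolean parameters are hypothetical names invented for this example and are not part of subrepo.py:

# Sketch only: approximates the decision flow of hgsubrepo._get() above.
# All names here are hypothetical; the real method works on repo objects.
def get_strategy(revision_known, repo_empty, parent_shared, src_is_local,
                 pool_configured):
    if revision_known and not (repo_empty and parent_shared):
        return 'nothing to fetch'       # revision is already present locally
    if repo_empty:
        if parent_shared and src_is_local:
            return 'share from local source'
        if parent_shared and pool_configured:
            return 'clone via share pool'
        return 'plain clone'
    return 'pull missing revision'

print(get_strategy(False, True, True, True, False))     # share from local source
print(get_strategy(False, False, False, False, False))  # pull missing revision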
675 @annotatesubrepoerror
674 @annotatesubrepoerror
676 def get(self, state, overwrite=False):
675 def get(self, state, overwrite=False):
677 inrepo = self._get(state)
676 inrepo = self._get(state)
678 source, revision, kind = state
677 source, revision, kind = state
679 repo = self._repo
678 repo = self._repo
680 repo.ui.debug("getting subrepo %s\n" % self._path)
679 repo.ui.debug("getting subrepo %s\n" % self._path)
681 if inrepo:
680 if inrepo:
682 urepo = repo.unfiltered()
681 urepo = repo.unfiltered()
683 ctx = urepo[revision]
682 ctx = urepo[revision]
684 if ctx.hidden():
683 if ctx.hidden():
685 urepo.ui.warn(
684 urepo.ui.warn(
686 _('revision %s in subrepository "%s" is hidden\n') \
685 _('revision %s in subrepository "%s" is hidden\n') \
687 % (revision[0:12], self._path))
686 % (revision[0:12], self._path))
688 repo = urepo
687 repo = urepo
689 hg.updaterepo(repo, revision, overwrite)
688 hg.updaterepo(repo, revision, overwrite)
690
689
691 @annotatesubrepoerror
690 @annotatesubrepoerror
692 def merge(self, state):
691 def merge(self, state):
693 self._get(state)
692 self._get(state)
694 cur = self._repo['.']
693 cur = self._repo['.']
695 dst = self._repo[state[1]]
694 dst = self._repo[state[1]]
696 anc = dst.ancestor(cur)
695 anc = dst.ancestor(cur)
697
696
698 def mergefunc():
697 def mergefunc():
699 if anc == cur and dst.branch() == cur.branch():
698 if anc == cur and dst.branch() == cur.branch():
700 self.ui.debug('updating subrepository "%s"\n'
699 self.ui.debug('updating subrepository "%s"\n'
701 % subrelpath(self))
700 % subrelpath(self))
702 hg.update(self._repo, state[1])
701 hg.update(self._repo, state[1])
703 elif anc == dst:
702 elif anc == dst:
704 self.ui.debug('skipping subrepository "%s"\n'
703 self.ui.debug('skipping subrepository "%s"\n'
705 % subrelpath(self))
704 % subrelpath(self))
706 else:
705 else:
707 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
706 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
708 hg.merge(self._repo, state[1], remind=False)
707 hg.merge(self._repo, state[1], remind=False)
709
708
710 wctx = self._repo[None]
709 wctx = self._repo[None]
711 if self.dirty():
710 if self.dirty():
712 if anc != dst:
711 if anc != dst:
713 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
712 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
714 mergefunc()
713 mergefunc()
715 else:
714 else:
716 mergefunc()
715 mergefunc()
717 else:
716 else:
718 mergefunc()
717 mergefunc()
719
718
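The ancestor test in merge() above boils down to a three-way choice. A minimal sketch, with hypothetical names and plain strings standing in for changectx objects:

# Sketch only: which action hgsubrepo.merge() above takes for a subrepo.
def merge_action(ancestor, current, destination, same_branch):
    if ancestor == current and same_branch:
        return 'update'   # destination is a descendant: plain update
    if ancestor == destination:
        return 'skip'     # destination is already an ancestor of current
    return 'merge'        # divergent histories: a real merge is needed

print(merge_action('a', 'a', 'b', True))  # update
print(merge_action('b', 'a', 'b', True))  # skip
print(merge_action('c', 'a', 'b', True))  # merge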
720 @annotatesubrepoerror
719 @annotatesubrepoerror
721 def push(self, opts):
720 def push(self, opts):
722 force = opts.get('force')
721 force = opts.get('force')
723 newbranch = opts.get('new_branch')
722 newbranch = opts.get('new_branch')
724 ssh = opts.get('ssh')
723 ssh = opts.get('ssh')
725
724
726 # push subrepos depth-first for coherent ordering
725 # push subrepos depth-first for coherent ordering
727 c = self._repo['.']
726 c = self._repo['.']
728 subs = c.substate # only repos that are committed
727 subs = c.substate # only repos that are committed
729 for s in sorted(subs):
728 for s in sorted(subs):
730 if c.sub(s).push(opts) == 0:
729 if c.sub(s).push(opts) == 0:
731 return False
730 return False
732
731
733 dsturl = _abssource(self._repo, True)
732 dsturl = _abssource(self._repo, True)
734 if not force:
733 if not force:
735 if self.storeclean(dsturl):
734 if self.storeclean(dsturl):
736 self.ui.status(
735 self.ui.status(
737 _('no changes made to subrepo %s since last push to %s\n')
736 _('no changes made to subrepo %s since last push to %s\n')
738 % (subrelpath(self), dsturl))
737 % (subrelpath(self), dsturl))
739 return None
738 return None
740 self.ui.status(_('pushing subrepo %s to %s\n') %
739 self.ui.status(_('pushing subrepo %s to %s\n') %
741 (subrelpath(self), dsturl))
740 (subrelpath(self), dsturl))
742 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
741 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
743 res = exchange.push(self._repo, other, force, newbranch=newbranch)
742 res = exchange.push(self._repo, other, force, newbranch=newbranch)
744
743
745 # the repo is now clean
744 # the repo is now clean
746 self._cachestorehash(dsturl)
745 self._cachestorehash(dsturl)
747 return res.cgresult
746 return res.cgresult
748
747
749 @annotatesubrepoerror
748 @annotatesubrepoerror
750 def outgoing(self, ui, dest, opts):
749 def outgoing(self, ui, dest, opts):
751 if 'rev' in opts or 'branch' in opts:
750 if 'rev' in opts or 'branch' in opts:
752 opts = copy.copy(opts)
751 opts = copy.copy(opts)
753 opts.pop('rev', None)
752 opts.pop('rev', None)
754 opts.pop('branch', None)
753 opts.pop('branch', None)
755 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
754 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
756
755
757 @annotatesubrepoerror
756 @annotatesubrepoerror
758 def incoming(self, ui, source, opts):
757 def incoming(self, ui, source, opts):
759 if 'rev' in opts or 'branch' in opts:
758 if 'rev' in opts or 'branch' in opts:
760 opts = copy.copy(opts)
759 opts = copy.copy(opts)
761 opts.pop('rev', None)
760 opts.pop('rev', None)
762 opts.pop('branch', None)
761 opts.pop('branch', None)
763 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
762 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
764
763
765 @annotatesubrepoerror
764 @annotatesubrepoerror
766 def files(self):
765 def files(self):
767 rev = self._state[1]
766 rev = self._state[1]
768 ctx = self._repo[rev]
767 ctx = self._repo[rev]
769 return ctx.manifest().keys()
768 return ctx.manifest().keys()
770
769
771 def filedata(self, name, decode):
770 def filedata(self, name, decode):
772 rev = self._state[1]
771 rev = self._state[1]
773 data = self._repo[rev][name].data()
772 data = self._repo[rev][name].data()
774 if decode:
773 if decode:
775 data = self._repo.wwritedata(name, data)
774 data = self._repo.wwritedata(name, data)
776 return data
775 return data
777
776
778 def fileflags(self, name):
777 def fileflags(self, name):
779 rev = self._state[1]
778 rev = self._state[1]
780 ctx = self._repo[rev]
779 ctx = self._repo[rev]
781 return ctx.flags(name)
780 return ctx.flags(name)
782
781
783 @annotatesubrepoerror
782 @annotatesubrepoerror
784 def printfiles(self, ui, m, fm, fmt, subrepos):
783 def printfiles(self, ui, m, fm, fmt, subrepos):
785 # If the parent context is a workingctx, use the workingctx here for
784 # If the parent context is a workingctx, use the workingctx here for
786 # consistency.
785 # consistency.
787 if self._ctx.rev() is None:
786 if self._ctx.rev() is None:
788 ctx = self._repo[None]
787 ctx = self._repo[None]
789 else:
788 else:
790 rev = self._state[1]
789 rev = self._state[1]
791 ctx = self._repo[rev]
790 ctx = self._repo[rev]
792 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
791 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
793
792
794 @annotatesubrepoerror
793 @annotatesubrepoerror
795 def getfileset(self, expr):
794 def getfileset(self, expr):
796 if self._ctx.rev() is None:
795 if self._ctx.rev() is None:
797 ctx = self._repo[None]
796 ctx = self._repo[None]
798 else:
797 else:
799 rev = self._state[1]
798 rev = self._state[1]
800 ctx = self._repo[rev]
799 ctx = self._repo[rev]
801
800
802 files = ctx.getfileset(expr)
801 files = ctx.getfileset(expr)
803
802
804 for subpath in ctx.substate:
803 for subpath in ctx.substate:
805 sub = ctx.sub(subpath)
804 sub = ctx.sub(subpath)
806
805
807 try:
806 try:
808 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
807 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
809 except error.LookupError:
808 except error.LookupError:
810 self.ui.status(_("skipping missing subrepository: %s\n")
809 self.ui.status(_("skipping missing subrepository: %s\n")
811 % self.wvfs.reljoin(reporelpath(self), subpath))
810 % self.wvfs.reljoin(reporelpath(self), subpath))
812 return files
811 return files
813
812
814 def walk(self, match):
813 def walk(self, match):
815 ctx = self._repo[None]
814 ctx = self._repo[None]
816 return ctx.walk(match)
815 return ctx.walk(match)
817
816
818 @annotatesubrepoerror
817 @annotatesubrepoerror
819 def forget(self, match, prefix, dryrun):
818 def forget(self, match, prefix, dryrun):
820 return cmdutil.forget(self.ui, self._repo, match,
819 return cmdutil.forget(self.ui, self._repo, match,
821 self.wvfs.reljoin(prefix, self._path),
820 self.wvfs.reljoin(prefix, self._path),
822 True, dryrun=dryrun)
821 True, dryrun=dryrun)
823
822
824 @annotatesubrepoerror
823 @annotatesubrepoerror
825 def removefiles(self, matcher, prefix, after, force, subrepos,
824 def removefiles(self, matcher, prefix, after, force, subrepos,
826 dryrun, warnings):
825 dryrun, warnings):
827 return cmdutil.remove(self.ui, self._repo, matcher,
826 return cmdutil.remove(self.ui, self._repo, matcher,
828 self.wvfs.reljoin(prefix, self._path),
827 self.wvfs.reljoin(prefix, self._path),
829 after, force, subrepos, dryrun)
828 after, force, subrepos, dryrun)
830
829
831 @annotatesubrepoerror
830 @annotatesubrepoerror
832 def revert(self, substate, *pats, **opts):
831 def revert(self, substate, *pats, **opts):
833 # reverting a subrepo is a 2 step process:
832 # reverting a subrepo is a 2 step process:
834 # 1. if the no_backup option is not set, revert all modified
833 # 1. if the no_backup option is not set, revert all modified
835 # files inside the subrepo
834 # files inside the subrepo
836 # 2. update the subrepo to the revision specified in
835 # 2. update the subrepo to the revision specified in
837 # the corresponding substate dictionary
836 # the corresponding substate dictionary
838 self.ui.status(_('reverting subrepo %s\n') % substate[0])
837 self.ui.status(_('reverting subrepo %s\n') % substate[0])
839 if not opts.get(r'no_backup'):
838 if not opts.get(r'no_backup'):
840 # Revert all files on the subrepo, creating backups
839 # Revert all files on the subrepo, creating backups
841 # Note that this will not recursively revert subrepos
840 # Note that this will not recursively revert subrepos
842 # We could do it if there was a set:subrepos() predicate
841 # We could do it if there was a set:subrepos() predicate
843 opts = opts.copy()
842 opts = opts.copy()
844 opts[r'date'] = None
843 opts[r'date'] = None
845 opts[r'rev'] = substate[1]
844 opts[r'rev'] = substate[1]
846
845
847 self.filerevert(*pats, **opts)
846 self.filerevert(*pats, **opts)
848
847
849 # Update the repo to the revision specified in the given substate
848 # Update the repo to the revision specified in the given substate
850 if not opts.get(r'dry_run'):
849 if not opts.get(r'dry_run'):
851 self.get(substate, overwrite=True)
850 self.get(substate, overwrite=True)
852
851
853 def filerevert(self, *pats, **opts):
852 def filerevert(self, *pats, **opts):
854 ctx = self._repo[opts[r'rev']]
853 ctx = self._repo[opts[r'rev']]
855 parents = self._repo.dirstate.parents()
854 parents = self._repo.dirstate.parents()
856 if opts.get(r'all'):
855 if opts.get(r'all'):
857 pats = ['set:modified()']
856 pats = ['set:modified()']
858 else:
857 else:
859 pats = []
858 pats = []
860 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
859 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
861
860
862 def shortid(self, revid):
861 def shortid(self, revid):
863 return revid[:12]
862 return revid[:12]
864
863
865 @annotatesubrepoerror
864 @annotatesubrepoerror
866 def unshare(self):
865 def unshare(self):
867 # subrepo inherently violates our import layering rules
866 # subrepo inherently violates our import layering rules
868 # because it wants to make repo objects from deep inside the stack
867 # because it wants to make repo objects from deep inside the stack
869 # so we manually delay the circular imports to not break
868 # so we manually delay the circular imports to not break
870 # scripts that don't use our demand-loading
869 # scripts that don't use our demand-loading
871 global hg
870 global hg
872 from . import hg as h
871 from . import hg as h
873 hg = h
872 hg = h
874
873
875 # Nothing prevents a user from sharing in a repo, and then making that a
874 # Nothing prevents a user from sharing in a repo, and then making that a
876 # subrepo. Alternatively, the previous unshare attempt may have failed
875 # subrepo. Alternatively, the previous unshare attempt may have failed
877 # part way through. So recurse whether or not this layer is shared.
876 # part way through. So recurse whether or not this layer is shared.
878 if self._repo.shared():
877 if self._repo.shared():
879 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
878 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
880
879
881 hg.unshare(self.ui, self._repo)
880 hg.unshare(self.ui, self._repo)
882
881
883 def verify(self):
882 def verify(self):
884 try:
883 try:
885 rev = self._state[1]
884 rev = self._state[1]
886 ctx = self._repo.unfiltered()[rev]
885 ctx = self._repo.unfiltered()[rev]
887 if ctx.hidden():
886 if ctx.hidden():
888 # Since hidden revisions aren't pushed/pulled, it seems worth an
887 # Since hidden revisions aren't pushed/pulled, it seems worth an
889 # explicit warning.
888 # explicit warning.
890 ui = self._repo.ui
889 ui = self._repo.ui
891 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
890 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
892 (self._relpath, node.short(self._ctx.node())))
891 (self._relpath, node.short(self._ctx.node())))
893 return 0
892 return 0
894 except error.RepoLookupError:
893 except error.RepoLookupError:
895 # A missing subrepo revision may be a case of needing to pull it, so
894 # A missing subrepo revision may be a case of needing to pull it, so
896 # don't treat this as an error.
895 # don't treat this as an error.
897 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
896 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
898 (self._relpath, node.short(self._ctx.node())))
897 (self._relpath, node.short(self._ctx.node())))
899 return 0
898 return 0
900
899
901 @propertycache
900 @propertycache
902 def wvfs(self):
901 def wvfs(self):
903 """return own wvfs for efficiency and consistency
902 """return own wvfs for efficiency and consistency
904 """
903 """
905 return self._repo.wvfs
904 return self._repo.wvfs
906
905
907 @propertycache
906 @propertycache
908 def _relpath(self):
907 def _relpath(self):
909 """return path to this subrepository as seen from outermost repository
908 """return path to this subrepository as seen from outermost repository
910 """
909 """
911 # Keep consistent dir separators by avoiding vfs.join(self._path)
910 # Keep consistent dir separators by avoiding vfs.join(self._path)
912 return reporelpath(self._repo)
911 return reporelpath(self._repo)
913
912
914 class svnsubrepo(abstractsubrepo):
913 class svnsubrepo(abstractsubrepo):
915 def __init__(self, ctx, path, state, allowcreate):
914 def __init__(self, ctx, path, state, allowcreate):
916 super(svnsubrepo, self).__init__(ctx, path)
915 super(svnsubrepo, self).__init__(ctx, path)
917 self._state = state
916 self._state = state
918 self._exe = procutil.findexe('svn')
917 self._exe = procutil.findexe('svn')
919 if not self._exe:
918 if not self._exe:
920 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
919 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
921 % self._path)
920 % self._path)
922
921
923 def _svncommand(self, commands, filename='', failok=False):
922 def _svncommand(self, commands, filename='', failok=False):
924 cmd = [self._exe]
923 cmd = [self._exe]
925 extrakw = {}
924 extrakw = {}
926 if not self.ui.interactive():
925 if not self.ui.interactive():
927 # Making stdin be a pipe should prevent svn from behaving
926 # Making stdin be a pipe should prevent svn from behaving
928 # interactively even if we can't pass --non-interactive.
927 # interactively even if we can't pass --non-interactive.
929 extrakw[r'stdin'] = subprocess.PIPE
928 extrakw[r'stdin'] = subprocess.PIPE
930 # Starting in svn 1.5 --non-interactive is a global flag
929 # Starting in svn 1.5 --non-interactive is a global flag
931 # instead of being per-command, but we need to support 1.4 so
930 # instead of being per-command, but we need to support 1.4 so
932 # we have to be intelligent about what commands take
931 # we have to be intelligent about what commands take
933 # --non-interactive.
932 # --non-interactive.
934 if commands[0] in ('update', 'checkout', 'commit'):
933 if commands[0] in ('update', 'checkout', 'commit'):
935 cmd.append('--non-interactive')
934 cmd.append('--non-interactive')
936 cmd.extend(commands)
935 cmd.extend(commands)
937 if filename is not None:
936 if filename is not None:
938 path = self.wvfs.reljoin(self._ctx.repo().origroot,
937 path = self.wvfs.reljoin(self._ctx.repo().origroot,
939 self._path, filename)
938 self._path, filename)
940 cmd.append(path)
939 cmd.append(path)
941 env = dict(encoding.environ)
940 env = dict(encoding.environ)
942 # Avoid localized output, preserve current locale for everything else.
941 # Avoid localized output, preserve current locale for everything else.
943 lc_all = env.get('LC_ALL')
942 lc_all = env.get('LC_ALL')
944 if lc_all:
943 if lc_all:
945 env['LANG'] = lc_all
944 env['LANG'] = lc_all
946 del env['LC_ALL']
945 del env['LC_ALL']
947 env['LC_MESSAGES'] = 'C'
946 env['LC_MESSAGES'] = 'C'
948 p = subprocess.Popen(cmd, bufsize=-1, close_fds=procutil.closefds,
947 p = subprocess.Popen(cmd, bufsize=-1, close_fds=procutil.closefds,
949 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
948 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
950 universal_newlines=True, env=env, **extrakw)
949 universal_newlines=True, env=env, **extrakw)
951 stdout, stderr = p.communicate()
950 stdout, stderr = p.communicate()
952 stderr = stderr.strip()
951 stderr = stderr.strip()
953 if not failok:
952 if not failok:
954 if p.returncode:
953 if p.returncode:
955 raise error.Abort(stderr or 'exited with code %d'
954 raise error.Abort(stderr or 'exited with code %d'
956 % p.returncode)
955 % p.returncode)
957 if stderr:
956 if stderr:
958 self.ui.warn(stderr + '\n')
957 self.ui.warn(stderr + '\n')
959 return stdout, stderr
958 return stdout, stderr
960
959
961 @propertycache
960 @propertycache
962 def _svnversion(self):
961 def _svnversion(self):
963 output, err = self._svncommand(['--version', '--quiet'], filename=None)
962 output, err = self._svncommand(['--version', '--quiet'], filename=None)
964 m = re.search(br'^(\d+)\.(\d+)', output)
963 m = re.search(br'^(\d+)\.(\d+)', output)
965 if not m:
964 if not m:
966 raise error.Abort(_('cannot retrieve svn tool version'))
965 raise error.Abort(_('cannot retrieve svn tool version'))
967 return (int(m.group(1)), int(m.group(2)))
966 return (int(m.group(1)), int(m.group(2)))
968
967
969 def _svnmissing(self):
968 def _svnmissing(self):
970 return not self.wvfs.exists('.svn')
969 return not self.wvfs.exists('.svn')
971
970
972 def _wcrevs(self):
971 def _wcrevs(self):
973 # Get the working directory revision as well as the last
972 # Get the working directory revision as well as the last
974 # commit revision so we can compare the subrepo state with
973 # commit revision so we can compare the subrepo state with
975 # both. We used to store the working directory one.
974 # both. We used to store the working directory one.
976 output, err = self._svncommand(['info', '--xml'])
975 output, err = self._svncommand(['info', '--xml'])
977 doc = xml.dom.minidom.parseString(output)
976 doc = xml.dom.minidom.parseString(output)
978 entries = doc.getElementsByTagName('entry')
977 entries = doc.getElementsByTagName('entry')
979 lastrev, rev = '0', '0'
978 lastrev, rev = '0', '0'
980 if entries:
979 if entries:
981 rev = str(entries[0].getAttribute('revision')) or '0'
980 rev = str(entries[0].getAttribute('revision')) or '0'
982 commits = entries[0].getElementsByTagName('commit')
981 commits = entries[0].getElementsByTagName('commit')
983 if commits:
982 if commits:
984 lastrev = str(commits[0].getAttribute('revision')) or '0'
983 lastrev = str(commits[0].getAttribute('revision')) or '0'
985 return (lastrev, rev)
984 return (lastrev, rev)
986
985
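For reference, a self-contained sketch of the same `svn info --xml` parsing outside Mercurial; the sample XML and the wcrevs() helper are made up for illustration and only mirror the logic of _wcrevs() above:

# Sketch only: parse a trimmed, hypothetical `svn info --xml` document.
import xml.dom.minidom

SAMPLE = b'''<?xml version="1.0"?>
<info>
  <entry kind="dir" path="." revision="42">
    <commit revision="40"/>
  </entry>
</info>'''

def wcrevs(xmltext):
    doc = xml.dom.minidom.parseString(xmltext)
    entries = doc.getElementsByTagName('entry')
    lastrev, rev = '0', '0'
    if entries:
        rev = str(entries[0].getAttribute('revision')) or '0'
        commits = entries[0].getElementsByTagName('commit')
        if commits:
            lastrev = str(commits[0].getAttribute('revision')) or '0'
    return lastrev, rev

print(wcrevs(SAMPLE))  # ('40', '42'): last-committed rev, working-copy rev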
987 def _wcrev(self):
986 def _wcrev(self):
988 return self._wcrevs()[0]
987 return self._wcrevs()[0]
989
988
990 def _wcchanged(self):
989 def _wcchanged(self):
991 """Return (changes, extchanges, missing) where changes is True
990 """Return (changes, extchanges, missing) where changes is True
992 if the working directory was changed, extchanges is
991 if the working directory was changed, extchanges is
993 True if any of these changes concern an external entry, and missing
992 True if any of these changes concern an external entry, and missing
994 is True if any change is a missing entry.
993 is True if any change is a missing entry.
995 """
994 """
996 output, err = self._svncommand(['status', '--xml'])
995 output, err = self._svncommand(['status', '--xml'])
997 externals, changes, missing = [], [], []
996 externals, changes, missing = [], [], []
998 doc = xml.dom.minidom.parseString(output)
997 doc = xml.dom.minidom.parseString(output)
999 for e in doc.getElementsByTagName('entry'):
998 for e in doc.getElementsByTagName('entry'):
1000 s = e.getElementsByTagName('wc-status')
999 s = e.getElementsByTagName('wc-status')
1001 if not s:
1000 if not s:
1002 continue
1001 continue
1003 item = s[0].getAttribute('item')
1002 item = s[0].getAttribute('item')
1004 props = s[0].getAttribute('props')
1003 props = s[0].getAttribute('props')
1005 path = e.getAttribute('path')
1004 path = e.getAttribute('path')
1006 if item == 'external':
1005 if item == 'external':
1007 externals.append(path)
1006 externals.append(path)
1008 elif item == 'missing':
1007 elif item == 'missing':
1009 missing.append(path)
1008 missing.append(path)
1010 if (item not in ('', 'normal', 'unversioned', 'external')
1009 if (item not in ('', 'normal', 'unversioned', 'external')
1011 or props not in ('', 'none', 'normal')):
1010 or props not in ('', 'none', 'normal')):
1012 changes.append(path)
1011 changes.append(path)
1013 for path in changes:
1012 for path in changes:
1014 for ext in externals:
1013 for ext in externals:
1015 if path == ext or path.startswith(ext + pycompat.ossep):
1014 if path == ext or path.startswith(ext + pycompat.ossep):
1016 return True, True, bool(missing)
1015 return True, True, bool(missing)
1017 return bool(changes), False, bool(missing)
1016 return bool(changes), False, bool(missing)
1018
1017
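The item/props classification in _wcchanged() above can be summarized by a small standalone helper; classify() and its sample inputs are hypothetical and only approximate the per-entry logic:

# Sketch only: classify one `svn status --xml` wc-status entry.
def classify(item, props):
    flags = set()
    if item == 'external':
        flags.add('external')
    elif item == 'missing':
        flags.add('missing')
    if (item not in ('', 'normal', 'unversioned', 'external')
            or props not in ('', 'none', 'normal')):
        flags.add('changed')          # missing entries also count as changed
    return flags or {'clean'}

print(classify('modified', 'none'))    # {'changed'}
print(classify('missing', 'none'))     # {'missing', 'changed'}
print(classify('normal', 'modified'))  # {'changed'} (property-only change)
print(classify('normal', 'none'))      # {'clean'}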
1019 @annotatesubrepoerror
1018 @annotatesubrepoerror
1020 def dirty(self, ignoreupdate=False, missing=False):
1019 def dirty(self, ignoreupdate=False, missing=False):
1021 if self._svnmissing():
1020 if self._svnmissing():
1022 return self._state[1] != ''
1021 return self._state[1] != ''
1023 wcchanged = self._wcchanged()
1022 wcchanged = self._wcchanged()
1024 changed = wcchanged[0] or (missing and wcchanged[2])
1023 changed = wcchanged[0] or (missing and wcchanged[2])
1025 if not changed:
1024 if not changed:
1026 if self._state[1] in self._wcrevs() or ignoreupdate:
1025 if self._state[1] in self._wcrevs() or ignoreupdate:
1027 return False
1026 return False
1028 return True
1027 return True
1029
1028
1030 def basestate(self):
1029 def basestate(self):
1031 lastrev, rev = self._wcrevs()
1030 lastrev, rev = self._wcrevs()
1032 if lastrev != rev:
1031 if lastrev != rev:
1033 # Last committed rev is not the same as rev. We would
1032 # Last committed rev is not the same as rev. We would
1034 # like to take lastrev but we do not know if the subrepo
1033 # like to take lastrev but we do not know if the subrepo
1035 # URL exists at lastrev. Test it and fall back to rev if it
1034 # URL exists at lastrev. Test it and fall back to rev if it
1036 # is not there.
1035 # is not there.
1037 try:
1036 try:
1038 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1037 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1039 return lastrev
1038 return lastrev
1040 except error.Abort:
1039 except error.Abort:
1041 pass
1040 pass
1042 return rev
1041 return rev
1043
1042
1044 @annotatesubrepoerror
1043 @annotatesubrepoerror
1045 def commit(self, text, user, date):
1044 def commit(self, text, user, date):
1046 # user and date are out of our hands since svn is centralized
1045 # user and date are out of our hands since svn is centralized
1047 changed, extchanged, missing = self._wcchanged()
1046 changed, extchanged, missing = self._wcchanged()
1048 if not changed:
1047 if not changed:
1049 return self.basestate()
1048 return self.basestate()
1050 if extchanged:
1049 if extchanged:
1051 # Do not try to commit externals
1050 # Do not try to commit externals
1052 raise error.Abort(_('cannot commit svn externals'))
1051 raise error.Abort(_('cannot commit svn externals'))
1053 if missing:
1052 if missing:
1054 # svn can commit with missing entries but aborting like hg
1053 # svn can commit with missing entries but aborting like hg
1055 # seems a better approach.
1054 # seems a better approach.
1056 raise error.Abort(_('cannot commit missing svn entries'))
1055 raise error.Abort(_('cannot commit missing svn entries'))
1057 commitinfo, err = self._svncommand(['commit', '-m', text])
1056 commitinfo, err = self._svncommand(['commit', '-m', text])
1058 self.ui.status(commitinfo)
1057 self.ui.status(commitinfo)
1059 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1058 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1060 if not newrev:
1059 if not newrev:
1061 if not commitinfo.strip():
1060 if not commitinfo.strip():
1062 # Sometimes, our definition of "changed" differs from
1061 # Sometimes, our definition of "changed" differs from
1063 # the svn one. For instance, svn ignores missing files
1062 # the svn one. For instance, svn ignores missing files
1064 # when committing. If there are only missing files, no
1063 # when committing. If there are only missing files, no
1065 # commit is made, no output and no error code.
1064 # commit is made, no output and no error code.
1066 raise error.Abort(_('failed to commit svn changes'))
1065 raise error.Abort(_('failed to commit svn changes'))
1067 raise error.Abort(commitinfo.splitlines()[-1])
1066 raise error.Abort(commitinfo.splitlines()[-1])
1068 newrev = newrev.groups()[0]
1067 newrev = newrev.groups()[0]
1069 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1068 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1070 return newrev
1069 return newrev
1071
1070
1072 @annotatesubrepoerror
1071 @annotatesubrepoerror
1073 def remove(self):
1072 def remove(self):
1074 if self.dirty():
1073 if self.dirty():
1075 self.ui.warn(_('not removing repo %s because '
1074 self.ui.warn(_('not removing repo %s because '
1076 'it has changes.\n') % self._path)
1075 'it has changes.\n') % self._path)
1077 return
1076 return
1078 self.ui.note(_('removing subrepo %s\n') % self._path)
1077 self.ui.note(_('removing subrepo %s\n') % self._path)
1079
1078
1080 self.wvfs.rmtree(forcibly=True)
1079 self.wvfs.rmtree(forcibly=True)
1081 try:
1080 try:
1082 pwvfs = self._ctx.repo().wvfs
1081 pwvfs = self._ctx.repo().wvfs
1083 pwvfs.removedirs(pwvfs.dirname(self._path))
1082 pwvfs.removedirs(pwvfs.dirname(self._path))
1084 except OSError:
1083 except OSError:
1085 pass
1084 pass
1086
1085
1087 @annotatesubrepoerror
1086 @annotatesubrepoerror
1088 def get(self, state, overwrite=False):
1087 def get(self, state, overwrite=False):
1089 if overwrite:
1088 if overwrite:
1090 self._svncommand(['revert', '--recursive'])
1089 self._svncommand(['revert', '--recursive'])
1091 args = ['checkout']
1090 args = ['checkout']
1092 if self._svnversion >= (1, 5):
1091 if self._svnversion >= (1, 5):
1093 args.append('--force')
1092 args.append('--force')
1094 # The revision must be specified at the end of the URL to properly
1093 # The revision must be specified at the end of the URL to properly
1095 # update to a directory which has since been deleted and recreated.
1094 # update to a directory which has since been deleted and recreated.
1096 args.append('%s@%s' % (state[0], state[1]))
1095 args.append('%s@%s' % (state[0], state[1]))
1097
1096
1098 # SEC: check that the ssh url is safe
1097 # SEC: check that the ssh url is safe
1099 util.checksafessh(state[0])
1098 util.checksafessh(state[0])
1100
1099
1101 status, err = self._svncommand(args, failok=True)
1100 status, err = self._svncommand(args, failok=True)
1102 _sanitize(self.ui, self.wvfs, '.svn')
1101 _sanitize(self.ui, self.wvfs, '.svn')
1103 if not re.search('Checked out revision [0-9]+.', status):
1102 if not re.search('Checked out revision [0-9]+.', status):
1104 if ('is already a working copy for a different URL' in err
1103 if ('is already a working copy for a different URL' in err
1105 and (self._wcchanged()[:2] == (False, False))):
1104 and (self._wcchanged()[:2] == (False, False))):
1106 # obstructed but clean working copy, so just blow it away.
1105 # obstructed but clean working copy, so just blow it away.
1107 self.remove()
1106 self.remove()
1108 self.get(state, overwrite=False)
1107 self.get(state, overwrite=False)
1109 return
1108 return
1110 raise error.Abort((status or err).splitlines()[-1])
1109 raise error.Abort((status or err).splitlines()[-1])
1111 self.ui.status(status)
1110 self.ui.status(status)
1112
1111
1113 @annotatesubrepoerror
1112 @annotatesubrepoerror
1114 def merge(self, state):
1113 def merge(self, state):
1115 old = self._state[1]
1114 old = self._state[1]
1116 new = state[1]
1115 new = state[1]
1117 wcrev = self._wcrev()
1116 wcrev = self._wcrev()
1118 if new != wcrev:
1117 if new != wcrev:
1119 dirty = old == wcrev or self._wcchanged()[0]
1118 dirty = old == wcrev or self._wcchanged()[0]
1120 if _updateprompt(self.ui, self, dirty, wcrev, new):
1119 if _updateprompt(self.ui, self, dirty, wcrev, new):
1121 self.get(state, False)
1120 self.get(state, False)
1122
1121
1123 def push(self, opts):
1122 def push(self, opts):
1124 # push is a no-op for SVN
1123 # push is a no-op for SVN
1125 return True
1124 return True
1126
1125
1127 @annotatesubrepoerror
1126 @annotatesubrepoerror
1128 def files(self):
1127 def files(self):
1129 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1128 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1130 doc = xml.dom.minidom.parseString(output)
1129 doc = xml.dom.minidom.parseString(output)
1131 paths = []
1130 paths = []
1132 for e in doc.getElementsByTagName('entry'):
1131 for e in doc.getElementsByTagName('entry'):
1133 kind = pycompat.bytestr(e.getAttribute('kind'))
1132 kind = pycompat.bytestr(e.getAttribute('kind'))
1134 if kind != 'file':
1133 if kind != 'file':
1135 continue
1134 continue
1136 name = ''.join(c.data for c
1135 name = ''.join(c.data for c
1137 in e.getElementsByTagName('name')[0].childNodes
1136 in e.getElementsByTagName('name')[0].childNodes
1138 if c.nodeType == c.TEXT_NODE)
1137 if c.nodeType == c.TEXT_NODE)
1139 paths.append(name.encode('utf-8'))
1138 paths.append(name.encode('utf-8'))
1140 return paths
1139 return paths
1141
1140
1142 def filedata(self, name, decode):
1141 def filedata(self, name, decode):
1143 return self._svncommand(['cat'], name)[0]
1142 return self._svncommand(['cat'], name)[0]
1144
1143
1145
1144
1146 class gitsubrepo(abstractsubrepo):
1145 class gitsubrepo(abstractsubrepo):
1147 def __init__(self, ctx, path, state, allowcreate):
1146 def __init__(self, ctx, path, state, allowcreate):
1148 super(gitsubrepo, self).__init__(ctx, path)
1147 super(gitsubrepo, self).__init__(ctx, path)
1149 self._state = state
1148 self._state = state
1150 self._abspath = ctx.repo().wjoin(path)
1149 self._abspath = ctx.repo().wjoin(path)
1151 self._subparent = ctx.repo()
1150 self._subparent = ctx.repo()
1152 self._ensuregit()
1151 self._ensuregit()
1153
1152
1154 def _ensuregit(self):
1153 def _ensuregit(self):
1155 try:
1154 try:
1156 self._gitexecutable = 'git'
1155 self._gitexecutable = 'git'
1157 out, err = self._gitnodir(['--version'])
1156 out, err = self._gitnodir(['--version'])
1158 except OSError as e:
1157 except OSError as e:
1159 genericerror = _("error executing git for subrepo '%s': %s")
1158 genericerror = _("error executing git for subrepo '%s': %s")
1160 notfoundhint = _("check git is installed and in your PATH")
1159 notfoundhint = _("check git is installed and in your PATH")
1161 if e.errno != errno.ENOENT:
1160 if e.errno != errno.ENOENT:
1162 raise error.Abort(genericerror % (
1161 raise error.Abort(genericerror % (
1163 self._path, encoding.strtolocal(e.strerror)))
1162 self._path, encoding.strtolocal(e.strerror)))
1164 elif pycompat.iswindows:
1163 elif pycompat.iswindows:
1165 try:
1164 try:
1166 self._gitexecutable = 'git.cmd'
1165 self._gitexecutable = 'git.cmd'
1167 out, err = self._gitnodir(['--version'])
1166 out, err = self._gitnodir(['--version'])
1168 except OSError as e2:
1167 except OSError as e2:
1169 if e2.errno == errno.ENOENT:
1168 if e2.errno == errno.ENOENT:
1170 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1169 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1171 " for subrepo '%s'") % self._path,
1170 " for subrepo '%s'") % self._path,
1172 hint=notfoundhint)
1171 hint=notfoundhint)
1173 else:
1172 else:
1174 raise error.Abort(genericerror % (self._path,
1173 raise error.Abort(genericerror % (self._path,
1175 encoding.strtolocal(e2.strerror)))
1174 encoding.strtolocal(e2.strerror)))
1176 else:
1175 else:
1177 raise error.Abort(_("couldn't find git for subrepo '%s'")
1176 raise error.Abort(_("couldn't find git for subrepo '%s'")
1178 % self._path, hint=notfoundhint)
1177 % self._path, hint=notfoundhint)
1179 versionstatus = self._checkversion(out)
1178 versionstatus = self._checkversion(out)
1180 if versionstatus == 'unknown':
1179 if versionstatus == 'unknown':
1181 self.ui.warn(_('cannot retrieve git version\n'))
1180 self.ui.warn(_('cannot retrieve git version\n'))
1182 elif versionstatus == 'abort':
1181 elif versionstatus == 'abort':
1183 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1182 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1184 elif versionstatus == 'warning':
1183 elif versionstatus == 'warning':
1185 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1184 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1186
1185
1187 @staticmethod
1186 @staticmethod
1188 def _gitversion(out):
1187 def _gitversion(out):
1189 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1188 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1190 if m:
1189 if m:
1191 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1190 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1192
1191
1193 m = re.search(br'^git version (\d+)\.(\d+)', out)
1192 m = re.search(br'^git version (\d+)\.(\d+)', out)
1194 if m:
1193 if m:
1195 return (int(m.group(1)), int(m.group(2)), 0)
1194 return (int(m.group(1)), int(m.group(2)), 0)
1196
1195
1197 return -1
1196 return -1
1198
1197
1199 @staticmethod
1198 @staticmethod
1200 def _checkversion(out):
1199 def _checkversion(out):
1201 '''ensure git version is new enough
1200 '''ensure git version is new enough
1202
1201
1203 >>> _checkversion = gitsubrepo._checkversion
1202 >>> _checkversion = gitsubrepo._checkversion
1204 >>> _checkversion(b'git version 1.6.0')
1203 >>> _checkversion(b'git version 1.6.0')
1205 'ok'
1204 'ok'
1206 >>> _checkversion(b'git version 1.8.5')
1205 >>> _checkversion(b'git version 1.8.5')
1207 'ok'
1206 'ok'
1208 >>> _checkversion(b'git version 1.4.0')
1207 >>> _checkversion(b'git version 1.4.0')
1209 'abort'
1208 'abort'
1210 >>> _checkversion(b'git version 1.5.0')
1209 >>> _checkversion(b'git version 1.5.0')
1211 'warning'
1210 'warning'
1212 >>> _checkversion(b'git version 1.9-rc0')
1211 >>> _checkversion(b'git version 1.9-rc0')
1213 'ok'
1212 'ok'
1214 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1213 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1215 'ok'
1214 'ok'
1216 >>> _checkversion(b'git version 1.9.0.GIT')
1215 >>> _checkversion(b'git version 1.9.0.GIT')
1217 'ok'
1216 'ok'
1218 >>> _checkversion(b'git version 12345')
1217 >>> _checkversion(b'git version 12345')
1219 'unknown'
1218 'unknown'
1220 >>> _checkversion(b'no')
1219 >>> _checkversion(b'no')
1221 'unknown'
1220 'unknown'
1222 '''
1221 '''
1223 version = gitsubrepo._gitversion(out)
1222 version = gitsubrepo._gitversion(out)
1224 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1223 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1225 # despite the docstring comment. For now, error on 1.4.0, warn on
1224 # despite the docstring comment. For now, error on 1.4.0, warn on
1226 # 1.5.0 but attempt to continue.
1225 # 1.5.0 but attempt to continue.
1227 if version == -1:
1226 if version == -1:
1228 return 'unknown'
1227 return 'unknown'
1229 if version < (1, 5, 0):
1228 if version < (1, 5, 0):
1230 return 'abort'
1229 return 'abort'
1231 elif version < (1, 6, 0):
1230 elif version < (1, 6, 0):
1232 return 'warning'
1231 return 'warning'
1233 return 'ok'
1232 return 'ok'
1234
1233
1235 def _gitcommand(self, commands, env=None, stream=False):
1234 def _gitcommand(self, commands, env=None, stream=False):
1236 return self._gitdir(commands, env=env, stream=stream)[0]
1235 return self._gitdir(commands, env=env, stream=stream)[0]
1237
1236
1238 def _gitdir(self, commands, env=None, stream=False):
1237 def _gitdir(self, commands, env=None, stream=False):
1239 return self._gitnodir(commands, env=env, stream=stream,
1238 return self._gitnodir(commands, env=env, stream=stream,
1240 cwd=self._abspath)
1239 cwd=self._abspath)
1241
1240
1242 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1241 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1243 """Calls the git command
1242 """Calls the git command
1244
1243
1245 The method tries to call the git command. Versions prior to 1.6.0
1244 The method tries to call the git command. Versions prior to 1.6.0
1246 are not supported and will very probably fail.
1245 are not supported and will very probably fail.
1247 """
1246 """
1248 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1247 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1249 if env is None:
1248 if env is None:
1250 env = encoding.environ.copy()
1249 env = encoding.environ.copy()
1251 # disable localization for Git output (issue5176)
1250 # disable localization for Git output (issue5176)
1252 env['LC_ALL'] = 'C'
1251 env['LC_ALL'] = 'C'
1253 # fix for Git CVE-2015-7545
1252 # fix for Git CVE-2015-7545
1254 if 'GIT_ALLOW_PROTOCOL' not in env:
1253 if 'GIT_ALLOW_PROTOCOL' not in env:
1255 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1254 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1256 # unless ui.quiet is set, print git's stderr,
1255 # unless ui.quiet is set, print git's stderr,
1257 # which is mostly progress and useful info
1256 # which is mostly progress and useful info
1258 errpipe = None
1257 errpipe = None
1259 if self.ui.quiet:
1258 if self.ui.quiet:
1260 errpipe = open(os.devnull, 'w')
1259 errpipe = open(os.devnull, 'w')
1261 if self.ui._colormode and len(commands) and commands[0] == "diff":
1260 if self.ui._colormode and len(commands) and commands[0] == "diff":
1262 # insert the argument in the front,
1261 # insert the argument in the front,
1263 # the end of git diff arguments is used for paths
1262 # the end of git diff arguments is used for paths
1264 commands.insert(1, '--color')
1263 commands.insert(1, '--color')
1265 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1264 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1266 cwd=cwd, env=env, close_fds=procutil.closefds,
1265 cwd=cwd, env=env, close_fds=procutil.closefds,
1267 stdout=subprocess.PIPE, stderr=errpipe)
1266 stdout=subprocess.PIPE, stderr=errpipe)
1268 if stream:
1267 if stream:
1269 return p.stdout, None
1268 return p.stdout, None
1270
1269
1271 retdata = p.stdout.read().strip()
1270 retdata = p.stdout.read().strip()
1272 # wait for the child to exit to avoid a race condition.
1271 # wait for the child to exit to avoid a race condition.
1273 p.wait()
1272 p.wait()
1274
1273
1275 if p.returncode != 0 and p.returncode != 1:
1274 if p.returncode != 0 and p.returncode != 1:
1276 # there are certain error codes that are ok
1275 # there are certain error codes that are ok
1277 command = commands[0]
1276 command = commands[0]
1278 if command in ('cat-file', 'symbolic-ref'):
1277 if command in ('cat-file', 'symbolic-ref'):
1279 return retdata, p.returncode
1278 return retdata, p.returncode
1280 # for all others, abort
1279 # for all others, abort
1281 raise error.Abort(_('git %s error %d in %s') %
1280 raise error.Abort(_('git %s error %d in %s') %
1282 (command, p.returncode, self._relpath))
1281 (command, p.returncode, self._relpath))
1283
1282
1284 return retdata, p.returncode
1283 return retdata, p.returncode
1285
1284
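A minimal sketch of the environment hygiene _gitnodir() applies before spawning git, assuming a git binary is on PATH; run_git() is a hypothetical standalone helper, not the Mercurial API:

# Sketch only: disable localization and restrict transport protocols
# before invoking git, then capture its output.
import os
import subprocess

def run_git(args, cwd=None):
    env = os.environ.copy()
    env['LC_ALL'] = 'C'                   # undo localization (issue5176)
    env.setdefault('GIT_ALLOW_PROTOCOL',  # mitigation for CVE-2015-7545
                   'file:git:http:https:ssh')
    p = subprocess.Popen(['git'] + args, cwd=cwd, env=env,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out.strip(), p.returncode

out, code = run_git(['--version'])
print(code, out)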
1286 def _gitmissing(self):
1285 def _gitmissing(self):
1287 return not self.wvfs.exists('.git')
1286 return not self.wvfs.exists('.git')
1288
1287
1289 def _gitstate(self):
1288 def _gitstate(self):
1290 return self._gitcommand(['rev-parse', 'HEAD'])
1289 return self._gitcommand(['rev-parse', 'HEAD'])
1291
1290
1292 def _gitcurrentbranch(self):
1291 def _gitcurrentbranch(self):
1293 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1292 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1294 if err:
1293 if err:
1295 current = None
1294 current = None
1296 return current
1295 return current
1297
1296
1298 def _gitremote(self, remote):
1297 def _gitremote(self, remote):
1299 out = self._gitcommand(['remote', 'show', '-n', remote])
1298 out = self._gitcommand(['remote', 'show', '-n', remote])
1300 line = out.split('\n')[1]
1299 line = out.split('\n')[1]
1301 i = line.index('URL: ') + len('URL: ')
1300 i = line.index('URL: ') + len('URL: ')
1302 return line[i:]
1301 return line[i:]
1303
1302
1304 def _githavelocally(self, revision):
1303 def _githavelocally(self, revision):
1305 out, code = self._gitdir(['cat-file', '-e', revision])
1304 out, code = self._gitdir(['cat-file', '-e', revision])
1306 return code == 0
1305 return code == 0
1307
1306
1308 def _gitisancestor(self, r1, r2):
1307 def _gitisancestor(self, r1, r2):
1309 base = self._gitcommand(['merge-base', r1, r2])
1308 base = self._gitcommand(['merge-base', r1, r2])
1310 return base == r1
1309 return base == r1
1311
1310
1312 def _gitisbare(self):
1311 def _gitisbare(self):
1313 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1312 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1314
1313
1315 def _gitupdatestat(self):
1314 def _gitupdatestat(self):
1316 """This must be run before git diff-index.
1315 """This must be run before git diff-index.
1317 diff-index only looks at changes to file stat;
1316 diff-index only looks at changes to file stat;
1318 this command looks at file contents and updates the stat."""
1317 this command looks at file contents and updates the stat."""
1319 self._gitcommand(['update-index', '-q', '--refresh'])
1318 self._gitcommand(['update-index', '-q', '--refresh'])
1320
1319
1321 def _gitbranchmap(self):
1320 def _gitbranchmap(self):
1322 '''returns 2 things:
1321 '''returns 2 things:
1323 a map from git branch to revision
1322 a map from git branch to revision
1324 a map from revision to branches'''
1323 a map from revision to branches'''
1325 branch2rev = {}
1324 branch2rev = {}
1326 rev2branch = {}
1325 rev2branch = {}
1327
1326
1328 out = self._gitcommand(['for-each-ref', '--format',
1327 out = self._gitcommand(['for-each-ref', '--format',
1329 '%(objectname) %(refname)'])
1328 '%(objectname) %(refname)'])
1330 for line in out.split('\n'):
1329 for line in out.split('\n'):
1331 revision, ref = line.split(' ')
1330 revision, ref = line.split(' ')
1332 if (not ref.startswith('refs/heads/') and
1331 if (not ref.startswith('refs/heads/') and
1333 not ref.startswith('refs/remotes/')):
1332 not ref.startswith('refs/remotes/')):
1334 continue
1333 continue
1335 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1334 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1336 continue # ignore remote/HEAD redirects
1335 continue # ignore remote/HEAD redirects
1337 branch2rev[ref] = revision
1336 branch2rev[ref] = revision
1338 rev2branch.setdefault(revision, []).append(ref)
1337 rev2branch.setdefault(revision, []).append(ref)
1339 return branch2rev, rev2branch
1338 return branch2rev, rev2branch
1340
1339
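A self-contained sketch of the ref parsing done by _gitbranchmap() above; the sample `git for-each-ref` output and the branchmaps() helper are invented for illustration:

# Sketch only: map `git for-each-ref --format '%(objectname) %(refname)'`
# output into branch->revision and revision->branches dictionaries.
SAMPLE = '''1111111111111111111111111111111111111111 refs/heads/master
2222222222222222222222222222222222222222 refs/remotes/origin/feature
1111111111111111111111111111111111111111 refs/remotes/origin/HEAD'''

def branchmaps(out):
    branch2rev, rev2branch = {}, {}
    for line in out.split('\n'):
        revision, ref = line.split(' ')
        if not (ref.startswith('refs/heads/') or
                ref.startswith('refs/remotes/')):
            continue
        if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
            continue  # remote HEAD is just a pointer, not a branch
        branch2rev[ref] = revision
        rev2branch.setdefault(revision, []).append(ref)
    return branch2rev, rev2branch

b2r, r2b = branchmaps(SAMPLE)
print(sorted(b2r))  # ['refs/heads/master', 'refs/remotes/origin/feature']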
1341 def _gittracking(self, branches):
1340 def _gittracking(self, branches):
1342 'return map of remote branch to local tracking branch'
1341 'return map of remote branch to local tracking branch'
1343 # assumes no more than one local tracking branch for each remote
1342 # assumes no more than one local tracking branch for each remote
1344 tracking = {}
1343 tracking = {}
1345 for b in branches:
1344 for b in branches:
1346 if b.startswith('refs/remotes/'):
1345 if b.startswith('refs/remotes/'):
1347 continue
1346 continue
1348 bname = b.split('/', 2)[2]
1347 bname = b.split('/', 2)[2]
1349 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1348 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1350 if remote:
1349 if remote:
1351 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1350 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1352 tracking['refs/remotes/%s/%s' %
1351 tracking['refs/remotes/%s/%s' %
1353 (remote, ref.split('/', 2)[2])] = b
1352 (remote, ref.split('/', 2)[2])] = b
1354 return tracking
1353 return tracking
1355
1354
1356 def _abssource(self, source):
1355 def _abssource(self, source):
1357 if '://' not in source:
1356 if '://' not in source:
1358 # recognize the scp syntax as an absolute source
1357 # recognize the scp syntax as an absolute source
1359 colon = source.find(':')
1358 colon = source.find(':')
1360 if colon != -1 and '/' not in source[:colon]:
1359 if colon != -1 and '/' not in source[:colon]:
1361 return source
1360 return source
1362 self._subsource = source
1361 self._subsource = source
1363 return _abssource(self)
1362 return _abssource(self)
1364
1363
1365 def _fetch(self, source, revision):
1364 def _fetch(self, source, revision):
1366 if self._gitmissing():
1365 if self._gitmissing():
1367 # SEC: check for safe ssh url
1366 # SEC: check for safe ssh url
1368 util.checksafessh(source)
1367 util.checksafessh(source)
1369
1368
1370 source = self._abssource(source)
1369 source = self._abssource(source)
1371 self.ui.status(_('cloning subrepo %s from %s\n') %
1370 self.ui.status(_('cloning subrepo %s from %s\n') %
1372 (self._relpath, source))
1371 (self._relpath, source))
1373 self._gitnodir(['clone', source, self._abspath])
1372 self._gitnodir(['clone', source, self._abspath])
1374 if self._githavelocally(revision):
1373 if self._githavelocally(revision):
1375 return
1374 return
1376 self.ui.status(_('pulling subrepo %s from %s\n') %
1375 self.ui.status(_('pulling subrepo %s from %s\n') %
1377 (self._relpath, self._gitremote('origin')))
1376 (self._relpath, self._gitremote('origin')))
1378 # try only origin: the originally cloned repo
1377 # try only origin: the originally cloned repo
1379 self._gitcommand(['fetch'])
1378 self._gitcommand(['fetch'])
1380 if not self._githavelocally(revision):
1379 if not self._githavelocally(revision):
1381 raise error.Abort(_('revision %s does not exist in subrepository '
1380 raise error.Abort(_('revision %s does not exist in subrepository '
1382 '"%s"\n') % (revision, self._relpath))
1381 '"%s"\n') % (revision, self._relpath))
1383
1382
1384 @annotatesubrepoerror
1383 @annotatesubrepoerror
1385 def dirty(self, ignoreupdate=False, missing=False):
1384 def dirty(self, ignoreupdate=False, missing=False):
1386 if self._gitmissing():
1385 if self._gitmissing():
1387 return self._state[1] != ''
1386 return self._state[1] != ''
1388 if self._gitisbare():
1387 if self._gitisbare():
1389 return True
1388 return True
1390 if not ignoreupdate and self._state[1] != self._gitstate():
1389 if not ignoreupdate and self._state[1] != self._gitstate():
1391 # different version checked out
1390 # different version checked out
1392 return True
1391 return True
1393 # check for staged changes or modified files; ignore untracked files
1392 # check for staged changes or modified files; ignore untracked files
1394 self._gitupdatestat()
1393 self._gitupdatestat()
1395 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1394 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1396 return code == 1
1395 return code == 1
1397
1396
1398 def basestate(self):
1397 def basestate(self):
1399 return self._gitstate()
1398 return self._gitstate()
1400
1399
1401 @annotatesubrepoerror
1400 @annotatesubrepoerror
1402 def get(self, state, overwrite=False):
1401 def get(self, state, overwrite=False):
1403 source, revision, kind = state
1402 source, revision, kind = state
1404 if not revision:
1403 if not revision:
1405 self.remove()
1404 self.remove()
1406 return
1405 return
1407 self._fetch(source, revision)
1406 self._fetch(source, revision)
1408 # if the repo was set to be bare, unbare it
1407 # if the repo was set to be bare, unbare it
1409 if self._gitisbare():
1408 if self._gitisbare():
1410 self._gitcommand(['config', 'core.bare', 'false'])
1409 self._gitcommand(['config', 'core.bare', 'false'])
1411 if self._gitstate() == revision:
1410 if self._gitstate() == revision:
1412 self._gitcommand(['reset', '--hard', 'HEAD'])
1411 self._gitcommand(['reset', '--hard', 'HEAD'])
1413 return
1412 return
1414 elif self._gitstate() == revision:
1413 elif self._gitstate() == revision:
1415 if overwrite:
1414 if overwrite:
1416 # first reset the index to unmark new files for commit, because
1415 # first reset the index to unmark new files for commit, because
1417 # reset --hard will otherwise throw away files added for commit,
1416 # reset --hard will otherwise throw away files added for commit,
1418 # not just unmark them.
1417 # not just unmark them.
1419 self._gitcommand(['reset', 'HEAD'])
1418 self._gitcommand(['reset', 'HEAD'])
1420 self._gitcommand(['reset', '--hard', 'HEAD'])
1419 self._gitcommand(['reset', '--hard', 'HEAD'])
1421 return
1420 return
1422 branch2rev, rev2branch = self._gitbranchmap()
1421 branch2rev, rev2branch = self._gitbranchmap()
1423
1422
1424 def checkout(args):
1423 def checkout(args):
1425 cmd = ['checkout']
1424 cmd = ['checkout']
1426 if overwrite:
1425 if overwrite:
1427 # first reset the index to unmark new files for commit, because
1426 # first reset the index to unmark new files for commit, because
1428 # the -f option will otherwise throw away files added for
1427 # the -f option will otherwise throw away files added for
1429 # commit, not just unmark them.
1428 # commit, not just unmark them.
1430 self._gitcommand(['reset', 'HEAD'])
1429 self._gitcommand(['reset', 'HEAD'])
1431 cmd.append('-f')
1430 cmd.append('-f')
1432 self._gitcommand(cmd + args)
1431 self._gitcommand(cmd + args)
1433 _sanitize(self.ui, self.wvfs, '.git')
1432 _sanitize(self.ui, self.wvfs, '.git')
1434
1433
1435 def rawcheckout():
1434 def rawcheckout():
1436 # no branch to checkout, check it out with no branch
1435 # no branch to checkout, check it out with no branch
1437 self.ui.warn(_('checking out detached HEAD in '
1436 self.ui.warn(_('checking out detached HEAD in '
1438 'subrepository "%s"\n') % self._relpath)
1437 'subrepository "%s"\n') % self._relpath)
1439 self.ui.warn(_('check out a git branch if you intend '
1438 self.ui.warn(_('check out a git branch if you intend '
1440 'to make changes\n'))
1439 'to make changes\n'))
1441 checkout(['-q', revision])
1440 checkout(['-q', revision])
1442
1441
1443 if revision not in rev2branch:
1442 if revision not in rev2branch:
1444 rawcheckout()
1443 rawcheckout()
1445 return
1444 return
1446 branches = rev2branch[revision]
1445 branches = rev2branch[revision]
1447 firstlocalbranch = None
1446 firstlocalbranch = None
1448 for b in branches:
1447 for b in branches:
1449 if b == 'refs/heads/master':
1448 if b == 'refs/heads/master':
1450 # master trumps all other branches
1449 # master trumps all other branches
1451 checkout(['refs/heads/master'])
1450 checkout(['refs/heads/master'])
1452 return
1451 return
1453 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1452 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1454 firstlocalbranch = b
1453 firstlocalbranch = b
1455 if firstlocalbranch:
1454 if firstlocalbranch:
1456 checkout([firstlocalbranch])
1455 checkout([firstlocalbranch])
1457 return
1456 return
1458
1457
1459 tracking = self._gittracking(branch2rev.keys())
1458 tracking = self._gittracking(branch2rev.keys())
1460 # choose a remote branch already tracked if possible
1459 # choose a remote branch already tracked if possible
1461 remote = branches[0]
1460 remote = branches[0]
1462 if remote not in tracking:
1461 if remote not in tracking:
1463 for b in branches:
1462 for b in branches:
1464 if b in tracking:
1463 if b in tracking:
1465 remote = b
1464 remote = b
1466 break
1465 break
1467
1466
1468 if remote not in tracking:
1467 if remote not in tracking:
1469 # create a new local tracking branch
1468 # create a new local tracking branch
1470 local = remote.split('/', 3)[3]
1469 local = remote.split('/', 3)[3]
1471 checkout(['-b', local, remote])
1470 checkout(['-b', local, remote])
1472 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1471 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1473 # When updating to a tracked remote branch,
1472 # When updating to a tracked remote branch,
1474 # if the local tracking branch is downstream of it,
1473 # if the local tracking branch is downstream of it,
1475 # a normal `git pull` would have performed a "fast-forward merge"
1474 # a normal `git pull` would have performed a "fast-forward merge"
1476 # which is equivalent to updating the local branch to the remote.
1475 # which is equivalent to updating the local branch to the remote.
1477 # Since we are only looking at branching at update, we need to
1476 # Since we are only looking at branching at update, we need to
1478 # detect this situation and perform this action lazily.
1477 # detect this situation and perform this action lazily.
1479 if tracking[remote] != self._gitcurrentbranch():
1478 if tracking[remote] != self._gitcurrentbranch():
1480 checkout([tracking[remote]])
1479 checkout([tracking[remote]])
1481 self._gitcommand(['merge', '--ff', remote])
1480 self._gitcommand(['merge', '--ff', remote])
1482 _sanitize(self.ui, self.wvfs, '.git')
1481 _sanitize(self.ui, self.wvfs, '.git')
1483 else:
1482 else:
1484 # a real merge would be required, just checkout the revision
1483 # a real merge would be required, just checkout the revision
1485 rawcheckout()
1484 rawcheckout()
1486
1485
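The branch-selection order used by get() above (master first, then a local branch, then a tracked remote branch) can be sketched as follows; checkout_plan() is a hypothetical helper that leaves out the fast-forward vs. detached-checkout distinction handled by the real method:

# Sketch only: order of preference when the target revision has named refs.
def checkout_plan(branches, tracking):
    if 'refs/heads/master' in branches:
        return 'checkout refs/heads/master'      # master trumps everything
    for b in branches:
        if not b.startswith('refs/remotes/'):
            return 'checkout %s' % b             # first local branch wins
    remote = next((b for b in branches if b in tracking), branches[0])
    if remote not in tracking:
        return 'create a local tracking branch for %s' % remote
    return 'update local tracking branch %s' % tracking[remote]

print(checkout_plan(['refs/remotes/origin/topic'], {}))
# create a local tracking branch for refs/remotes/origin/topic
print(checkout_plan(['refs/heads/topic'], {}))
# checkout refs/heads/topic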
1487 @annotatesubrepoerror
1486 @annotatesubrepoerror
1488 def commit(self, text, user, date):
1487 def commit(self, text, user, date):
1489 if self._gitmissing():
1488 if self._gitmissing():
1490 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1489 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1491 cmd = ['commit', '-a', '-m', text]
1490 cmd = ['commit', '-a', '-m', text]
1492 env = encoding.environ.copy()
1491 env = encoding.environ.copy()
1493 if user:
1492 if user:
1494 cmd += ['--author', user]
1493 cmd += ['--author', user]
1495 if date:
1494 if date:
1496 # git's date parser silently ignores dates when seconds < 1e9,
1495 # git's date parser silently ignores dates when seconds < 1e9,
1497 # so convert to ISO8601
1496 # so convert to ISO8601
1498 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1497 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1499 '%Y-%m-%dT%H:%M:%S %1%2')
1498 '%Y-%m-%dT%H:%M:%S %1%2')
1500 self._gitcommand(cmd, env=env)
1499 self._gitcommand(cmd, env=env)
1501 # make sure commit works otherwise HEAD might not exist under certain
1500 # make sure commit works otherwise HEAD might not exist under certain
1502 # circumstances
1501 # circumstances
1503 return self._gitstate()
1502 return self._gitstate()
1504
1503
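    # Illustrative note (not part of the original change): the format string
    # above turns a Mercurial (unixtime, offset) date tuple into an ISO-8601
    # timestamp such as
    #
    #   GIT_AUTHOR_DATE='2018-03-28T12:34:56 +0200'
    #
    # which git's date parser accepts regardless of how small the epoch
    # value is.
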
    @annotatesubrepoerror
    def merge(self, state):
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand(['merge-base', revision, self._state[1]])
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

        def mergefunc():
            if base == revision:
                self.get(state) # fast forward merge
            elif base != self._state[1]:
                self._gitcommand(['merge', '--no-commit', revision])
                _sanitize(self.ui, self.wvfs, '.git')

        if self.dirty():
            if self._gitstate() != revision:
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(self.ui, self, dirty,
                                 self._state[1][:7], revision[:7]):
                    mergefunc()
        else:
            mergefunc()

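    # Illustrative note (not part of the original change): `git merge-base A B`
    # prints the best common ancestor of the two commits, e.g.
    #
    #   $ git merge-base feature master
    #   9fceb02d0ae598e95dc970b74767f19372d61af8
    #
    # mergefunc() above compares that ancestor against the incoming revision
    # and the recorded state to decide between a plain checkout and a real
    # `git merge --no-commit`.
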
    @annotatesubrepoerror
    def push(self, opts):
        force = opts.get('force')

        if not self._state[1]:
            return True
        if self._gitmissing():
            raise error.Abort(_("subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith('refs/remotes/origin/'):
                    return True
        for b, revision in branch2rev.iteritems():
            if b.startswith('refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = ['push']
        if force:
            cmd.append('--force')

        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self.ui.warn(_('unrelated git branch checked out '
                               'in subrepository "%s"\n') % self._relpath)
                return False
            self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
                           (current.split('/', 2)[2], self._relpath))
            ret = self._gitdir(cmd + ['origin', current])
            return ret[1] == 0
        else:
            self.ui.warn(_('no branch checked out in subrepository "%s"\n'
                           'cannot push revision %s\n') %
                         (self._relpath, self._state[1]))
            return False

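    # Illustrative sketch (not part of the original change): the branch name
    # shown in the status message is the ref with its 'refs/heads/' prefix
    # stripped, e.g.
    #
    #   'refs/heads/topic'.split('/', 2)[2]  ->  'topic'
    #
    # while the actual push runs as `git push [--force] origin refs/heads/topic`.
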
    @annotatesubrepoerror
    def add(self, ui, match, prefix, explicitonly, **opts):
        if self._gitmissing():
            return []

        (modified, added, removed,
         deleted, unknown, ignored, clean) = self.status(None, unknown=True,
                                                         clean=True)

        tracked = set()
        # dirstates 'amn' warn, 'r' is added again
        for l in (modified, added, deleted, clean):
            tracked.update(l)

        # Unknown files not of interest will be rejected by the matcher
        files = unknown
        files.extend(match.files())

        rejected = []

        files = [f for f in sorted(set(files)) if match(f)]
        for f in files:
            exact = match.exact(f)
            command = ["add"]
            if exact:
                command.append("-f") #should be added, even if ignored
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

            if f in tracked: # hg prints 'adding' even if already tracked
                if exact:
                    rejected.append(f)
                continue
            if not opts.get(r'dry_run'):
                self._gitcommand(command + [f])

        for f in rejected:
            ui.warn(_("%s already tracked!\n") % match.abs(f))

        return rejected

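    # Illustrative note (not part of the original change): with --dry-run the
    # loop above still prints the 'adding ...' messages and collects rejected
    # files, but skips the underlying `git add [-f] <file>` invocation,
    # mirroring how `hg add -n` behaves in the parent repository.
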
    @annotatesubrepoerror
    def remove(self):
        if self._gitmissing():
            return
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._relpath)
            return
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % self._relpath)
        self._gitcommand(['config', 'core.bare', 'true'])
        for f, kind in self.wvfs.readdir():
            if f == '.git':
                continue
            if kind == stat.S_IFDIR:
                self.wvfs.rmtree(f)
            else:
                self.wvfs.unlink(f)

    def archive(self, archiver, prefix, match=None, decode=True):
        total = 0
        source, revision = self._state
        if not revision:
            return total
        self._fetch(source, revision)

        # Parse git's native archive command.
        # This should be much faster than manually traversing the trees
        # and objects with many subprocess calls.
        tarstream = self._gitcommand(['archive', revision], stream=True)
        tar = tarfile.open(fileobj=tarstream, mode='r|')
        relpath = subrelpath(self)
        self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
        for i, info in enumerate(tar):
            if info.isdir():
                continue
            if match and not match(info.name):
                continue
            if info.issym():
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(prefix + self._path + '/' + info.name,
                             info.mode, info.issym(), data)
            total += 1
            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                             unit=_('files'))
        self.ui.progress(_('archiving (%s)') % relpath, None)
        return total


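    # Illustrative sketch (not part of the original change): `git archive <rev>`
    # writes a tar stream of the tree at <rev> to stdout, so the loop above is
    # roughly equivalent to re-packing the output of
    #
    #   $ git archive HEAD | tar -tv
    #
    # entry by entry, while reporting progress and honouring the matcher.
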
    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        if match.anypats():
            return 1 #No support for include/exclude yet

        if not match.files():
            return 1

        # TODO: add support for non-plain formatter (see cmdutil.cat())
        for f in match.files():
            output = self._gitcommand(["show", "%s:%s" % (rev, f)])
            fp = cmdutil.makefileobj(self._ctx, fntemplate,
                                     pathname=self.wvfs.reljoin(prefix, f))
            fp.write(output)
            fp.close()
        return 0


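    # Illustrative note (not part of the original change): file contents are
    # read with git's "<rev>:<path>" blob syntax, e.g.
    #
    #   $ git show 29b4ec6:README
    #
    # which prints the blob exactly as stored at that revision.
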
    @annotatesubrepoerror
    def status(self, rev2, **opts):
        rev1 = self._state[1]
        if self._gitmissing() or not rev1:
            # if the repo is missing, return no results
            return scmutil.status([], [], [], [], [], [], [])
        modified, added, removed = [], [], []
        self._gitupdatestat()
        if rev2:
            command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
        else:
            command = ['diff-index', '--no-renames', rev1]
        out = self._gitcommand(command)
        for line in out.split('\n'):
            tab = line.find('\t')
            if tab == -1:
                continue
            status, f = line[tab - 1], line[tab + 1:]
            if status == 'M':
                modified.append(f)
            elif status == 'A':
                added.append(f)
            elif status == 'D':
                removed.append(f)

        deleted, unknown, ignored, clean = [], [], [], []

        command = ['status', '--porcelain', '-z']
        if opts.get(r'unknown'):
            command += ['--untracked-files=all']
        if opts.get(r'ignored'):
            command += ['--ignored']
        out = self._gitcommand(command)

        changedfiles = set()
        changedfiles.update(modified)
        changedfiles.update(added)
        changedfiles.update(removed)
        for line in out.split('\0'):
            if not line:
                continue
            st = line[0:2]
            #moves and copies show 2 files on one line
            if line.find('\0') >= 0:
                filename1, filename2 = line[3:].split('\0')
            else:
                filename1 = line[3:]
                filename2 = None

            changedfiles.add(filename1)
            if filename2:
                changedfiles.add(filename2)

            if st == '??':
                unknown.append(filename1)
            elif st == '!!':
                ignored.append(filename1)

        if opts.get(r'clean'):
            out = self._gitcommand(['ls-files'])
            for f in out.split('\n'):
                if not f in changedfiles:
                    clean.append(f)

        return scmutil.status(modified, added, removed, deleted,
                              unknown, ignored, clean)

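    # Illustrative sketch (not part of the original change): with
    # `git status --porcelain -z` each entry is "XY <path>" terminated by NUL,
    # for example (NUL rendered as '\0'):
    #
    #   ?? newfile.txt\0!! build/output.o\0 M src/main.c\0
    #
    # The two-letter code is what the loop above inspects: '??' feeds the
    # unknown list and '!!' (only emitted with --ignored) feeds the ignored one.
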
    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        node1 = self._state[1]
        cmd = ['diff', '--no-renames']
        if opts[r'stat']:
            cmd.append('--stat')
        else:
            # for Git, this also implies '-p'
            cmd.append('-U%d' % diffopts.context)

        gitprefix = self.wvfs.reljoin(prefix, self._path)

        if diffopts.noprefix:
            cmd.extend(['--src-prefix=%s/' % gitprefix,
                        '--dst-prefix=%s/' % gitprefix])
        else:
            cmd.extend(['--src-prefix=a/%s/' % gitprefix,
                        '--dst-prefix=b/%s/' % gitprefix])

        if diffopts.ignorews:
            cmd.append('--ignore-all-space')
        if diffopts.ignorewsamount:
            cmd.append('--ignore-space-change')
        if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
                and diffopts.ignoreblanklines:
            cmd.append('--ignore-blank-lines')

        cmd.append(node1)
        if node2:
            cmd.append(node2)

        output = ""
        if match.always():
            output += self._gitcommand(cmd) + '\n'
        else:
            st = self.status(node2)[:3]
            files = [f for sublist in st for f in sublist]
            for f in files:
                if match(f):
                    output += self._gitcommand(cmd + ['--', f]) + '\n'

        if output.strip():
            ui.write(output)

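    # Illustrative note (not part of the original change): the prefix options
    # make git label hunks with the subrepo path, e.g. for a subrepo at
    # 'vendor/lib' the headers read
    #
    #   --- a/vendor/lib/setup.py
    #   +++ b/vendor/lib/setup.py
    #
    # (or without the 'a/'/'b/' when noprefix is in effect), so the output
    # lines up with diffs produced for the parent repository.
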
    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            status = self.status(None)
            names = status.modified
            for name in names:
                bakname = scmutil.origpath(self.ui, self._subparent, name)
                self.ui.note(_('saving current version of %s as %s\n') %
                             (name, bakname))
                self.wvfs.rename(name, bakname)

        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)
        return []

    def shortid(self, revid):
        return revid[:7]

types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
}
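# Illustrative note (not part of the original change): the kind recorded in
# .hgsub selects the handler class from this table, e.g. an entry such as
#
#   vendor/lib = [git]https://example.com/lib.git
#
# maps to 'git' and is handled by gitsubrepo above.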