##// END OF EJS Templates
vfs: replace 'scmutil.opener' usage with 'scmutil.vfs'...
Pierre-Yves David -
r31216:21fa3d36 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,46 +1,46 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # Undump a dump from dumprevlog
2 # Undump a dump from dumprevlog
3 # $ hg init
3 # $ hg init
4 # $ undumprevlog < repo.dump
4 # $ undumprevlog < repo.dump
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import sys
8 import sys
9 from mercurial import (
9 from mercurial import (
10 node,
10 node,
11 revlog,
11 revlog,
12 scmutil,
12 scmutil,
13 transaction,
13 transaction,
14 util,
14 util,
15 )
15 )
16
16
17 for fp in (sys.stdin, sys.stdout, sys.stderr):
17 for fp in (sys.stdin, sys.stdout, sys.stderr):
18 util.setbinary(fp)
18 util.setbinary(fp)
19
19
20 opener = scmutil.opener('.', False)
20 opener = scmutil.vfs('.', False)
21 tr = transaction.transaction(sys.stderr.write, opener, {'store': opener},
21 tr = transaction.transaction(sys.stderr.write, opener, {'store': opener},
22 "undump.journal")
22 "undump.journal")
23 while True:
23 while True:
24 l = sys.stdin.readline()
24 l = sys.stdin.readline()
25 if not l:
25 if not l:
26 break
26 break
27 if l.startswith("file:"):
27 if l.startswith("file:"):
28 f = l[6:-1]
28 f = l[6:-1]
29 r = revlog.revlog(opener, f)
29 r = revlog.revlog(opener, f)
30 print f
30 print f
31 elif l.startswith("node:"):
31 elif l.startswith("node:"):
32 n = node.bin(l[6:-1])
32 n = node.bin(l[6:-1])
33 elif l.startswith("linkrev:"):
33 elif l.startswith("linkrev:"):
34 lr = int(l[9:-1])
34 lr = int(l[9:-1])
35 elif l.startswith("parents:"):
35 elif l.startswith("parents:"):
36 p = l[9:-1].split()
36 p = l[9:-1].split()
37 p1 = node.bin(p[0])
37 p1 = node.bin(p[0])
38 p2 = node.bin(p[1])
38 p2 = node.bin(p[1])
39 elif l.startswith("length:"):
39 elif l.startswith("length:"):
40 length = int(l[8:-1])
40 length = int(l[8:-1])
41 sys.stdin.readline() # start marker
41 sys.stdin.readline() # start marker
42 d = sys.stdin.read(length)
42 d = sys.stdin.read(length)
43 sys.stdin.readline() # end marker
43 sys.stdin.readline() # end marker
44 r.addrevision(d, tr, lr, p1, p2)
44 r.addrevision(d, tr, lr, p1, p2)
45
45
46 tr.close()
46 tr.close()
@@ -1,1353 +1,1353 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5
5
6 import os
6 import os
7 import re
7 import re
8 import tempfile
8 import tempfile
9 import xml.dom.minidom
9 import xml.dom.minidom
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import (
12 from mercurial import (
13 encoding,
13 encoding,
14 error,
14 error,
15 pycompat,
15 pycompat,
16 scmutil,
16 scmutil,
17 util,
17 util,
18 )
18 )
19
19
20 from . import common
20 from . import common
21
21
22 pickle = util.pickle
22 pickle = util.pickle
23 stringio = util.stringio
23 stringio = util.stringio
24 propertycache = util.propertycache
24 propertycache = util.propertycache
25 urlerr = util.urlerr
25 urlerr = util.urlerr
26 urlreq = util.urlreq
26 urlreq = util.urlreq
27
27
28 commandline = common.commandline
28 commandline = common.commandline
29 commit = common.commit
29 commit = common.commit
30 converter_sink = common.converter_sink
30 converter_sink = common.converter_sink
31 converter_source = common.converter_source
31 converter_source = common.converter_source
32 decodeargs = common.decodeargs
32 decodeargs = common.decodeargs
33 encodeargs = common.encodeargs
33 encodeargs = common.encodeargs
34 makedatetimestamp = common.makedatetimestamp
34 makedatetimestamp = common.makedatetimestamp
35 mapfile = common.mapfile
35 mapfile = common.mapfile
36 MissingTool = common.MissingTool
36 MissingTool = common.MissingTool
37 NoRepo = common.NoRepo
37 NoRepo = common.NoRepo
38
38
39 # Subversion stuff. Works best with very recent Python SVN bindings
39 # Subversion stuff. Works best with very recent Python SVN bindings
40 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
40 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
41 # these bindings.
41 # these bindings.
42
42
43 try:
43 try:
44 import svn
44 import svn
45 import svn.client
45 import svn.client
46 import svn.core
46 import svn.core
47 import svn.ra
47 import svn.ra
48 import svn.delta
48 import svn.delta
49 from . import transport
49 from . import transport
50 import warnings
50 import warnings
51 warnings.filterwarnings('ignore',
51 warnings.filterwarnings('ignore',
52 module='svn.core',
52 module='svn.core',
53 category=DeprecationWarning)
53 category=DeprecationWarning)
54 svn.core.SubversionException # trigger import to catch error
54 svn.core.SubversionException # trigger import to catch error
55
55
56 except ImportError:
56 except ImportError:
57 svn = None
57 svn = None
58
58
59 class SvnPathNotFound(Exception):
59 class SvnPathNotFound(Exception):
60 pass
60 pass
61
61
62 def revsplit(rev):
62 def revsplit(rev):
63 """Parse a revision string and return (uuid, path, revnum).
63 """Parse a revision string and return (uuid, path, revnum).
64 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
64 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
65 ... '/proj%20B/mytrunk/mytrunk@1')
65 ... '/proj%20B/mytrunk/mytrunk@1')
66 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
66 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
67 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
67 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
68 ('', '', 1)
68 ('', '', 1)
69 >>> revsplit('@7')
69 >>> revsplit('@7')
70 ('', '', 7)
70 ('', '', 7)
71 >>> revsplit('7')
71 >>> revsplit('7')
72 ('', '', 0)
72 ('', '', 0)
73 >>> revsplit('bad')
73 >>> revsplit('bad')
74 ('', '', 0)
74 ('', '', 0)
75 """
75 """
76 parts = rev.rsplit('@', 1)
76 parts = rev.rsplit('@', 1)
77 revnum = 0
77 revnum = 0
78 if len(parts) > 1:
78 if len(parts) > 1:
79 revnum = int(parts[1])
79 revnum = int(parts[1])
80 parts = parts[0].split('/', 1)
80 parts = parts[0].split('/', 1)
81 uuid = ''
81 uuid = ''
82 mod = ''
82 mod = ''
83 if len(parts) > 1 and parts[0].startswith('svn:'):
83 if len(parts) > 1 and parts[0].startswith('svn:'):
84 uuid = parts[0][4:]
84 uuid = parts[0][4:]
85 mod = '/' + parts[1]
85 mod = '/' + parts[1]
86 return uuid, mod, revnum
86 return uuid, mod, revnum
87
87
88 def quote(s):
88 def quote(s):
89 # As of svn 1.7, many svn calls expect "canonical" paths. In
89 # As of svn 1.7, many svn calls expect "canonical" paths. In
90 # theory, we should call svn.core.*canonicalize() on all paths
90 # theory, we should call svn.core.*canonicalize() on all paths
91 # before passing them to the API. Instead, we assume the base url
91 # before passing them to the API. Instead, we assume the base url
92 # is canonical and copy the behaviour of svn URL encoding function
92 # is canonical and copy the behaviour of svn URL encoding function
93 # so we can extend it safely with new components. The "safe"
93 # so we can extend it safely with new components. The "safe"
94 # characters were taken from the "svn_uri__char_validity" table in
94 # characters were taken from the "svn_uri__char_validity" table in
95 # libsvn_subr/path.c.
95 # libsvn_subr/path.c.
96 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
96 return urlreq.quote(s, "!$&'()*+,-./:=@_~")
97
97
98 def geturl(path):
98 def geturl(path):
99 try:
99 try:
100 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
100 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
101 except svn.core.SubversionException:
101 except svn.core.SubversionException:
102 # svn.client.url_from_path() fails with local repositories
102 # svn.client.url_from_path() fails with local repositories
103 pass
103 pass
104 if os.path.isdir(path):
104 if os.path.isdir(path):
105 path = os.path.normpath(os.path.abspath(path))
105 path = os.path.normpath(os.path.abspath(path))
106 if pycompat.osname == 'nt':
106 if pycompat.osname == 'nt':
107 path = '/' + util.normpath(path)
107 path = '/' + util.normpath(path)
108 # Module URL is later compared with the repository URL returned
108 # Module URL is later compared with the repository URL returned
109 # by svn API, which is UTF-8.
109 # by svn API, which is UTF-8.
110 path = encoding.tolocal(path)
110 path = encoding.tolocal(path)
111 path = 'file://%s' % quote(path)
111 path = 'file://%s' % quote(path)
112 return svn.core.svn_path_canonicalize(path)
112 return svn.core.svn_path_canonicalize(path)
113
113
114 def optrev(number):
114 def optrev(number):
115 optrev = svn.core.svn_opt_revision_t()
115 optrev = svn.core.svn_opt_revision_t()
116 optrev.kind = svn.core.svn_opt_revision_number
116 optrev.kind = svn.core.svn_opt_revision_number
117 optrev.value.number = number
117 optrev.value.number = number
118 return optrev
118 return optrev
119
119
120 class changedpath(object):
120 class changedpath(object):
121 def __init__(self, p):
121 def __init__(self, p):
122 self.copyfrom_path = p.copyfrom_path
122 self.copyfrom_path = p.copyfrom_path
123 self.copyfrom_rev = p.copyfrom_rev
123 self.copyfrom_rev = p.copyfrom_rev
124 self.action = p.action
124 self.action = p.action
125
125
126 def get_log_child(fp, url, paths, start, end, limit=0,
126 def get_log_child(fp, url, paths, start, end, limit=0,
127 discover_changed_paths=True, strict_node_history=False):
127 discover_changed_paths=True, strict_node_history=False):
128 protocol = -1
128 protocol = -1
129 def receiver(orig_paths, revnum, author, date, message, pool):
129 def receiver(orig_paths, revnum, author, date, message, pool):
130 paths = {}
130 paths = {}
131 if orig_paths is not None:
131 if orig_paths is not None:
132 for k, v in orig_paths.iteritems():
132 for k, v in orig_paths.iteritems():
133 paths[k] = changedpath(v)
133 paths[k] = changedpath(v)
134 pickle.dump((paths, revnum, author, date, message),
134 pickle.dump((paths, revnum, author, date, message),
135 fp, protocol)
135 fp, protocol)
136
136
137 try:
137 try:
138 # Use an ra of our own so that our parent can consume
138 # Use an ra of our own so that our parent can consume
139 # our results without confusing the server.
139 # our results without confusing the server.
140 t = transport.SvnRaTransport(url=url)
140 t = transport.SvnRaTransport(url=url)
141 svn.ra.get_log(t.ra, paths, start, end, limit,
141 svn.ra.get_log(t.ra, paths, start, end, limit,
142 discover_changed_paths,
142 discover_changed_paths,
143 strict_node_history,
143 strict_node_history,
144 receiver)
144 receiver)
145 except IOError:
145 except IOError:
146 # Caller may interrupt the iteration
146 # Caller may interrupt the iteration
147 pickle.dump(None, fp, protocol)
147 pickle.dump(None, fp, protocol)
148 except Exception as inst:
148 except Exception as inst:
149 pickle.dump(str(inst), fp, protocol)
149 pickle.dump(str(inst), fp, protocol)
150 else:
150 else:
151 pickle.dump(None, fp, protocol)
151 pickle.dump(None, fp, protocol)
152 fp.close()
152 fp.close()
153 # With large history, cleanup process goes crazy and suddenly
153 # With large history, cleanup process goes crazy and suddenly
154 # consumes *huge* amount of memory. The output file being closed,
154 # consumes *huge* amount of memory. The output file being closed,
155 # there is no need for clean termination.
155 # there is no need for clean termination.
156 os._exit(0)
156 os._exit(0)
157
157
158 def debugsvnlog(ui, **opts):
158 def debugsvnlog(ui, **opts):
159 """Fetch SVN log in a subprocess and channel them back to parent to
159 """Fetch SVN log in a subprocess and channel them back to parent to
160 avoid memory collection issues.
160 avoid memory collection issues.
161 """
161 """
162 if svn is None:
162 if svn is None:
163 raise error.Abort(_('debugsvnlog could not load Subversion python '
163 raise error.Abort(_('debugsvnlog could not load Subversion python '
164 'bindings'))
164 'bindings'))
165
165
166 args = decodeargs(ui.fin.read())
166 args = decodeargs(ui.fin.read())
167 get_log_child(ui.fout, *args)
167 get_log_child(ui.fout, *args)
168
168
169 class logstream(object):
169 class logstream(object):
170 """Interruptible revision log iterator."""
170 """Interruptible revision log iterator."""
171 def __init__(self, stdout):
171 def __init__(self, stdout):
172 self._stdout = stdout
172 self._stdout = stdout
173
173
174 def __iter__(self):
174 def __iter__(self):
175 while True:
175 while True:
176 try:
176 try:
177 entry = pickle.load(self._stdout)
177 entry = pickle.load(self._stdout)
178 except EOFError:
178 except EOFError:
179 raise error.Abort(_('Mercurial failed to run itself, check'
179 raise error.Abort(_('Mercurial failed to run itself, check'
180 ' hg executable is in PATH'))
180 ' hg executable is in PATH'))
181 try:
181 try:
182 orig_paths, revnum, author, date, message = entry
182 orig_paths, revnum, author, date, message = entry
183 except (TypeError, ValueError):
183 except (TypeError, ValueError):
184 if entry is None:
184 if entry is None:
185 break
185 break
186 raise error.Abort(_("log stream exception '%s'") % entry)
186 raise error.Abort(_("log stream exception '%s'") % entry)
187 yield entry
187 yield entry
188
188
189 def close(self):
189 def close(self):
190 if self._stdout:
190 if self._stdout:
191 self._stdout.close()
191 self._stdout.close()
192 self._stdout = None
192 self._stdout = None
193
193
194 class directlogstream(list):
194 class directlogstream(list):
195 """Direct revision log iterator.
195 """Direct revision log iterator.
196 This can be used for debugging and development but it will probably leak
196 This can be used for debugging and development but it will probably leak
197 memory and is not suitable for real conversions."""
197 memory and is not suitable for real conversions."""
198 def __init__(self, url, paths, start, end, limit=0,
198 def __init__(self, url, paths, start, end, limit=0,
199 discover_changed_paths=True, strict_node_history=False):
199 discover_changed_paths=True, strict_node_history=False):
200
200
201 def receiver(orig_paths, revnum, author, date, message, pool):
201 def receiver(orig_paths, revnum, author, date, message, pool):
202 paths = {}
202 paths = {}
203 if orig_paths is not None:
203 if orig_paths is not None:
204 for k, v in orig_paths.iteritems():
204 for k, v in orig_paths.iteritems():
205 paths[k] = changedpath(v)
205 paths[k] = changedpath(v)
206 self.append((paths, revnum, author, date, message))
206 self.append((paths, revnum, author, date, message))
207
207
208 # Use an ra of our own so that our parent can consume
208 # Use an ra of our own so that our parent can consume
209 # our results without confusing the server.
209 # our results without confusing the server.
210 t = transport.SvnRaTransport(url=url)
210 t = transport.SvnRaTransport(url=url)
211 svn.ra.get_log(t.ra, paths, start, end, limit,
211 svn.ra.get_log(t.ra, paths, start, end, limit,
212 discover_changed_paths,
212 discover_changed_paths,
213 strict_node_history,
213 strict_node_history,
214 receiver)
214 receiver)
215
215
216 def close(self):
216 def close(self):
217 pass
217 pass
218
218
219 # Check to see if the given path is a local Subversion repo. Verify this by
219 # Check to see if the given path is a local Subversion repo. Verify this by
220 # looking for several svn-specific files and directories in the given
220 # looking for several svn-specific files and directories in the given
221 # directory.
221 # directory.
222 def filecheck(ui, path, proto):
222 def filecheck(ui, path, proto):
223 for x in ('locks', 'hooks', 'format', 'db'):
223 for x in ('locks', 'hooks', 'format', 'db'):
224 if not os.path.exists(os.path.join(path, x)):
224 if not os.path.exists(os.path.join(path, x)):
225 return False
225 return False
226 return True
226 return True
227
227
228 # Check to see if a given path is the root of an svn repo over http. We verify
228 # Check to see if a given path is the root of an svn repo over http. We verify
229 # this by requesting a version-controlled URL we know can't exist and looking
229 # this by requesting a version-controlled URL we know can't exist and looking
230 # for the svn-specific "not found" XML.
230 # for the svn-specific "not found" XML.
231 def httpcheck(ui, path, proto):
231 def httpcheck(ui, path, proto):
232 try:
232 try:
233 opener = urlreq.buildopener()
233 opener = urlreq.buildopener()
234 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
234 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
235 data = rsp.read()
235 data = rsp.read()
236 except urlerr.httperror as inst:
236 except urlerr.httperror as inst:
237 if inst.code != 404:
237 if inst.code != 404:
238 # Except for 404 we cannot know for sure this is not an svn repo
238 # Except for 404 we cannot know for sure this is not an svn repo
239 ui.warn(_('svn: cannot probe remote repository, assume it could '
239 ui.warn(_('svn: cannot probe remote repository, assume it could '
240 'be a subversion repository. Use --source-type if you '
240 'be a subversion repository. Use --source-type if you '
241 'know better.\n'))
241 'know better.\n'))
242 return True
242 return True
243 data = inst.fp.read()
243 data = inst.fp.read()
244 except Exception:
244 except Exception:
245 # Could be urlerr.urlerror if the URL is invalid or anything else.
245 # Could be urlerr.urlerror if the URL is invalid or anything else.
246 return False
246 return False
247 return '<m:human-readable errcode="160013">' in data
247 return '<m:human-readable errcode="160013">' in data
248
248
249 protomap = {'http': httpcheck,
249 protomap = {'http': httpcheck,
250 'https': httpcheck,
250 'https': httpcheck,
251 'file': filecheck,
251 'file': filecheck,
252 }
252 }
253 def issvnurl(ui, url):
253 def issvnurl(ui, url):
254 try:
254 try:
255 proto, path = url.split('://', 1)
255 proto, path = url.split('://', 1)
256 if proto == 'file':
256 if proto == 'file':
257 if (pycompat.osname == 'nt' and path[:1] == '/'
257 if (pycompat.osname == 'nt' and path[:1] == '/'
258 and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
258 and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
259 path = path[:2] + ':/' + path[6:]
259 path = path[:2] + ':/' + path[6:]
260 path = urlreq.url2pathname(path)
260 path = urlreq.url2pathname(path)
261 except ValueError:
261 except ValueError:
262 proto = 'file'
262 proto = 'file'
263 path = os.path.abspath(url)
263 path = os.path.abspath(url)
264 if proto == 'file':
264 if proto == 'file':
265 path = util.pconvert(path)
265 path = util.pconvert(path)
266 check = protomap.get(proto, lambda *args: False)
266 check = protomap.get(proto, lambda *args: False)
267 while '/' in path:
267 while '/' in path:
268 if check(ui, path, proto):
268 if check(ui, path, proto):
269 return True
269 return True
270 path = path.rsplit('/', 1)[0]
270 path = path.rsplit('/', 1)[0]
271 return False
271 return False
272
272
273 # SVN conversion code stolen from bzr-svn and tailor
273 # SVN conversion code stolen from bzr-svn and tailor
274 #
274 #
275 # Subversion looks like a versioned filesystem, branches structures
275 # Subversion looks like a versioned filesystem, branches structures
276 # are defined by conventions and not enforced by the tool. First,
276 # are defined by conventions and not enforced by the tool. First,
277 # we define the potential branches (modules) as "trunk" and "branches"
277 # we define the potential branches (modules) as "trunk" and "branches"
278 # children directories. Revisions are then identified by their
278 # children directories. Revisions are then identified by their
279 # module and revision number (and a repository identifier).
279 # module and revision number (and a repository identifier).
280 #
280 #
281 # The revision graph is really a tree (or a forest). By default, a
281 # The revision graph is really a tree (or a forest). By default, a
282 # revision parent is the previous revision in the same module. If the
282 # revision parent is the previous revision in the same module. If the
283 # module directory is copied/moved from another module then the
283 # module directory is copied/moved from another module then the
284 # revision is the module root and its parent the source revision in
284 # revision is the module root and its parent the source revision in
285 # the parent module. A revision has at most one parent.
285 # the parent module. A revision has at most one parent.
286 #
286 #
287 class svn_source(converter_source):
287 class svn_source(converter_source):
288 def __init__(self, ui, url, revs=None):
288 def __init__(self, ui, url, revs=None):
289 super(svn_source, self).__init__(ui, url, revs=revs)
289 super(svn_source, self).__init__(ui, url, revs=revs)
290
290
291 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
291 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
292 (os.path.exists(url) and
292 (os.path.exists(url) and
293 os.path.exists(os.path.join(url, '.svn'))) or
293 os.path.exists(os.path.join(url, '.svn'))) or
294 issvnurl(ui, url)):
294 issvnurl(ui, url)):
295 raise NoRepo(_("%s does not look like a Subversion repository")
295 raise NoRepo(_("%s does not look like a Subversion repository")
296 % url)
296 % url)
297 if svn is None:
297 if svn is None:
298 raise MissingTool(_('could not load Subversion python bindings'))
298 raise MissingTool(_('could not load Subversion python bindings'))
299
299
300 try:
300 try:
301 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
301 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
302 if version < (1, 4):
302 if version < (1, 4):
303 raise MissingTool(_('Subversion python bindings %d.%d found, '
303 raise MissingTool(_('Subversion python bindings %d.%d found, '
304 '1.4 or later required') % version)
304 '1.4 or later required') % version)
305 except AttributeError:
305 except AttributeError:
306 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
306 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
307 'or later required'))
307 'or later required'))
308
308
309 self.lastrevs = {}
309 self.lastrevs = {}
310
310
311 latest = None
311 latest = None
312 try:
312 try:
313 # Support file://path@rev syntax. Useful e.g. to convert
313 # Support file://path@rev syntax. Useful e.g. to convert
314 # deleted branches.
314 # deleted branches.
315 at = url.rfind('@')
315 at = url.rfind('@')
316 if at >= 0:
316 if at >= 0:
317 latest = int(url[at + 1:])
317 latest = int(url[at + 1:])
318 url = url[:at]
318 url = url[:at]
319 except ValueError:
319 except ValueError:
320 pass
320 pass
321 self.url = geturl(url)
321 self.url = geturl(url)
322 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
322 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
323 try:
323 try:
324 self.transport = transport.SvnRaTransport(url=self.url)
324 self.transport = transport.SvnRaTransport(url=self.url)
325 self.ra = self.transport.ra
325 self.ra = self.transport.ra
326 self.ctx = self.transport.client
326 self.ctx = self.transport.client
327 self.baseurl = svn.ra.get_repos_root(self.ra)
327 self.baseurl = svn.ra.get_repos_root(self.ra)
328 # Module is either empty or a repository path starting with
328 # Module is either empty or a repository path starting with
329 # a slash and not ending with a slash.
329 # a slash and not ending with a slash.
330 self.module = urlreq.unquote(self.url[len(self.baseurl):])
330 self.module = urlreq.unquote(self.url[len(self.baseurl):])
331 self.prevmodule = None
331 self.prevmodule = None
332 self.rootmodule = self.module
332 self.rootmodule = self.module
333 self.commits = {}
333 self.commits = {}
334 self.paths = {}
334 self.paths = {}
335 self.uuid = svn.ra.get_uuid(self.ra)
335 self.uuid = svn.ra.get_uuid(self.ra)
336 except svn.core.SubversionException:
336 except svn.core.SubversionException:
337 ui.traceback()
337 ui.traceback()
338 svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
338 svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
339 svn.core.SVN_VER_MINOR,
339 svn.core.SVN_VER_MINOR,
340 svn.core.SVN_VER_MICRO)
340 svn.core.SVN_VER_MICRO)
341 raise NoRepo(_("%s does not look like a Subversion repository "
341 raise NoRepo(_("%s does not look like a Subversion repository "
342 "to libsvn version %s")
342 "to libsvn version %s")
343 % (self.url, svnversion))
343 % (self.url, svnversion))
344
344
345 if revs:
345 if revs:
346 if len(revs) > 1:
346 if len(revs) > 1:
347 raise error.Abort(_('subversion source does not support '
347 raise error.Abort(_('subversion source does not support '
348 'specifying multiple revisions'))
348 'specifying multiple revisions'))
349 try:
349 try:
350 latest = int(revs[0])
350 latest = int(revs[0])
351 except ValueError:
351 except ValueError:
352 raise error.Abort(_('svn: revision %s is not an integer') %
352 raise error.Abort(_('svn: revision %s is not an integer') %
353 revs[0])
353 revs[0])
354
354
355 self.trunkname = self.ui.config('convert', 'svn.trunk',
355 self.trunkname = self.ui.config('convert', 'svn.trunk',
356 'trunk').strip('/')
356 'trunk').strip('/')
357 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
357 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
358 try:
358 try:
359 self.startrev = int(self.startrev)
359 self.startrev = int(self.startrev)
360 if self.startrev < 0:
360 if self.startrev < 0:
361 self.startrev = 0
361 self.startrev = 0
362 except ValueError:
362 except ValueError:
363 raise error.Abort(_('svn: start revision %s is not an integer')
363 raise error.Abort(_('svn: start revision %s is not an integer')
364 % self.startrev)
364 % self.startrev)
365
365
366 try:
366 try:
367 self.head = self.latest(self.module, latest)
367 self.head = self.latest(self.module, latest)
368 except SvnPathNotFound:
368 except SvnPathNotFound:
369 self.head = None
369 self.head = None
370 if not self.head:
370 if not self.head:
371 raise error.Abort(_('no revision found in module %s')
371 raise error.Abort(_('no revision found in module %s')
372 % self.module)
372 % self.module)
373 self.last_changed = self.revnum(self.head)
373 self.last_changed = self.revnum(self.head)
374
374
375 self._changescache = (None, None)
375 self._changescache = (None, None)
376
376
377 if os.path.exists(os.path.join(url, '.svn/entries')):
377 if os.path.exists(os.path.join(url, '.svn/entries')):
378 self.wc = url
378 self.wc = url
379 else:
379 else:
380 self.wc = None
380 self.wc = None
381 self.convertfp = None
381 self.convertfp = None
382
382
383 def setrevmap(self, revmap):
383 def setrevmap(self, revmap):
384 lastrevs = {}
384 lastrevs = {}
385 for revid in revmap.iterkeys():
385 for revid in revmap.iterkeys():
386 uuid, module, revnum = revsplit(revid)
386 uuid, module, revnum = revsplit(revid)
387 lastrevnum = lastrevs.setdefault(module, revnum)
387 lastrevnum = lastrevs.setdefault(module, revnum)
388 if revnum > lastrevnum:
388 if revnum > lastrevnum:
389 lastrevs[module] = revnum
389 lastrevs[module] = revnum
390 self.lastrevs = lastrevs
390 self.lastrevs = lastrevs
391
391
392 def exists(self, path, optrev):
392 def exists(self, path, optrev):
393 try:
393 try:
394 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
394 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
395 optrev, False, self.ctx)
395 optrev, False, self.ctx)
396 return True
396 return True
397 except svn.core.SubversionException:
397 except svn.core.SubversionException:
398 return False
398 return False
399
399
400 def getheads(self):
400 def getheads(self):
401
401
402 def isdir(path, revnum):
402 def isdir(path, revnum):
403 kind = self._checkpath(path, revnum)
403 kind = self._checkpath(path, revnum)
404 return kind == svn.core.svn_node_dir
404 return kind == svn.core.svn_node_dir
405
405
406 def getcfgpath(name, rev):
406 def getcfgpath(name, rev):
407 cfgpath = self.ui.config('convert', 'svn.' + name)
407 cfgpath = self.ui.config('convert', 'svn.' + name)
408 if cfgpath is not None and cfgpath.strip() == '':
408 if cfgpath is not None and cfgpath.strip() == '':
409 return None
409 return None
410 path = (cfgpath or name).strip('/')
410 path = (cfgpath or name).strip('/')
411 if not self.exists(path, rev):
411 if not self.exists(path, rev):
412 if self.module.endswith(path) and name == 'trunk':
412 if self.module.endswith(path) and name == 'trunk':
413 # we are converting from inside this directory
413 # we are converting from inside this directory
414 return None
414 return None
415 if cfgpath:
415 if cfgpath:
416 raise error.Abort(_('expected %s to be at %r, but not found'
416 raise error.Abort(_('expected %s to be at %r, but not found'
417 ) % (name, path))
417 ) % (name, path))
418 return None
418 return None
419 self.ui.note(_('found %s at %r\n') % (name, path))
419 self.ui.note(_('found %s at %r\n') % (name, path))
420 return path
420 return path
421
421
422 rev = optrev(self.last_changed)
422 rev = optrev(self.last_changed)
423 oldmodule = ''
423 oldmodule = ''
424 trunk = getcfgpath('trunk', rev)
424 trunk = getcfgpath('trunk', rev)
425 self.tags = getcfgpath('tags', rev)
425 self.tags = getcfgpath('tags', rev)
426 branches = getcfgpath('branches', rev)
426 branches = getcfgpath('branches', rev)
427
427
428 # If the project has a trunk or branches, we will extract heads
428 # If the project has a trunk or branches, we will extract heads
429 # from them. We keep the project root otherwise.
429 # from them. We keep the project root otherwise.
430 if trunk:
430 if trunk:
431 oldmodule = self.module or ''
431 oldmodule = self.module or ''
432 self.module += '/' + trunk
432 self.module += '/' + trunk
433 self.head = self.latest(self.module, self.last_changed)
433 self.head = self.latest(self.module, self.last_changed)
434 if not self.head:
434 if not self.head:
435 raise error.Abort(_('no revision found in module %s')
435 raise error.Abort(_('no revision found in module %s')
436 % self.module)
436 % self.module)
437
437
438 # First head in the list is the module's head
438 # First head in the list is the module's head
439 self.heads = [self.head]
439 self.heads = [self.head]
440 if self.tags is not None:
440 if self.tags is not None:
441 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
441 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
442
442
443 # Check if branches bring a few more heads to the list
443 # Check if branches bring a few more heads to the list
444 if branches:
444 if branches:
445 rpath = self.url.strip('/')
445 rpath = self.url.strip('/')
446 branchnames = svn.client.ls(rpath + '/' + quote(branches),
446 branchnames = svn.client.ls(rpath + '/' + quote(branches),
447 rev, False, self.ctx)
447 rev, False, self.ctx)
448 for branch in sorted(branchnames):
448 for branch in sorted(branchnames):
449 module = '%s/%s/%s' % (oldmodule, branches, branch)
449 module = '%s/%s/%s' % (oldmodule, branches, branch)
450 if not isdir(module, self.last_changed):
450 if not isdir(module, self.last_changed):
451 continue
451 continue
452 brevid = self.latest(module, self.last_changed)
452 brevid = self.latest(module, self.last_changed)
453 if not brevid:
453 if not brevid:
454 self.ui.note(_('ignoring empty branch %s\n') % branch)
454 self.ui.note(_('ignoring empty branch %s\n') % branch)
455 continue
455 continue
456 self.ui.note(_('found branch %s at %d\n') %
456 self.ui.note(_('found branch %s at %d\n') %
457 (branch, self.revnum(brevid)))
457 (branch, self.revnum(brevid)))
458 self.heads.append(brevid)
458 self.heads.append(brevid)
459
459
460 if self.startrev and self.heads:
460 if self.startrev and self.heads:
461 if len(self.heads) > 1:
461 if len(self.heads) > 1:
462 raise error.Abort(_('svn: start revision is not supported '
462 raise error.Abort(_('svn: start revision is not supported '
463 'with more than one branch'))
463 'with more than one branch'))
464 revnum = self.revnum(self.heads[0])
464 revnum = self.revnum(self.heads[0])
465 if revnum < self.startrev:
465 if revnum < self.startrev:
466 raise error.Abort(
466 raise error.Abort(
467 _('svn: no revision found after start revision %d')
467 _('svn: no revision found after start revision %d')
468 % self.startrev)
468 % self.startrev)
469
469
470 return self.heads
470 return self.heads
471
471
472 def _getchanges(self, rev, full):
472 def _getchanges(self, rev, full):
473 (paths, parents) = self.paths[rev]
473 (paths, parents) = self.paths[rev]
474 copies = {}
474 copies = {}
475 if parents:
475 if parents:
476 files, self.removed, copies = self.expandpaths(rev, paths, parents)
476 files, self.removed, copies = self.expandpaths(rev, paths, parents)
477 if full or not parents:
477 if full or not parents:
478 # Perform a full checkout on roots
478 # Perform a full checkout on roots
479 uuid, module, revnum = revsplit(rev)
479 uuid, module, revnum = revsplit(rev)
480 entries = svn.client.ls(self.baseurl + quote(module),
480 entries = svn.client.ls(self.baseurl + quote(module),
481 optrev(revnum), True, self.ctx)
481 optrev(revnum), True, self.ctx)
482 files = [n for n, e in entries.iteritems()
482 files = [n for n, e in entries.iteritems()
483 if e.kind == svn.core.svn_node_file]
483 if e.kind == svn.core.svn_node_file]
484 self.removed = set()
484 self.removed = set()
485
485
486 files.sort()
486 files.sort()
487 files = zip(files, [rev] * len(files))
487 files = zip(files, [rev] * len(files))
488 return (files, copies)
488 return (files, copies)
489
489
490 def getchanges(self, rev, full):
490 def getchanges(self, rev, full):
491 # reuse cache from getchangedfiles
491 # reuse cache from getchangedfiles
492 if self._changescache[0] == rev and not full:
492 if self._changescache[0] == rev and not full:
493 (files, copies) = self._changescache[1]
493 (files, copies) = self._changescache[1]
494 else:
494 else:
495 (files, copies) = self._getchanges(rev, full)
495 (files, copies) = self._getchanges(rev, full)
496 # caller caches the result, so free it here to release memory
496 # caller caches the result, so free it here to release memory
497 del self.paths[rev]
497 del self.paths[rev]
498 return (files, copies, set())
498 return (files, copies, set())
499
499
500 def getchangedfiles(self, rev, i):
500 def getchangedfiles(self, rev, i):
501 # called from filemap - cache computed values for reuse in getchanges
501 # called from filemap - cache computed values for reuse in getchanges
502 (files, copies) = self._getchanges(rev, False)
502 (files, copies) = self._getchanges(rev, False)
503 self._changescache = (rev, (files, copies))
503 self._changescache = (rev, (files, copies))
504 return [f[0] for f in files]
504 return [f[0] for f in files]
505
505
506 def getcommit(self, rev):
506 def getcommit(self, rev):
507 if rev not in self.commits:
507 if rev not in self.commits:
508 uuid, module, revnum = revsplit(rev)
508 uuid, module, revnum = revsplit(rev)
509 self.module = module
509 self.module = module
510 self.reparent(module)
510 self.reparent(module)
511 # We assume that:
511 # We assume that:
512 # - requests for revisions after "stop" come from the
512 # - requests for revisions after "stop" come from the
513 # revision graph backward traversal. Cache all of them
513 # revision graph backward traversal. Cache all of them
514 # down to stop, they will be used eventually.
514 # down to stop, they will be used eventually.
515 # - requests for revisions before "stop" come to get
515 # - requests for revisions before "stop" come to get
516 # isolated branches parents. Just fetch what is needed.
516 # isolated branches parents. Just fetch what is needed.
517 stop = self.lastrevs.get(module, 0)
517 stop = self.lastrevs.get(module, 0)
518 if revnum < stop:
518 if revnum < stop:
519 stop = revnum + 1
519 stop = revnum + 1
520 self._fetch_revisions(revnum, stop)
520 self._fetch_revisions(revnum, stop)
521 if rev not in self.commits:
521 if rev not in self.commits:
522 raise error.Abort(_('svn: revision %s not found') % revnum)
522 raise error.Abort(_('svn: revision %s not found') % revnum)
523 revcommit = self.commits[rev]
523 revcommit = self.commits[rev]
524 # caller caches the result, so free it here to release memory
524 # caller caches the result, so free it here to release memory
525 del self.commits[rev]
525 del self.commits[rev]
526 return revcommit
526 return revcommit
527
527
528 def checkrevformat(self, revstr, mapname='splicemap'):
528 def checkrevformat(self, revstr, mapname='splicemap'):
529 """ fails if revision format does not match the correct format"""
529 """ fails if revision format does not match the correct format"""
530 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
530 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
531 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
531 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
532 r'{12,12}(.*)\@[0-9]+$',revstr):
532 r'{12,12}(.*)\@[0-9]+$',revstr):
533 raise error.Abort(_('%s entry %s is not a valid revision'
533 raise error.Abort(_('%s entry %s is not a valid revision'
534 ' identifier') % (mapname, revstr))
534 ' identifier') % (mapname, revstr))
535
535
536 def numcommits(self):
536 def numcommits(self):
537 return int(self.head.rsplit('@', 1)[1]) - self.startrev
537 return int(self.head.rsplit('@', 1)[1]) - self.startrev
538
538
539 def gettags(self):
539 def gettags(self):
540 tags = {}
540 tags = {}
541 if self.tags is None:
541 if self.tags is None:
542 return tags
542 return tags
543
543
544 # svn tags are just a convention, project branches left in a
544 # svn tags are just a convention, project branches left in a
545 # 'tags' directory. There is no other relationship than
545 # 'tags' directory. There is no other relationship than
546 # ancestry, which is expensive to discover and makes them hard
546 # ancestry, which is expensive to discover and makes them hard
547 # to update incrementally. Worse, past revisions may be
547 # to update incrementally. Worse, past revisions may be
548 # referenced by tags far away in the future, requiring a deep
548 # referenced by tags far away in the future, requiring a deep
549 # history traversal on every calculation. Current code
549 # history traversal on every calculation. Current code
550 # performs a single backward traversal, tracking moves within
550 # performs a single backward traversal, tracking moves within
551 # the tags directory (tag renaming) and recording a new tag
551 # the tags directory (tag renaming) and recording a new tag
552 # everytime a project is copied from outside the tags
552 # everytime a project is copied from outside the tags
553 # directory. It also lists deleted tags, this behaviour may
553 # directory. It also lists deleted tags, this behaviour may
554 # change in the future.
554 # change in the future.
555 pendings = []
555 pendings = []
556 tagspath = self.tags
556 tagspath = self.tags
557 start = svn.ra.get_latest_revnum(self.ra)
557 start = svn.ra.get_latest_revnum(self.ra)
558 stream = self._getlog([self.tags], start, self.startrev)
558 stream = self._getlog([self.tags], start, self.startrev)
559 try:
559 try:
560 for entry in stream:
560 for entry in stream:
561 origpaths, revnum, author, date, message = entry
561 origpaths, revnum, author, date, message = entry
562 if not origpaths:
562 if not origpaths:
563 origpaths = []
563 origpaths = []
564 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
564 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
565 in origpaths.iteritems() if e.copyfrom_path]
565 in origpaths.iteritems() if e.copyfrom_path]
566 # Apply moves/copies from more specific to general
566 # Apply moves/copies from more specific to general
567 copies.sort(reverse=True)
567 copies.sort(reverse=True)
568
568
569 srctagspath = tagspath
569 srctagspath = tagspath
570 if copies and copies[-1][2] == tagspath:
570 if copies and copies[-1][2] == tagspath:
571 # Track tags directory moves
571 # Track tags directory moves
572 srctagspath = copies.pop()[0]
572 srctagspath = copies.pop()[0]
573
573
574 for source, sourcerev, dest in copies:
574 for source, sourcerev, dest in copies:
575 if not dest.startswith(tagspath + '/'):
575 if not dest.startswith(tagspath + '/'):
576 continue
576 continue
577 for tag in pendings:
577 for tag in pendings:
578 if tag[0].startswith(dest):
578 if tag[0].startswith(dest):
579 tagpath = source + tag[0][len(dest):]
579 tagpath = source + tag[0][len(dest):]
580 tag[:2] = [tagpath, sourcerev]
580 tag[:2] = [tagpath, sourcerev]
581 break
581 break
582 else:
582 else:
583 pendings.append([source, sourcerev, dest])
583 pendings.append([source, sourcerev, dest])
584
584
585 # Filter out tags with children coming from different
585 # Filter out tags with children coming from different
586 # parts of the repository like:
586 # parts of the repository like:
587 # /tags/tag.1 (from /trunk:10)
587 # /tags/tag.1 (from /trunk:10)
588 # /tags/tag.1/foo (from /branches/foo:12)
588 # /tags/tag.1/foo (from /branches/foo:12)
589 # Here/tags/tag.1 discarded as well as its children.
589 # Here/tags/tag.1 discarded as well as its children.
590 # It happens with tools like cvs2svn. Such tags cannot
590 # It happens with tools like cvs2svn. Such tags cannot
591 # be represented in mercurial.
591 # be represented in mercurial.
592 addeds = dict((p, e.copyfrom_path) for p, e
592 addeds = dict((p, e.copyfrom_path) for p, e
593 in origpaths.iteritems()
593 in origpaths.iteritems()
594 if e.action == 'A' and e.copyfrom_path)
594 if e.action == 'A' and e.copyfrom_path)
595 badroots = set()
595 badroots = set()
596 for destroot in addeds:
596 for destroot in addeds:
597 for source, sourcerev, dest in pendings:
597 for source, sourcerev, dest in pendings:
598 if (not dest.startswith(destroot + '/')
598 if (not dest.startswith(destroot + '/')
599 or source.startswith(addeds[destroot] + '/')):
599 or source.startswith(addeds[destroot] + '/')):
600 continue
600 continue
601 badroots.add(destroot)
601 badroots.add(destroot)
602 break
602 break
603
603
604 for badroot in badroots:
604 for badroot in badroots:
605 pendings = [p for p in pendings if p[2] != badroot
605 pendings = [p for p in pendings if p[2] != badroot
606 and not p[2].startswith(badroot + '/')]
606 and not p[2].startswith(badroot + '/')]
607
607
608 # Tell tag renamings from tag creations
608 # Tell tag renamings from tag creations
609 renamings = []
609 renamings = []
610 for source, sourcerev, dest in pendings:
610 for source, sourcerev, dest in pendings:
611 tagname = dest.split('/')[-1]
611 tagname = dest.split('/')[-1]
612 if source.startswith(srctagspath):
612 if source.startswith(srctagspath):
613 renamings.append([source, sourcerev, tagname])
613 renamings.append([source, sourcerev, tagname])
614 continue
614 continue
615 if tagname in tags:
615 if tagname in tags:
616 # Keep the latest tag value
616 # Keep the latest tag value
617 continue
617 continue
618 # From revision may be fake, get one with changes
618 # From revision may be fake, get one with changes
619 try:
619 try:
620 tagid = self.latest(source, sourcerev)
620 tagid = self.latest(source, sourcerev)
621 if tagid and tagname not in tags:
621 if tagid and tagname not in tags:
622 tags[tagname] = tagid
622 tags[tagname] = tagid
623 except SvnPathNotFound:
623 except SvnPathNotFound:
624 # It happens when we are following directories
624 # It happens when we are following directories
625 # we assumed were copied with their parents
625 # we assumed were copied with their parents
626 # but were really created in the tag
626 # but were really created in the tag
627 # directory.
627 # directory.
628 pass
628 pass
629 pendings = renamings
629 pendings = renamings
630 tagspath = srctagspath
630 tagspath = srctagspath
631 finally:
631 finally:
632 stream.close()
632 stream.close()
633 return tags
633 return tags
634
634
635 def converted(self, rev, destrev):
635 def converted(self, rev, destrev):
636 if not self.wc:
636 if not self.wc:
637 return
637 return
638 if self.convertfp is None:
638 if self.convertfp is None:
639 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
639 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
640 'a')
640 'a')
641 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
641 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
642 self.convertfp.flush()
642 self.convertfp.flush()
643
643
644 def revid(self, revnum, module=None):
644 def revid(self, revnum, module=None):
645 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
645 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
646
646
647 def revnum(self, rev):
647 def revnum(self, rev):
648 return int(rev.split('@')[-1])
648 return int(rev.split('@')[-1])
649
649
    def latest(self, path, stop=None):
        """Find the latest revid affecting path, up to stop revision
        number. If stop is None, default to repository latest
        revision. It may return a revision in a different module,
        since a branch may be moved without a change being
        reported. Return None if computed module does not belong to
        rootmodule subtree.
        """
        def findchanges(path, start, stop=None):
            # Walk the log of `path` backward from `start`, following
            # copyfrom renames, and return (revnum, path) for the first
            # entry with changed paths (or the entry at `stop`).
            # `path` is rebound when a rename is detected.
            stream = self._getlog([path], start, stop or 1)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if stop is None and paths:
                        # We do not know the latest changed revision,
                        # keep the first one with changed paths.
                        break
                    # NOTE: in Python 2 `revnum <= None` is always
                    # False, so this guard only triggers when an
                    # explicit stop was given.
                    if revnum <= stop:
                        break

                    for p in paths:
                        if (not path.startswith(p) or
                            not paths[p].copyfrom_path):
                            continue
                        newpath = paths[p].copyfrom_path + path[len(p):]
                        self.ui.debug("branch renamed from %s to %s at %d\n" %
                                      (path, newpath, revnum))
                        path = newpath
                        break
                    # NOTE(review): `paths`/`revnum` come from the last
                    # loop iteration; assumes the stream yields at least
                    # one entry — verify against _getlog's contract.
                    if not paths:
                        revnum = None
                    return revnum, path
            finally:
                stream.close()

        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if stop is None:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat() must be issued from the repository root, so
            # temporarily reparent to '' and restore afterwards.
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except svn.core.SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        revnum, realpath = findchanges(path, stop, dirent.created_rev)
        if revnum is None:
            # Tools like svnsync can create empty revision, when
            # synchronizing only a subtree for instance. These empty
            # revisions created_rev still have their original values
            # despite all changes having disappeared and can be
            # returned by ra.stat(), at least when stating the root
            # module. In that case, do not trust created_rev and scan
            # the whole history.
            revnum, realpath = findchanges(path, stop)
            if revnum is None:
                self.ui.debug('ignoring empty branch %r\n' % realpath)
                return None

        if not realpath.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % realpath)
            return None
        return self.revid(revnum, realpath)
723
723
724 def reparent(self, module):
724 def reparent(self, module):
725 """Reparent the svn transport and return the previous parent."""
725 """Reparent the svn transport and return the previous parent."""
726 if self.prevmodule == module:
726 if self.prevmodule == module:
727 return module
727 return module
728 svnurl = self.baseurl + quote(module)
728 svnurl = self.baseurl + quote(module)
729 prevmodule = self.prevmodule
729 prevmodule = self.prevmodule
730 if prevmodule is None:
730 if prevmodule is None:
731 prevmodule = ''
731 prevmodule = ''
732 self.ui.debug("reparent to %s\n" % svnurl)
732 self.ui.debug("reparent to %s\n" % svnurl)
733 svn.ra.reparent(self.ra, svnurl)
733 svn.ra.reparent(self.ra, svnurl)
734 self.prevmodule = module
734 self.prevmodule = module
735 return prevmodule
735 return prevmodule
736
736
    def expandpaths(self, rev, paths, parents):
        """Expand svn changed-path entries of *rev* into file sets.

        paths is a list of (path, entry) pairs from the svn log;
        parents is the list of parent revision ids.  Returns
        (changed, removed, copies): changed is a list of all touched
        paths (including removed ones), removed is the set of deleted
        paths, and copies maps copy destinations to their sources.
        """
        changed, removed = set(), set()
        copies = {}

        # Reparent lazily: only when the revision's module differs
        # from the transport's current one.
        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths), unit=_('paths'))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                # NOTE(review): assumes parents is non-empty on this
                # branch — confirm deletions never appear on roots.
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A whole directory was deleted: mark every file it
                    # contained in the parent revision as removed.
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                # Every file below the (added/replaced) directory is
                # considered changed in this revision.
                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                # Record a per-file copy for each file the copied
                # directory contained at the source revision.
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        # changed includes removed paths as well, per converter contract.
        changed.update(removed)
        return (list(changed), removed, copies)
837
837
838 def _fetch_revisions(self, from_revnum, to_revnum):
838 def _fetch_revisions(self, from_revnum, to_revnum):
839 if from_revnum < to_revnum:
839 if from_revnum < to_revnum:
840 from_revnum, to_revnum = to_revnum, from_revnum
840 from_revnum, to_revnum = to_revnum, from_revnum
841
841
842 self.child_cset = None
842 self.child_cset = None
843
843
844 def parselogentry(orig_paths, revnum, author, date, message):
844 def parselogentry(orig_paths, revnum, author, date, message):
845 """Return the parsed commit object or None, and True if
845 """Return the parsed commit object or None, and True if
846 the revision is a branch root.
846 the revision is a branch root.
847 """
847 """
848 self.ui.debug("parsing revision %d (%d changes)\n" %
848 self.ui.debug("parsing revision %d (%d changes)\n" %
849 (revnum, len(orig_paths)))
849 (revnum, len(orig_paths)))
850
850
851 branched = False
851 branched = False
852 rev = self.revid(revnum)
852 rev = self.revid(revnum)
853 # branch log might return entries for a parent we already have
853 # branch log might return entries for a parent we already have
854
854
855 if rev in self.commits or revnum < to_revnum:
855 if rev in self.commits or revnum < to_revnum:
856 return None, branched
856 return None, branched
857
857
858 parents = []
858 parents = []
859 # check whether this revision is the start of a branch or part
859 # check whether this revision is the start of a branch or part
860 # of a branch renaming
860 # of a branch renaming
861 orig_paths = sorted(orig_paths.iteritems())
861 orig_paths = sorted(orig_paths.iteritems())
862 root_paths = [(p, e) for p, e in orig_paths
862 root_paths = [(p, e) for p, e in orig_paths
863 if self.module.startswith(p)]
863 if self.module.startswith(p)]
864 if root_paths:
864 if root_paths:
865 path, ent = root_paths[-1]
865 path, ent = root_paths[-1]
866 if ent.copyfrom_path:
866 if ent.copyfrom_path:
867 branched = True
867 branched = True
868 newpath = ent.copyfrom_path + self.module[len(path):]
868 newpath = ent.copyfrom_path + self.module[len(path):]
869 # ent.copyfrom_rev may not be the actual last revision
869 # ent.copyfrom_rev may not be the actual last revision
870 previd = self.latest(newpath, ent.copyfrom_rev)
870 previd = self.latest(newpath, ent.copyfrom_rev)
871 if previd is not None:
871 if previd is not None:
872 prevmodule, prevnum = revsplit(previd)[1:]
872 prevmodule, prevnum = revsplit(previd)[1:]
873 if prevnum >= self.startrev:
873 if prevnum >= self.startrev:
874 parents = [previd]
874 parents = [previd]
875 self.ui.note(
875 self.ui.note(
876 _('found parent of branch %s at %d: %s\n') %
876 _('found parent of branch %s at %d: %s\n') %
877 (self.module, prevnum, prevmodule))
877 (self.module, prevnum, prevmodule))
878 else:
878 else:
879 self.ui.debug("no copyfrom path, don't know what to do.\n")
879 self.ui.debug("no copyfrom path, don't know what to do.\n")
880
880
881 paths = []
881 paths = []
882 # filter out unrelated paths
882 # filter out unrelated paths
883 for path, ent in orig_paths:
883 for path, ent in orig_paths:
884 if self.getrelpath(path) is None:
884 if self.getrelpath(path) is None:
885 continue
885 continue
886 paths.append((path, ent))
886 paths.append((path, ent))
887
887
888 # Example SVN datetime. Includes microseconds.
888 # Example SVN datetime. Includes microseconds.
889 # ISO-8601 conformant
889 # ISO-8601 conformant
890 # '2007-01-04T17:35:00.902377Z'
890 # '2007-01-04T17:35:00.902377Z'
891 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
891 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
892 if self.ui.configbool('convert', 'localtimezone'):
892 if self.ui.configbool('convert', 'localtimezone'):
893 date = makedatetimestamp(date[0])
893 date = makedatetimestamp(date[0])
894
894
895 if message:
895 if message:
896 log = self.recode(message)
896 log = self.recode(message)
897 else:
897 else:
898 log = ''
898 log = ''
899
899
900 if author:
900 if author:
901 author = self.recode(author)
901 author = self.recode(author)
902 else:
902 else:
903 author = ''
903 author = ''
904
904
905 try:
905 try:
906 branch = self.module.split("/")[-1]
906 branch = self.module.split("/")[-1]
907 if branch == self.trunkname:
907 if branch == self.trunkname:
908 branch = None
908 branch = None
909 except IndexError:
909 except IndexError:
910 branch = None
910 branch = None
911
911
912 cset = commit(author=author,
912 cset = commit(author=author,
913 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
913 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
914 desc=log,
914 desc=log,
915 parents=parents,
915 parents=parents,
916 branch=branch,
916 branch=branch,
917 rev=rev)
917 rev=rev)
918
918
919 self.commits[rev] = cset
919 self.commits[rev] = cset
920 # The parents list is *shared* among self.paths and the
920 # The parents list is *shared* among self.paths and the
921 # commit object. Both will be updated below.
921 # commit object. Both will be updated below.
922 self.paths[rev] = (paths, cset.parents)
922 self.paths[rev] = (paths, cset.parents)
923 if self.child_cset and not self.child_cset.parents:
923 if self.child_cset and not self.child_cset.parents:
924 self.child_cset.parents[:] = [rev]
924 self.child_cset.parents[:] = [rev]
925 self.child_cset = cset
925 self.child_cset = cset
926 return cset, branched
926 return cset, branched
927
927
928 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
928 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
929 (self.module, from_revnum, to_revnum))
929 (self.module, from_revnum, to_revnum))
930
930
931 try:
931 try:
932 firstcset = None
932 firstcset = None
933 lastonbranch = False
933 lastonbranch = False
934 stream = self._getlog([self.module], from_revnum, to_revnum)
934 stream = self._getlog([self.module], from_revnum, to_revnum)
935 try:
935 try:
936 for entry in stream:
936 for entry in stream:
937 paths, revnum, author, date, message = entry
937 paths, revnum, author, date, message = entry
938 if revnum < self.startrev:
938 if revnum < self.startrev:
939 lastonbranch = True
939 lastonbranch = True
940 break
940 break
941 if not paths:
941 if not paths:
942 self.ui.debug('revision %d has no entries\n' % revnum)
942 self.ui.debug('revision %d has no entries\n' % revnum)
943 # If we ever leave the loop on an empty
943 # If we ever leave the loop on an empty
944 # revision, do not try to get a parent branch
944 # revision, do not try to get a parent branch
945 lastonbranch = lastonbranch or revnum == 0
945 lastonbranch = lastonbranch or revnum == 0
946 continue
946 continue
947 cset, lastonbranch = parselogentry(paths, revnum, author,
947 cset, lastonbranch = parselogentry(paths, revnum, author,
948 date, message)
948 date, message)
949 if cset:
949 if cset:
950 firstcset = cset
950 firstcset = cset
951 if lastonbranch:
951 if lastonbranch:
952 break
952 break
953 finally:
953 finally:
954 stream.close()
954 stream.close()
955
955
956 if not lastonbranch and firstcset and not firstcset.parents:
956 if not lastonbranch and firstcset and not firstcset.parents:
957 # The first revision of the sequence (the last fetched one)
957 # The first revision of the sequence (the last fetched one)
958 # has invalid parents if not a branch root. Find the parent
958 # has invalid parents if not a branch root. Find the parent
959 # revision now, if any.
959 # revision now, if any.
960 try:
960 try:
961 firstrevnum = self.revnum(firstcset.rev)
961 firstrevnum = self.revnum(firstcset.rev)
962 if firstrevnum > 1:
962 if firstrevnum > 1:
963 latest = self.latest(self.module, firstrevnum - 1)
963 latest = self.latest(self.module, firstrevnum - 1)
964 if latest:
964 if latest:
965 firstcset.parents.append(latest)
965 firstcset.parents.append(latest)
966 except SvnPathNotFound:
966 except SvnPathNotFound:
967 pass
967 pass
968 except svn.core.SubversionException as xxx_todo_changeme:
968 except svn.core.SubversionException as xxx_todo_changeme:
969 (inst, num) = xxx_todo_changeme.args
969 (inst, num) = xxx_todo_changeme.args
970 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
970 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
971 raise error.Abort(_('svn: branch has no revision %s')
971 raise error.Abort(_('svn: branch has no revision %s')
972 % to_revnum)
972 % to_revnum)
973 raise
973 raise
974
974
975 def getfile(self, file, rev):
975 def getfile(self, file, rev):
976 # TODO: ra.get_file transmits the whole file instead of diffs.
976 # TODO: ra.get_file transmits the whole file instead of diffs.
977 if file in self.removed:
977 if file in self.removed:
978 return None, None
978 return None, None
979 mode = ''
979 mode = ''
980 try:
980 try:
981 new_module, revnum = revsplit(rev)[1:]
981 new_module, revnum = revsplit(rev)[1:]
982 if self.module != new_module:
982 if self.module != new_module:
983 self.module = new_module
983 self.module = new_module
984 self.reparent(self.module)
984 self.reparent(self.module)
985 io = stringio()
985 io = stringio()
986 info = svn.ra.get_file(self.ra, file, revnum, io)
986 info = svn.ra.get_file(self.ra, file, revnum, io)
987 data = io.getvalue()
987 data = io.getvalue()
988 # ra.get_file() seems to keep a reference on the input buffer
988 # ra.get_file() seems to keep a reference on the input buffer
989 # preventing collection. Release it explicitly.
989 # preventing collection. Release it explicitly.
990 io.close()
990 io.close()
991 if isinstance(info, list):
991 if isinstance(info, list):
992 info = info[-1]
992 info = info[-1]
993 mode = ("svn:executable" in info) and 'x' or ''
993 mode = ("svn:executable" in info) and 'x' or ''
994 mode = ("svn:special" in info) and 'l' or mode
994 mode = ("svn:special" in info) and 'l' or mode
995 except svn.core.SubversionException as e:
995 except svn.core.SubversionException as e:
996 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
996 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
997 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
997 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
998 if e.apr_err in notfound: # File not found
998 if e.apr_err in notfound: # File not found
999 return None, None
999 return None, None
1000 raise
1000 raise
1001 if mode == 'l':
1001 if mode == 'l':
1002 link_prefix = "link "
1002 link_prefix = "link "
1003 if data.startswith(link_prefix):
1003 if data.startswith(link_prefix):
1004 data = data[len(link_prefix):]
1004 data = data[len(link_prefix):]
1005 return data, mode
1005 return data, mode
1006
1006
1007 def _iterfiles(self, path, revnum):
1007 def _iterfiles(self, path, revnum):
1008 """Enumerate all files in path at revnum, recursively."""
1008 """Enumerate all files in path at revnum, recursively."""
1009 path = path.strip('/')
1009 path = path.strip('/')
1010 pool = svn.core.Pool()
1010 pool = svn.core.Pool()
1011 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1011 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1012 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1012 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1013 if path:
1013 if path:
1014 path += '/'
1014 path += '/'
1015 return ((path + p) for p, e in entries.iteritems()
1015 return ((path + p) for p, e in entries.iteritems()
1016 if e.kind == svn.core.svn_node_file)
1016 if e.kind == svn.core.svn_node_file)
1017
1017
1018 def getrelpath(self, path, module=None):
1018 def getrelpath(self, path, module=None):
1019 if module is None:
1019 if module is None:
1020 module = self.module
1020 module = self.module
1021 # Given the repository url of this wc, say
1021 # Given the repository url of this wc, say
1022 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1022 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1023 # extract the "entry" portion (a relative path) from what
1023 # extract the "entry" portion (a relative path) from what
1024 # svn log --xml says, i.e.
1024 # svn log --xml says, i.e.
1025 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1025 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1026 # that is to say "tests/PloneTestCase.py"
1026 # that is to say "tests/PloneTestCase.py"
1027 if path.startswith(module):
1027 if path.startswith(module):
1028 relative = path.rstrip('/')[len(module):]
1028 relative = path.rstrip('/')[len(module):]
1029 if relative.startswith('/'):
1029 if relative.startswith('/'):
1030 return relative[1:]
1030 return relative[1:]
1031 elif relative == '':
1031 elif relative == '':
1032 return relative
1032 return relative
1033
1033
1034 # The path is outside our tracked tree...
1034 # The path is outside our tracked tree...
1035 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1035 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1036 return None
1036 return None
1037
1037
1038 def _checkpath(self, path, revnum, module=None):
1038 def _checkpath(self, path, revnum, module=None):
1039 if module is not None:
1039 if module is not None:
1040 prevmodule = self.reparent('')
1040 prevmodule = self.reparent('')
1041 path = module + '/' + path
1041 path = module + '/' + path
1042 try:
1042 try:
1043 # ra.check_path does not like leading slashes very much, it leads
1043 # ra.check_path does not like leading slashes very much, it leads
1044 # to PROPFIND subversion errors
1044 # to PROPFIND subversion errors
1045 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1045 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1046 finally:
1046 finally:
1047 if module is not None:
1047 if module is not None:
1048 self.reparent(prevmodule)
1048 self.reparent(prevmodule)
1049
1049
1050 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1050 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1051 strict_node_history=False):
1051 strict_node_history=False):
1052 # Normalize path names, svn >= 1.5 only wants paths relative to
1052 # Normalize path names, svn >= 1.5 only wants paths relative to
1053 # supplied URL
1053 # supplied URL
1054 relpaths = []
1054 relpaths = []
1055 for p in paths:
1055 for p in paths:
1056 if not p.startswith('/'):
1056 if not p.startswith('/'):
1057 p = self.module + '/' + p
1057 p = self.module + '/' + p
1058 relpaths.append(p.strip('/'))
1058 relpaths.append(p.strip('/'))
1059 args = [self.baseurl, relpaths, start, end, limit,
1059 args = [self.baseurl, relpaths, start, end, limit,
1060 discover_changed_paths, strict_node_history]
1060 discover_changed_paths, strict_node_history]
1061 # developer config: convert.svn.debugsvnlog
1061 # developer config: convert.svn.debugsvnlog
1062 if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
1062 if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
1063 return directlogstream(*args)
1063 return directlogstream(*args)
1064 arg = encodeargs(args)
1064 arg = encodeargs(args)
1065 hgexe = util.hgexecutable()
1065 hgexe = util.hgexecutable()
1066 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1066 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1067 stdin, stdout = util.popen2(util.quotecommand(cmd))
1067 stdin, stdout = util.popen2(util.quotecommand(cmd))
1068 stdin.write(arg)
1068 stdin.write(arg)
1069 try:
1069 try:
1070 stdin.close()
1070 stdin.close()
1071 except IOError:
1071 except IOError:
1072 raise error.Abort(_('Mercurial failed to run itself, check'
1072 raise error.Abort(_('Mercurial failed to run itself, check'
1073 ' hg executable is in PATH'))
1073 ' hg executable is in PATH'))
1074 return logstream(stdout)
1074 return logstream(stdout)
1075
1075
1076 pre_revprop_change = '''#!/bin/sh
1076 pre_revprop_change = '''#!/bin/sh
1077
1077
1078 REPOS="$1"
1078 REPOS="$1"
1079 REV="$2"
1079 REV="$2"
1080 USER="$3"
1080 USER="$3"
1081 PROPNAME="$4"
1081 PROPNAME="$4"
1082 ACTION="$5"
1082 ACTION="$5"
1083
1083
1084 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1084 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1085 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1085 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1086 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1086 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1087
1087
1088 echo "Changing prohibited revision property" >&2
1088 echo "Changing prohibited revision property" >&2
1089 exit 1
1089 exit 1
1090 '''
1090 '''
1091
1091
1092 class svn_sink(converter_sink, commandline):
1092 class svn_sink(converter_sink, commandline):
1093 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1093 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1094 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1094 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1095
1095
1096 def prerun(self):
1096 def prerun(self):
1097 if self.wc:
1097 if self.wc:
1098 os.chdir(self.wc)
1098 os.chdir(self.wc)
1099
1099
1100 def postrun(self):
1100 def postrun(self):
1101 if self.wc:
1101 if self.wc:
1102 os.chdir(self.cwd)
1102 os.chdir(self.cwd)
1103
1103
1104 def join(self, name):
1104 def join(self, name):
1105 return os.path.join(self.wc, '.svn', name)
1105 return os.path.join(self.wc, '.svn', name)
1106
1106
1107 def revmapfile(self):
1107 def revmapfile(self):
1108 return self.join('hg-shamap')
1108 return self.join('hg-shamap')
1109
1109
1110 def authorfile(self):
1110 def authorfile(self):
1111 return self.join('hg-authormap')
1111 return self.join('hg-authormap')
1112
1112
1113 def __init__(self, ui, path):
1113 def __init__(self, ui, path):
1114
1114
1115 converter_sink.__init__(self, ui, path)
1115 converter_sink.__init__(self, ui, path)
1116 commandline.__init__(self, ui, 'svn')
1116 commandline.__init__(self, ui, 'svn')
1117 self.delete = []
1117 self.delete = []
1118 self.setexec = []
1118 self.setexec = []
1119 self.delexec = []
1119 self.delexec = []
1120 self.copies = []
1120 self.copies = []
1121 self.wc = None
1121 self.wc = None
1122 self.cwd = pycompat.getcwd()
1122 self.cwd = pycompat.getcwd()
1123
1123
1124 created = False
1124 created = False
1125 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1125 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1126 self.wc = os.path.realpath(path)
1126 self.wc = os.path.realpath(path)
1127 self.run0('update')
1127 self.run0('update')
1128 else:
1128 else:
1129 if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
1129 if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
1130 path = os.path.realpath(path)
1130 path = os.path.realpath(path)
1131 if os.path.isdir(os.path.dirname(path)):
1131 if os.path.isdir(os.path.dirname(path)):
1132 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1132 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1133 ui.status(_('initializing svn repository %r\n') %
1133 ui.status(_('initializing svn repository %r\n') %
1134 os.path.basename(path))
1134 os.path.basename(path))
1135 commandline(ui, 'svnadmin').run0('create', path)
1135 commandline(ui, 'svnadmin').run0('create', path)
1136 created = path
1136 created = path
1137 path = util.normpath(path)
1137 path = util.normpath(path)
1138 if not path.startswith('/'):
1138 if not path.startswith('/'):
1139 path = '/' + path
1139 path = '/' + path
1140 path = 'file://' + path
1140 path = 'file://' + path
1141
1141
1142 wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
1142 wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
1143 '-wc')
1143 '-wc')
1144 ui.status(_('initializing svn working copy %r\n')
1144 ui.status(_('initializing svn working copy %r\n')
1145 % os.path.basename(wcpath))
1145 % os.path.basename(wcpath))
1146 self.run0('checkout', path, wcpath)
1146 self.run0('checkout', path, wcpath)
1147
1147
1148 self.wc = wcpath
1148 self.wc = wcpath
1149 self.opener = scmutil.opener(self.wc)
1149 self.opener = scmutil.vfs(self.wc)
1150 self.wopener = scmutil.opener(self.wc)
1150 self.wopener = scmutil.vfs(self.wc)
1151 self.childmap = mapfile(ui, self.join('hg-childmap'))
1151 self.childmap = mapfile(ui, self.join('hg-childmap'))
1152 if util.checkexec(self.wc):
1152 if util.checkexec(self.wc):
1153 self.is_exec = util.isexec
1153 self.is_exec = util.isexec
1154 else:
1154 else:
1155 self.is_exec = None
1155 self.is_exec = None
1156
1156
1157 if created:
1157 if created:
1158 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1158 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1159 fp = open(hook, 'w')
1159 fp = open(hook, 'w')
1160 fp.write(pre_revprop_change)
1160 fp.write(pre_revprop_change)
1161 fp.close()
1161 fp.close()
1162 util.setflags(hook, False, True)
1162 util.setflags(hook, False, True)
1163
1163
1164 output = self.run0('info')
1164 output = self.run0('info')
1165 self.uuid = self.uuid_re.search(output).group(1).strip()
1165 self.uuid = self.uuid_re.search(output).group(1).strip()
1166
1166
1167 def wjoin(self, *names):
1167 def wjoin(self, *names):
1168 return os.path.join(self.wc, *names)
1168 return os.path.join(self.wc, *names)
1169
1169
1170 @propertycache
1170 @propertycache
1171 def manifest(self):
1171 def manifest(self):
1172 # As of svn 1.7, the "add" command fails when receiving
1172 # As of svn 1.7, the "add" command fails when receiving
1173 # already tracked entries, so we have to track and filter them
1173 # already tracked entries, so we have to track and filter them
1174 # ourselves.
1174 # ourselves.
1175 m = set()
1175 m = set()
1176 output = self.run0('ls', recursive=True, xml=True)
1176 output = self.run0('ls', recursive=True, xml=True)
1177 doc = xml.dom.minidom.parseString(output)
1177 doc = xml.dom.minidom.parseString(output)
1178 for e in doc.getElementsByTagName('entry'):
1178 for e in doc.getElementsByTagName('entry'):
1179 for n in e.childNodes:
1179 for n in e.childNodes:
1180 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1180 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1181 continue
1181 continue
1182 name = ''.join(c.data for c in n.childNodes
1182 name = ''.join(c.data for c in n.childNodes
1183 if c.nodeType == c.TEXT_NODE)
1183 if c.nodeType == c.TEXT_NODE)
1184 # Entries are compared with names coming from
1184 # Entries are compared with names coming from
1185 # mercurial, so bytes with undefined encoding. Our
1185 # mercurial, so bytes with undefined encoding. Our
1186 # best bet is to assume they are in local
1186 # best bet is to assume they are in local
1187 # encoding. They will be passed to command line calls
1187 # encoding. They will be passed to command line calls
1188 # later anyway, so they better be.
1188 # later anyway, so they better be.
1189 m.add(encoding.tolocal(name.encode('utf-8')))
1189 m.add(encoding.tolocal(name.encode('utf-8')))
1190 break
1190 break
1191 return m
1191 return m
1192
1192
1193 def putfile(self, filename, flags, data):
1193 def putfile(self, filename, flags, data):
1194 if 'l' in flags:
1194 if 'l' in flags:
1195 self.wopener.symlink(data, filename)
1195 self.wopener.symlink(data, filename)
1196 else:
1196 else:
1197 try:
1197 try:
1198 if os.path.islink(self.wjoin(filename)):
1198 if os.path.islink(self.wjoin(filename)):
1199 os.unlink(filename)
1199 os.unlink(filename)
1200 except OSError:
1200 except OSError:
1201 pass
1201 pass
1202 self.wopener.write(filename, data)
1202 self.wopener.write(filename, data)
1203
1203
1204 if self.is_exec:
1204 if self.is_exec:
1205 if self.is_exec(self.wjoin(filename)):
1205 if self.is_exec(self.wjoin(filename)):
1206 if 'x' not in flags:
1206 if 'x' not in flags:
1207 self.delexec.append(filename)
1207 self.delexec.append(filename)
1208 else:
1208 else:
1209 if 'x' in flags:
1209 if 'x' in flags:
1210 self.setexec.append(filename)
1210 self.setexec.append(filename)
1211 util.setflags(self.wjoin(filename), False, 'x' in flags)
1211 util.setflags(self.wjoin(filename), False, 'x' in flags)
1212
1212
1213 def _copyfile(self, source, dest):
1213 def _copyfile(self, source, dest):
1214 # SVN's copy command pukes if the destination file exists, but
1214 # SVN's copy command pukes if the destination file exists, but
1215 # our copyfile method expects to record a copy that has
1215 # our copyfile method expects to record a copy that has
1216 # already occurred. Cross the semantic gap.
1216 # already occurred. Cross the semantic gap.
1217 wdest = self.wjoin(dest)
1217 wdest = self.wjoin(dest)
1218 exists = os.path.lexists(wdest)
1218 exists = os.path.lexists(wdest)
1219 if exists:
1219 if exists:
1220 fd, tempname = tempfile.mkstemp(
1220 fd, tempname = tempfile.mkstemp(
1221 prefix='hg-copy-', dir=os.path.dirname(wdest))
1221 prefix='hg-copy-', dir=os.path.dirname(wdest))
1222 os.close(fd)
1222 os.close(fd)
1223 os.unlink(tempname)
1223 os.unlink(tempname)
1224 os.rename(wdest, tempname)
1224 os.rename(wdest, tempname)
1225 try:
1225 try:
1226 self.run0('copy', source, dest)
1226 self.run0('copy', source, dest)
1227 finally:
1227 finally:
1228 self.manifest.add(dest)
1228 self.manifest.add(dest)
1229 if exists:
1229 if exists:
1230 try:
1230 try:
1231 os.unlink(wdest)
1231 os.unlink(wdest)
1232 except OSError:
1232 except OSError:
1233 pass
1233 pass
1234 os.rename(tempname, wdest)
1234 os.rename(tempname, wdest)
1235
1235
1236 def dirs_of(self, files):
1236 def dirs_of(self, files):
1237 dirs = set()
1237 dirs = set()
1238 for f in files:
1238 for f in files:
1239 if os.path.isdir(self.wjoin(f)):
1239 if os.path.isdir(self.wjoin(f)):
1240 dirs.add(f)
1240 dirs.add(f)
1241 i = len(f)
1241 i = len(f)
1242 for i in iter(lambda: f.rfind('/', 0, i), -1):
1242 for i in iter(lambda: f.rfind('/', 0, i), -1):
1243 dirs.add(f[:i])
1243 dirs.add(f[:i])
1244 return dirs
1244 return dirs
1245
1245
1246 def add_dirs(self, files):
1246 def add_dirs(self, files):
1247 add_dirs = [d for d in sorted(self.dirs_of(files))
1247 add_dirs = [d for d in sorted(self.dirs_of(files))
1248 if d not in self.manifest]
1248 if d not in self.manifest]
1249 if add_dirs:
1249 if add_dirs:
1250 self.manifest.update(add_dirs)
1250 self.manifest.update(add_dirs)
1251 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1251 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1252 return add_dirs
1252 return add_dirs
1253
1253
1254 def add_files(self, files):
1254 def add_files(self, files):
1255 files = [f for f in files if f not in self.manifest]
1255 files = [f for f in files if f not in self.manifest]
1256 if files:
1256 if files:
1257 self.manifest.update(files)
1257 self.manifest.update(files)
1258 self.xargs(files, 'add', quiet=True)
1258 self.xargs(files, 'add', quiet=True)
1259 return files
1259 return files
1260
1260
1261 def addchild(self, parent, child):
1261 def addchild(self, parent, child):
1262 self.childmap[parent] = child
1262 self.childmap[parent] = child
1263
1263
1264 def revid(self, rev):
1264 def revid(self, rev):
1265 return u"svn:%s@%s" % (self.uuid, rev)
1265 return u"svn:%s@%s" % (self.uuid, rev)
1266
1266
1267 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1267 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1268 cleanp2):
1268 cleanp2):
1269 for parent in parents:
1269 for parent in parents:
1270 try:
1270 try:
1271 return self.revid(self.childmap[parent])
1271 return self.revid(self.childmap[parent])
1272 except KeyError:
1272 except KeyError:
1273 pass
1273 pass
1274
1274
1275 # Apply changes to working copy
1275 # Apply changes to working copy
1276 for f, v in files:
1276 for f, v in files:
1277 data, mode = source.getfile(f, v)
1277 data, mode = source.getfile(f, v)
1278 if data is None:
1278 if data is None:
1279 self.delete.append(f)
1279 self.delete.append(f)
1280 else:
1280 else:
1281 self.putfile(f, mode, data)
1281 self.putfile(f, mode, data)
1282 if f in copies:
1282 if f in copies:
1283 self.copies.append([copies[f], f])
1283 self.copies.append([copies[f], f])
1284 if full:
1284 if full:
1285 self.delete.extend(sorted(self.manifest.difference(files)))
1285 self.delete.extend(sorted(self.manifest.difference(files)))
1286 files = [f[0] for f in files]
1286 files = [f[0] for f in files]
1287
1287
1288 entries = set(self.delete)
1288 entries = set(self.delete)
1289 files = frozenset(files)
1289 files = frozenset(files)
1290 entries.update(self.add_dirs(files.difference(entries)))
1290 entries.update(self.add_dirs(files.difference(entries)))
1291 if self.copies:
1291 if self.copies:
1292 for s, d in self.copies:
1292 for s, d in self.copies:
1293 self._copyfile(s, d)
1293 self._copyfile(s, d)
1294 self.copies = []
1294 self.copies = []
1295 if self.delete:
1295 if self.delete:
1296 self.xargs(self.delete, 'delete')
1296 self.xargs(self.delete, 'delete')
1297 for f in self.delete:
1297 for f in self.delete:
1298 self.manifest.remove(f)
1298 self.manifest.remove(f)
1299 self.delete = []
1299 self.delete = []
1300 entries.update(self.add_files(files.difference(entries)))
1300 entries.update(self.add_files(files.difference(entries)))
1301 if self.delexec:
1301 if self.delexec:
1302 self.xargs(self.delexec, 'propdel', 'svn:executable')
1302 self.xargs(self.delexec, 'propdel', 'svn:executable')
1303 self.delexec = []
1303 self.delexec = []
1304 if self.setexec:
1304 if self.setexec:
1305 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1305 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1306 self.setexec = []
1306 self.setexec = []
1307
1307
1308 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1308 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1309 fp = os.fdopen(fd, pycompat.sysstr('w'))
1309 fp = os.fdopen(fd, pycompat.sysstr('w'))
1310 fp.write(commit.desc)
1310 fp.write(commit.desc)
1311 fp.close()
1311 fp.close()
1312 try:
1312 try:
1313 output = self.run0('commit',
1313 output = self.run0('commit',
1314 username=util.shortuser(commit.author),
1314 username=util.shortuser(commit.author),
1315 file=messagefile,
1315 file=messagefile,
1316 encoding='utf-8')
1316 encoding='utf-8')
1317 try:
1317 try:
1318 rev = self.commit_re.search(output).group(1)
1318 rev = self.commit_re.search(output).group(1)
1319 except AttributeError:
1319 except AttributeError:
1320 if parents and not files:
1320 if parents and not files:
1321 return parents[0]
1321 return parents[0]
1322 self.ui.warn(_('unexpected svn output:\n'))
1322 self.ui.warn(_('unexpected svn output:\n'))
1323 self.ui.warn(output)
1323 self.ui.warn(output)
1324 raise error.Abort(_('unable to cope with svn output'))
1324 raise error.Abort(_('unable to cope with svn output'))
1325 if commit.rev:
1325 if commit.rev:
1326 self.run('propset', 'hg:convert-rev', commit.rev,
1326 self.run('propset', 'hg:convert-rev', commit.rev,
1327 revprop=True, revision=rev)
1327 revprop=True, revision=rev)
1328 if commit.branch and commit.branch != 'default':
1328 if commit.branch and commit.branch != 'default':
1329 self.run('propset', 'hg:convert-branch', commit.branch,
1329 self.run('propset', 'hg:convert-branch', commit.branch,
1330 revprop=True, revision=rev)
1330 revprop=True, revision=rev)
1331 for parent in parents:
1331 for parent in parents:
1332 self.addchild(parent, rev)
1332 self.addchild(parent, rev)
1333 return self.revid(rev)
1333 return self.revid(rev)
1334 finally:
1334 finally:
1335 os.unlink(messagefile)
1335 os.unlink(messagefile)
1336
1336
1337 def puttags(self, tags):
1337 def puttags(self, tags):
1338 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1338 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1339 return None, None
1339 return None, None
1340
1340
1341 def hascommitfrommap(self, rev):
1341 def hascommitfrommap(self, rev):
1342 # We trust that revisions referenced in a map still is present
1342 # We trust that revisions referenced in a map still is present
1343 # TODO: implement something better if necessary and feasible
1343 # TODO: implement something better if necessary and feasible
1344 return True
1344 return True
1345
1345
1346 def hascommitforsplicemap(self, rev):
1346 def hascommitforsplicemap(self, rev):
1347 # This is not correct as one can convert to an existing subversion
1347 # This is not correct as one can convert to an existing subversion
1348 # repository and childmap would not list all revisions. Too bad.
1348 # repository and childmap would not list all revisions. Too bad.
1349 if rev in self.childmap:
1349 if rev in self.childmap:
1350 return True
1350 return True
1351 raise error.Abort(_('splice map revision %s not found in subversion '
1351 raise error.Abort(_('splice map revision %s not found in subversion '
1352 'child map (revision lookups are not implemented)')
1352 'child map (revision lookups are not implemented)')
1353 % rev)
1353 % rev)
@@ -1,666 +1,666 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import copy
12 import copy
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import platform
15 import platform
16 import stat
16 import stat
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19
19
20 from mercurial import (
20 from mercurial import (
21 dirstate,
21 dirstate,
22 encoding,
22 encoding,
23 error,
23 error,
24 httpconnection,
24 httpconnection,
25 match as matchmod,
25 match as matchmod,
26 node,
26 node,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 util,
29 util,
30 )
30 )
31
31
32 shortname = '.hglf'
32 shortname = '.hglf'
33 shortnameslash = shortname + '/'
33 shortnameslash = shortname + '/'
34 longname = 'largefiles'
34 longname = 'largefiles'
35
35
36 # -- Private worker functions ------------------------------------------
36 # -- Private worker functions ------------------------------------------
37
37
38 def getminsize(ui, assumelfiles, opt, default=10):
38 def getminsize(ui, assumelfiles, opt, default=10):
39 lfsize = opt
39 lfsize = opt
40 if not lfsize and assumelfiles:
40 if not lfsize and assumelfiles:
41 lfsize = ui.config(longname, 'minsize', default=default)
41 lfsize = ui.config(longname, 'minsize', default=default)
42 if lfsize:
42 if lfsize:
43 try:
43 try:
44 lfsize = float(lfsize)
44 lfsize = float(lfsize)
45 except ValueError:
45 except ValueError:
46 raise error.Abort(_('largefiles: size must be number (not %s)\n')
46 raise error.Abort(_('largefiles: size must be number (not %s)\n')
47 % lfsize)
47 % lfsize)
48 if lfsize is None:
48 if lfsize is None:
49 raise error.Abort(_('minimum size for largefiles must be specified'))
49 raise error.Abort(_('minimum size for largefiles must be specified'))
50 return lfsize
50 return lfsize
51
51
52 def link(src, dest):
52 def link(src, dest):
53 """Try to create hardlink - if that fails, efficiently make a copy."""
53 """Try to create hardlink - if that fails, efficiently make a copy."""
54 util.makedirs(os.path.dirname(dest))
54 util.makedirs(os.path.dirname(dest))
55 try:
55 try:
56 util.oslink(src, dest)
56 util.oslink(src, dest)
57 except OSError:
57 except OSError:
58 # if hardlinks fail, fallback on atomic copy
58 # if hardlinks fail, fallback on atomic copy
59 with open(src, 'rb') as srcf:
59 with open(src, 'rb') as srcf:
60 with util.atomictempfile(dest) as dstf:
60 with util.atomictempfile(dest) as dstf:
61 for chunk in util.filechunkiter(srcf):
61 for chunk in util.filechunkiter(srcf):
62 dstf.write(chunk)
62 dstf.write(chunk)
63 os.chmod(dest, os.stat(src).st_mode)
63 os.chmod(dest, os.stat(src).st_mode)
64
64
65 def usercachepath(ui, hash):
65 def usercachepath(ui, hash):
66 '''Return the correct location in the "global" largefiles cache for a file
66 '''Return the correct location in the "global" largefiles cache for a file
67 with the given hash.
67 with the given hash.
68 This cache is used for sharing of largefiles across repositories - both
68 This cache is used for sharing of largefiles across repositories - both
69 to preserve download bandwidth and storage space.'''
69 to preserve download bandwidth and storage space.'''
70 return os.path.join(_usercachedir(ui), hash)
70 return os.path.join(_usercachedir(ui), hash)
71
71
72 def _usercachedir(ui):
72 def _usercachedir(ui):
73 '''Return the location of the "global" largefiles cache.'''
73 '''Return the location of the "global" largefiles cache.'''
74 path = ui.configpath(longname, 'usercache', None)
74 path = ui.configpath(longname, 'usercache', None)
75 if path:
75 if path:
76 return path
76 return path
77 if pycompat.osname == 'nt':
77 if pycompat.osname == 'nt':
78 appdata = encoding.environ.get('LOCALAPPDATA',\
78 appdata = encoding.environ.get('LOCALAPPDATA',\
79 encoding.environ.get('APPDATA'))
79 encoding.environ.get('APPDATA'))
80 if appdata:
80 if appdata:
81 return os.path.join(appdata, longname)
81 return os.path.join(appdata, longname)
82 elif platform.system() == 'Darwin':
82 elif platform.system() == 'Darwin':
83 home = encoding.environ.get('HOME')
83 home = encoding.environ.get('HOME')
84 if home:
84 if home:
85 return os.path.join(home, 'Library', 'Caches', longname)
85 return os.path.join(home, 'Library', 'Caches', longname)
86 elif pycompat.osname == 'posix':
86 elif pycompat.osname == 'posix':
87 path = encoding.environ.get('XDG_CACHE_HOME')
87 path = encoding.environ.get('XDG_CACHE_HOME')
88 if path:
88 if path:
89 return os.path.join(path, longname)
89 return os.path.join(path, longname)
90 home = encoding.environ.get('HOME')
90 home = encoding.environ.get('HOME')
91 if home:
91 if home:
92 return os.path.join(home, '.cache', longname)
92 return os.path.join(home, '.cache', longname)
93 else:
93 else:
94 raise error.Abort(_('unknown operating system: %s\n')
94 raise error.Abort(_('unknown operating system: %s\n')
95 % pycompat.osname)
95 % pycompat.osname)
96 raise error.Abort(_('unknown %s usercache location') % longname)
96 raise error.Abort(_('unknown %s usercache location') % longname)
97
97
98 def inusercache(ui, hash):
98 def inusercache(ui, hash):
99 path = usercachepath(ui, hash)
99 path = usercachepath(ui, hash)
100 return os.path.exists(path)
100 return os.path.exists(path)
101
101
102 def findfile(repo, hash):
102 def findfile(repo, hash):
103 '''Return store path of the largefile with the specified hash.
103 '''Return store path of the largefile with the specified hash.
104 As a side effect, the file might be linked from user cache.
104 As a side effect, the file might be linked from user cache.
105 Return None if the file can't be found locally.'''
105 Return None if the file can't be found locally.'''
106 path, exists = findstorepath(repo, hash)
106 path, exists = findstorepath(repo, hash)
107 if exists:
107 if exists:
108 repo.ui.note(_('found %s in store\n') % hash)
108 repo.ui.note(_('found %s in store\n') % hash)
109 return path
109 return path
110 elif inusercache(repo.ui, hash):
110 elif inusercache(repo.ui, hash):
111 repo.ui.note(_('found %s in system cache\n') % hash)
111 repo.ui.note(_('found %s in system cache\n') % hash)
112 path = storepath(repo, hash)
112 path = storepath(repo, hash)
113 link(usercachepath(repo.ui, hash), path)
113 link(usercachepath(repo.ui, hash), path)
114 return path
114 return path
115 return None
115 return None
116
116
117 class largefilesdirstate(dirstate.dirstate):
117 class largefilesdirstate(dirstate.dirstate):
118 def __getitem__(self, key):
118 def __getitem__(self, key):
119 return super(largefilesdirstate, self).__getitem__(unixpath(key))
119 return super(largefilesdirstate, self).__getitem__(unixpath(key))
120 def normal(self, f):
120 def normal(self, f):
121 return super(largefilesdirstate, self).normal(unixpath(f))
121 return super(largefilesdirstate, self).normal(unixpath(f))
122 def remove(self, f):
122 def remove(self, f):
123 return super(largefilesdirstate, self).remove(unixpath(f))
123 return super(largefilesdirstate, self).remove(unixpath(f))
124 def add(self, f):
124 def add(self, f):
125 return super(largefilesdirstate, self).add(unixpath(f))
125 return super(largefilesdirstate, self).add(unixpath(f))
126 def drop(self, f):
126 def drop(self, f):
127 return super(largefilesdirstate, self).drop(unixpath(f))
127 return super(largefilesdirstate, self).drop(unixpath(f))
128 def forget(self, f):
128 def forget(self, f):
129 return super(largefilesdirstate, self).forget(unixpath(f))
129 return super(largefilesdirstate, self).forget(unixpath(f))
130 def normallookup(self, f):
130 def normallookup(self, f):
131 return super(largefilesdirstate, self).normallookup(unixpath(f))
131 return super(largefilesdirstate, self).normallookup(unixpath(f))
132 def _ignore(self, f):
132 def _ignore(self, f):
133 return False
133 return False
134 def write(self, tr=False):
134 def write(self, tr=False):
135 # (1) disable PENDING mode always
135 # (1) disable PENDING mode always
136 # (lfdirstate isn't yet managed as a part of the transaction)
136 # (lfdirstate isn't yet managed as a part of the transaction)
137 # (2) avoid develwarn 'use dirstate.write with ....'
137 # (2) avoid develwarn 'use dirstate.write with ....'
138 super(largefilesdirstate, self).write(None)
138 super(largefilesdirstate, self).write(None)
139
139
140 def openlfdirstate(ui, repo, create=True):
140 def openlfdirstate(ui, repo, create=True):
141 '''
141 '''
142 Return a dirstate object that tracks largefiles: i.e. its root is
142 Return a dirstate object that tracks largefiles: i.e. its root is
143 the repo root, but it is saved in .hg/largefiles/dirstate.
143 the repo root, but it is saved in .hg/largefiles/dirstate.
144 '''
144 '''
145 vfs = repo.vfs
145 vfs = repo.vfs
146 lfstoredir = longname
146 lfstoredir = longname
147 opener = scmutil.opener(vfs.join(lfstoredir))
147 opener = scmutil.vfs(vfs.join(lfstoredir))
148 lfdirstate = largefilesdirstate(opener, ui, repo.root,
148 lfdirstate = largefilesdirstate(opener, ui, repo.root,
149 repo.dirstate._validate)
149 repo.dirstate._validate)
150
150
151 # If the largefiles dirstate does not exist, populate and create
151 # If the largefiles dirstate does not exist, populate and create
152 # it. This ensures that we create it on the first meaningful
152 # it. This ensures that we create it on the first meaningful
153 # largefiles operation in a new clone.
153 # largefiles operation in a new clone.
154 if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
154 if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
155 matcher = getstandinmatcher(repo)
155 matcher = getstandinmatcher(repo)
156 standins = repo.dirstate.walk(matcher, [], False, False)
156 standins = repo.dirstate.walk(matcher, [], False, False)
157
157
158 if len(standins) > 0:
158 if len(standins) > 0:
159 vfs.makedirs(lfstoredir)
159 vfs.makedirs(lfstoredir)
160
160
161 for standin in standins:
161 for standin in standins:
162 lfile = splitstandin(standin)
162 lfile = splitstandin(standin)
163 lfdirstate.normallookup(lfile)
163 lfdirstate.normallookup(lfile)
164 return lfdirstate
164 return lfdirstate
165
165
166 def lfdirstatestatus(lfdirstate, repo):
166 def lfdirstatestatus(lfdirstate, repo):
167 wctx = repo['.']
167 wctx = repo['.']
168 match = matchmod.always(repo.root, repo.getcwd())
168 match = matchmod.always(repo.root, repo.getcwd())
169 unsure, s = lfdirstate.status(match, [], False, False, False)
169 unsure, s = lfdirstate.status(match, [], False, False, False)
170 modified, clean = s.modified, s.clean
170 modified, clean = s.modified, s.clean
171 for lfile in unsure:
171 for lfile in unsure:
172 try:
172 try:
173 fctx = wctx[standin(lfile)]
173 fctx = wctx[standin(lfile)]
174 except LookupError:
174 except LookupError:
175 fctx = None
175 fctx = None
176 if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
176 if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
177 modified.append(lfile)
177 modified.append(lfile)
178 else:
178 else:
179 clean.append(lfile)
179 clean.append(lfile)
180 lfdirstate.normal(lfile)
180 lfdirstate.normal(lfile)
181 return s
181 return s
182
182
183 def listlfiles(repo, rev=None, matcher=None):
183 def listlfiles(repo, rev=None, matcher=None):
184 '''return a list of largefiles in the working copy or the
184 '''return a list of largefiles in the working copy or the
185 specified changeset'''
185 specified changeset'''
186
186
187 if matcher is None:
187 if matcher is None:
188 matcher = getstandinmatcher(repo)
188 matcher = getstandinmatcher(repo)
189
189
190 # ignore unknown files in working directory
190 # ignore unknown files in working directory
191 return [splitstandin(f)
191 return [splitstandin(f)
192 for f in repo[rev].walk(matcher)
192 for f in repo[rev].walk(matcher)
193 if rev is not None or repo.dirstate[f] != '?']
193 if rev is not None or repo.dirstate[f] != '?']
194
194
195 def instore(repo, hash, forcelocal=False):
195 def instore(repo, hash, forcelocal=False):
196 '''Return true if a largefile with the given hash exists in the store'''
196 '''Return true if a largefile with the given hash exists in the store'''
197 return os.path.exists(storepath(repo, hash, forcelocal))
197 return os.path.exists(storepath(repo, hash, forcelocal))
198
198
199 def storepath(repo, hash, forcelocal=False):
199 def storepath(repo, hash, forcelocal=False):
200 '''Return the correct location in the repository largefiles store for a
200 '''Return the correct location in the repository largefiles store for a
201 file with the given hash.'''
201 file with the given hash.'''
202 if not forcelocal and repo.shared():
202 if not forcelocal and repo.shared():
203 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
203 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
204 return repo.join(longname, hash)
204 return repo.join(longname, hash)
205
205
206 def findstorepath(repo, hash):
206 def findstorepath(repo, hash):
207 '''Search through the local store path(s) to find the file for the given
207 '''Search through the local store path(s) to find the file for the given
208 hash. If the file is not found, its path in the primary store is returned.
208 hash. If the file is not found, its path in the primary store is returned.
209 The return value is a tuple of (path, exists(path)).
209 The return value is a tuple of (path, exists(path)).
210 '''
210 '''
211 # For shared repos, the primary store is in the share source. But for
211 # For shared repos, the primary store is in the share source. But for
212 # backward compatibility, force a lookup in the local store if it wasn't
212 # backward compatibility, force a lookup in the local store if it wasn't
213 # found in the share source.
213 # found in the share source.
214 path = storepath(repo, hash, False)
214 path = storepath(repo, hash, False)
215
215
216 if instore(repo, hash):
216 if instore(repo, hash):
217 return (path, True)
217 return (path, True)
218 elif repo.shared() and instore(repo, hash, True):
218 elif repo.shared() and instore(repo, hash, True):
219 return storepath(repo, hash, True), True
219 return storepath(repo, hash, True), True
220
220
221 return (path, False)
221 return (path, False)
222
222
223 def copyfromcache(repo, hash, filename):
223 def copyfromcache(repo, hash, filename):
224 '''Copy the specified largefile from the repo or system cache to
224 '''Copy the specified largefile from the repo or system cache to
225 filename in the repository. Return true on success or false if the
225 filename in the repository. Return true on success or false if the
226 file was not found in either cache (which should not happened:
226 file was not found in either cache (which should not happened:
227 this is meant to be called only after ensuring that the needed
227 this is meant to be called only after ensuring that the needed
228 largefile exists in the cache).'''
228 largefile exists in the cache).'''
229 wvfs = repo.wvfs
229 wvfs = repo.wvfs
230 path = findfile(repo, hash)
230 path = findfile(repo, hash)
231 if path is None:
231 if path is None:
232 return False
232 return False
233 wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
233 wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
234 # The write may fail before the file is fully written, but we
234 # The write may fail before the file is fully written, but we
235 # don't use atomic writes in the working copy.
235 # don't use atomic writes in the working copy.
236 with open(path, 'rb') as srcfd:
236 with open(path, 'rb') as srcfd:
237 with wvfs(filename, 'wb') as destfd:
237 with wvfs(filename, 'wb') as destfd:
238 gothash = copyandhash(
238 gothash = copyandhash(
239 util.filechunkiter(srcfd), destfd)
239 util.filechunkiter(srcfd), destfd)
240 if gothash != hash:
240 if gothash != hash:
241 repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
241 repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
242 % (filename, path, gothash))
242 % (filename, path, gothash))
243 wvfs.unlink(filename)
243 wvfs.unlink(filename)
244 return False
244 return False
245 return True
245 return True
246
246
247 def copytostore(repo, rev, file, uploaded=False):
247 def copytostore(repo, rev, file, uploaded=False):
248 wvfs = repo.wvfs
248 wvfs = repo.wvfs
249 hash = readstandin(repo, file, rev)
249 hash = readstandin(repo, file, rev)
250 if instore(repo, hash):
250 if instore(repo, hash):
251 return
251 return
252 if wvfs.exists(file):
252 if wvfs.exists(file):
253 copytostoreabsolute(repo, wvfs.join(file), hash)
253 copytostoreabsolute(repo, wvfs.join(file), hash)
254 else:
254 else:
255 repo.ui.warn(_("%s: largefile %s not available from local store\n") %
255 repo.ui.warn(_("%s: largefile %s not available from local store\n") %
256 (file, hash))
256 (file, hash))
257
257
258 def copyalltostore(repo, node):
258 def copyalltostore(repo, node):
259 '''Copy all largefiles in a given revision to the store'''
259 '''Copy all largefiles in a given revision to the store'''
260
260
261 ctx = repo[node]
261 ctx = repo[node]
262 for filename in ctx.files():
262 for filename in ctx.files():
263 if isstandin(filename) and filename in ctx.manifest():
263 if isstandin(filename) and filename in ctx.manifest():
264 realfile = splitstandin(filename)
264 realfile = splitstandin(filename)
265 copytostore(repo, ctx.node(), realfile)
265 copytostore(repo, ctx.node(), realfile)
266
266
267 def copytostoreabsolute(repo, file, hash):
267 def copytostoreabsolute(repo, file, hash):
268 if inusercache(repo.ui, hash):
268 if inusercache(repo.ui, hash):
269 link(usercachepath(repo.ui, hash), storepath(repo, hash))
269 link(usercachepath(repo.ui, hash), storepath(repo, hash))
270 else:
270 else:
271 util.makedirs(os.path.dirname(storepath(repo, hash)))
271 util.makedirs(os.path.dirname(storepath(repo, hash)))
272 with open(file, 'rb') as srcf:
272 with open(file, 'rb') as srcf:
273 with util.atomictempfile(storepath(repo, hash),
273 with util.atomictempfile(storepath(repo, hash),
274 createmode=repo.store.createmode) as dstf:
274 createmode=repo.store.createmode) as dstf:
275 for chunk in util.filechunkiter(srcf):
275 for chunk in util.filechunkiter(srcf):
276 dstf.write(chunk)
276 dstf.write(chunk)
277 linktousercache(repo, hash)
277 linktousercache(repo, hash)
278
278
279 def linktousercache(repo, hash):
279 def linktousercache(repo, hash):
280 '''Link / copy the largefile with the specified hash from the store
280 '''Link / copy the largefile with the specified hash from the store
281 to the cache.'''
281 to the cache.'''
282 path = usercachepath(repo.ui, hash)
282 path = usercachepath(repo.ui, hash)
283 link(storepath(repo, hash), path)
283 link(storepath(repo, hash), path)
284
284
285 def getstandinmatcher(repo, rmatcher=None):
285 def getstandinmatcher(repo, rmatcher=None):
286 '''Return a match object that applies rmatcher to the standin directory'''
286 '''Return a match object that applies rmatcher to the standin directory'''
287 wvfs = repo.wvfs
287 wvfs = repo.wvfs
288 standindir = shortname
288 standindir = shortname
289
289
290 # no warnings about missing files or directories
290 # no warnings about missing files or directories
291 badfn = lambda f, msg: None
291 badfn = lambda f, msg: None
292
292
293 if rmatcher and not rmatcher.always():
293 if rmatcher and not rmatcher.always():
294 pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
294 pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
295 if not pats:
295 if not pats:
296 pats = [wvfs.join(standindir)]
296 pats = [wvfs.join(standindir)]
297 match = scmutil.match(repo[None], pats, badfn=badfn)
297 match = scmutil.match(repo[None], pats, badfn=badfn)
298 # if pats is empty, it would incorrectly always match, so clear _always
298 # if pats is empty, it would incorrectly always match, so clear _always
299 match._always = False
299 match._always = False
300 else:
300 else:
301 # no patterns: relative to repo root
301 # no patterns: relative to repo root
302 match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
302 match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
303 return match
303 return match
304
304
305 def composestandinmatcher(repo, rmatcher):
305 def composestandinmatcher(repo, rmatcher):
306 '''Return a matcher that accepts standins corresponding to the
306 '''Return a matcher that accepts standins corresponding to the
307 files accepted by rmatcher. Pass the list of files in the matcher
307 files accepted by rmatcher. Pass the list of files in the matcher
308 as the paths specified by the user.'''
308 as the paths specified by the user.'''
309 smatcher = getstandinmatcher(repo, rmatcher)
309 smatcher = getstandinmatcher(repo, rmatcher)
310 isstandin = smatcher.matchfn
310 isstandin = smatcher.matchfn
311 def composedmatchfn(f):
311 def composedmatchfn(f):
312 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
312 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
313 smatcher.matchfn = composedmatchfn
313 smatcher.matchfn = composedmatchfn
314
314
315 return smatcher
315 return smatcher
316
316
317 def standin(filename):
317 def standin(filename):
318 '''Return the repo-relative path to the standin for the specified big
318 '''Return the repo-relative path to the standin for the specified big
319 file.'''
319 file.'''
320 # Notes:
320 # Notes:
321 # 1) Some callers want an absolute path, but for instance addlargefiles
321 # 1) Some callers want an absolute path, but for instance addlargefiles
322 # needs it repo-relative so it can be passed to repo[None].add(). So
322 # needs it repo-relative so it can be passed to repo[None].add(). So
323 # leave it up to the caller to use repo.wjoin() to get an absolute path.
323 # leave it up to the caller to use repo.wjoin() to get an absolute path.
324 # 2) Join with '/' because that's what dirstate always uses, even on
324 # 2) Join with '/' because that's what dirstate always uses, even on
325 # Windows. Change existing separator to '/' first in case we are
325 # Windows. Change existing separator to '/' first in case we are
326 # passed filenames from an external source (like the command line).
326 # passed filenames from an external source (like the command line).
327 return shortnameslash + util.pconvert(filename)
327 return shortnameslash + util.pconvert(filename)
328
328
329 def isstandin(filename):
329 def isstandin(filename):
330 '''Return true if filename is a big file standin. filename must be
330 '''Return true if filename is a big file standin. filename must be
331 in Mercurial's internal form (slash-separated).'''
331 in Mercurial's internal form (slash-separated).'''
332 return filename.startswith(shortnameslash)
332 return filename.startswith(shortnameslash)
333
333
334 def splitstandin(filename):
334 def splitstandin(filename):
335 # Split on / because that's what dirstate always uses, even on Windows.
335 # Split on / because that's what dirstate always uses, even on Windows.
336 # Change local separator to / first just in case we are passed filenames
336 # Change local separator to / first just in case we are passed filenames
337 # from an external source (like the command line).
337 # from an external source (like the command line).
338 bits = util.pconvert(filename).split('/', 1)
338 bits = util.pconvert(filename).split('/', 1)
339 if len(bits) == 2 and bits[0] == shortname:
339 if len(bits) == 2 and bits[0] == shortname:
340 return bits[1]
340 return bits[1]
341 else:
341 else:
342 return None
342 return None
343
343
344 def updatestandin(repo, standin):
344 def updatestandin(repo, standin):
345 file = repo.wjoin(splitstandin(standin))
345 file = repo.wjoin(splitstandin(standin))
346 if repo.wvfs.exists(splitstandin(standin)):
346 if repo.wvfs.exists(splitstandin(standin)):
347 hash = hashfile(file)
347 hash = hashfile(file)
348 executable = getexecutable(file)
348 executable = getexecutable(file)
349 writestandin(repo, standin, hash, executable)
349 writestandin(repo, standin, hash, executable)
350 else:
350 else:
351 raise error.Abort(_('%s: file not found!') % splitstandin(standin))
351 raise error.Abort(_('%s: file not found!') % splitstandin(standin))
352
352
353 def readstandin(repo, filename, node=None):
353 def readstandin(repo, filename, node=None):
354 '''read hex hash from standin for filename at given node, or working
354 '''read hex hash from standin for filename at given node, or working
355 directory if no node is given'''
355 directory if no node is given'''
356 return repo[node][standin(filename)].data().strip()
356 return repo[node][standin(filename)].data().strip()
357
357
358 def writestandin(repo, standin, hash, executable):
358 def writestandin(repo, standin, hash, executable):
359 '''write hash to <repo.root>/<standin>'''
359 '''write hash to <repo.root>/<standin>'''
360 repo.wwrite(standin, hash + '\n', executable and 'x' or '')
360 repo.wwrite(standin, hash + '\n', executable and 'x' or '')
361
361
362 def copyandhash(instream, outfile):
362 def copyandhash(instream, outfile):
363 '''Read bytes from instream (iterable) and write them to outfile,
363 '''Read bytes from instream (iterable) and write them to outfile,
364 computing the SHA-1 hash of the data along the way. Return the hash.'''
364 computing the SHA-1 hash of the data along the way. Return the hash.'''
365 hasher = hashlib.sha1('')
365 hasher = hashlib.sha1('')
366 for data in instream:
366 for data in instream:
367 hasher.update(data)
367 hasher.update(data)
368 outfile.write(data)
368 outfile.write(data)
369 return hasher.hexdigest()
369 return hasher.hexdigest()
370
370
371 def hashrepofile(repo, file):
371 def hashrepofile(repo, file):
372 return hashfile(repo.wjoin(file))
372 return hashfile(repo.wjoin(file))
373
373
374 def hashfile(file):
374 def hashfile(file):
375 if not os.path.exists(file):
375 if not os.path.exists(file):
376 return ''
376 return ''
377 hasher = hashlib.sha1('')
377 hasher = hashlib.sha1('')
378 with open(file, 'rb') as fd:
378 with open(file, 'rb') as fd:
379 for data in util.filechunkiter(fd):
379 for data in util.filechunkiter(fd):
380 hasher.update(data)
380 hasher.update(data)
381 return hasher.hexdigest()
381 return hasher.hexdigest()
382
382
383 def getexecutable(filename):
383 def getexecutable(filename):
384 mode = os.stat(filename).st_mode
384 mode = os.stat(filename).st_mode
385 return ((mode & stat.S_IXUSR) and
385 return ((mode & stat.S_IXUSR) and
386 (mode & stat.S_IXGRP) and
386 (mode & stat.S_IXGRP) and
387 (mode & stat.S_IXOTH))
387 (mode & stat.S_IXOTH))
388
388
389 def urljoin(first, second, *arg):
389 def urljoin(first, second, *arg):
390 def join(left, right):
390 def join(left, right):
391 if not left.endswith('/'):
391 if not left.endswith('/'):
392 left += '/'
392 left += '/'
393 if right.startswith('/'):
393 if right.startswith('/'):
394 right = right[1:]
394 right = right[1:]
395 return left + right
395 return left + right
396
396
397 url = join(first, second)
397 url = join(first, second)
398 for a in arg:
398 for a in arg:
399 url = join(url, a)
399 url = join(url, a)
400 return url
400 return url
401
401
402 def hexsha1(data):
402 def hexsha1(data):
403 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
403 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
404 object data"""
404 object data"""
405 h = hashlib.sha1()
405 h = hashlib.sha1()
406 for chunk in util.filechunkiter(data):
406 for chunk in util.filechunkiter(data):
407 h.update(chunk)
407 h.update(chunk)
408 return h.hexdigest()
408 return h.hexdigest()
409
409
410 def httpsendfile(ui, filename):
410 def httpsendfile(ui, filename):
411 return httpconnection.httpsendfile(ui, filename, 'rb')
411 return httpconnection.httpsendfile(ui, filename, 'rb')
412
412
413 def unixpath(path):
413 def unixpath(path):
414 '''Return a version of path normalized for use with the lfdirstate.'''
414 '''Return a version of path normalized for use with the lfdirstate.'''
415 return util.pconvert(os.path.normpath(path))
415 return util.pconvert(os.path.normpath(path))
416
416
417 def islfilesrepo(repo):
417 def islfilesrepo(repo):
418 '''Return true if the repo is a largefile repo.'''
418 '''Return true if the repo is a largefile repo.'''
419 if ('largefiles' in repo.requirements and
419 if ('largefiles' in repo.requirements and
420 any(shortnameslash in f[0] for f in repo.store.datafiles())):
420 any(shortnameslash in f[0] for f in repo.store.datafiles())):
421 return True
421 return True
422
422
423 return any(openlfdirstate(repo.ui, repo, False))
423 return any(openlfdirstate(repo.ui, repo, False))
424
424
425 class storeprotonotcapable(Exception):
425 class storeprotonotcapable(Exception):
426 def __init__(self, storetypes):
426 def __init__(self, storetypes):
427 self.storetypes = storetypes
427 self.storetypes = storetypes
428
428
429 def getstandinsstate(repo):
429 def getstandinsstate(repo):
430 standins = []
430 standins = []
431 matcher = getstandinmatcher(repo)
431 matcher = getstandinmatcher(repo)
432 for standin in repo.dirstate.walk(matcher, [], False, False):
432 for standin in repo.dirstate.walk(matcher, [], False, False):
433 lfile = splitstandin(standin)
433 lfile = splitstandin(standin)
434 try:
434 try:
435 hash = readstandin(repo, lfile)
435 hash = readstandin(repo, lfile)
436 except IOError:
436 except IOError:
437 hash = None
437 hash = None
438 standins.append((lfile, hash))
438 standins.append((lfile, hash))
439 return standins
439 return standins
440
440
441 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
441 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
442 lfstandin = standin(lfile)
442 lfstandin = standin(lfile)
443 if lfstandin in repo.dirstate:
443 if lfstandin in repo.dirstate:
444 stat = repo.dirstate._map[lfstandin]
444 stat = repo.dirstate._map[lfstandin]
445 state, mtime = stat[0], stat[3]
445 state, mtime = stat[0], stat[3]
446 else:
446 else:
447 state, mtime = '?', -1
447 state, mtime = '?', -1
448 if state == 'n':
448 if state == 'n':
449 if (normallookup or mtime < 0 or
449 if (normallookup or mtime < 0 or
450 not repo.wvfs.exists(lfile)):
450 not repo.wvfs.exists(lfile)):
451 # state 'n' doesn't ensure 'clean' in this case
451 # state 'n' doesn't ensure 'clean' in this case
452 lfdirstate.normallookup(lfile)
452 lfdirstate.normallookup(lfile)
453 else:
453 else:
454 lfdirstate.normal(lfile)
454 lfdirstate.normal(lfile)
455 elif state == 'm':
455 elif state == 'm':
456 lfdirstate.normallookup(lfile)
456 lfdirstate.normallookup(lfile)
457 elif state == 'r':
457 elif state == 'r':
458 lfdirstate.remove(lfile)
458 lfdirstate.remove(lfile)
459 elif state == 'a':
459 elif state == 'a':
460 lfdirstate.add(lfile)
460 lfdirstate.add(lfile)
461 elif state == '?':
461 elif state == '?':
462 lfdirstate.drop(lfile)
462 lfdirstate.drop(lfile)
463
463
def markcommitted(orig, ctx, node):
    """Post-commit wrapper: run ``orig`` then sync largefile bookkeeping.

    ATTENTION: "ctx.files()" may differ from "repo[node].files()"
    because files coming from the 2nd parent are omitted in the latter.
    The former must drive "synclfdirstate" below, because such files:
    - are marked as "a" by "patch.patch()" (e.g. via transplant), and
    - have to be marked as "n" after commit, but
    - aren't listed in "repo[node].files()"
    """
    repo = ctx.repo()

    orig(node)

    lfdirstate = openlfdirstate(repo.ui, repo)
    for changed in ctx.files():
        if not isstandin(changed):
            continue
        synclfdirstate(repo, lfdirstate, splitstandin(changed), False)
    lfdirstate.write()

    # Committing also copies every largefile of the commit into the cache.
    copyalltostore(repo, node)
487
487
def getlfilestoupdate(oldstandins, newstandins):
    """Return the largefile names whose standin entry changed.

    ``oldstandins`` and ``newstandins`` are lists of ``(lfile, hash)``
    pairs (as produced by ``getstandinsstate``).  Any pair present in
    only one of the two snapshots marks its largefile as needing an
    update.  Each name is reported at most once; as before, result
    order follows set iteration order of the symmetric difference.
    """
    changedstandins = set(oldstandins).symmetric_difference(newstandins)
    filelist = []
    # track names in a set: O(1) membership instead of rescanning the
    # result list for every changed standin
    seen = set()
    for lfile, _hash in changedstandins:
        if lfile not in seen:
            seen.add(lfile)
            filelist.append(lfile)
    return filelist
495
495
def getlfilestoupload(repo, missing, addfunc):
    """Feed every outgoing standin of the ``missing`` revisions to addfunc.

    For each revision, candidate files are the changeset's own files; for
    merges, files removed or changed relative to either parent manifest are
    added as well.  ``addfunc(fn, hash)`` is called for every standin that
    exists in the changeset.
    """
    total = len(missing)
    for idx, rev in enumerate(missing):
        repo.ui.progress(_('finding outgoing largefiles'), idx,
                         unit=_('revisions'), total=total)
        parents = [p for p in repo[rev].parents() if p != node.nullid]

        # read the changectx with largefile status reporting disabled
        savedlfstatus = repo.lfstatus
        repo.lfstatus = False
        try:
            ctx = repo[rev]
        finally:
            repo.lfstatus = savedlfstatus

        candidates = set(ctx.files())
        if len(parents) == 2:
            mc = ctx.manifest()
            mp1 = ctx.parents()[0].manifest()
            mp2 = ctx.parents()[1].manifest()
            # files dropped relative to either parent, plus files whose
            # entry differs from at least one parent's
            candidates.update(f for f in mp1 if f not in mc)
            candidates.update(f for f in mp2 if f not in mc)
            candidates.update(
                f for f in mc
                if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None))

        for fn in candidates:
            if isstandin(fn) and fn in ctx:
                addfunc(fn, ctx[fn].data().strip())
    repo.ui.progress(_('finding outgoing largefiles'), None)
527
527
def updatestandinsbymatch(repo, match):
    '''Update standins in the working directory according to specified match

    This returns (possibly modified) ``match`` object to be used for
    subsequent commit process.
    '''

    ui = repo.ui

    # Case 1: user calls commit with no specific files or
    # include/exclude patterns: refresh and commit all files that
    # are "dirty".
    if match is None or match.always():
        # Spend a bit of time here to get a list of files we know
        # are modified so we can compare only against those.
        # It can cost a lot of time (several seconds)
        # otherwise to update all standins if the largefiles are
        # large.
        lfdirstate = openlfdirstate(ui, repo)
        dirtymatch = matchmod.always(repo.root, repo.getcwd())
        unsure, s = lfdirstate.status(dirtymatch, [], False, False,
                                      False)
        modifiedfiles = unsure + s.modified + s.added + s.removed
        lfiles = listlfiles(repo)
        # this only loops through largefiles that exist (not
        # removed/renamed)
        for lfile in lfiles:
            if lfile in modifiedfiles:
                if repo.wvfs.exists(standin(lfile)):
                    # this handles the case where a rebase is being
                    # performed and the working copy is not updated
                    # yet.
                    if repo.wvfs.exists(lfile):
                        updatestandin(repo,
                                      standin(lfile))

        return match

    lfiles = listlfiles(repo)
    # NOTE: mutates the matcher's private file list in place; the rest of
    # this function relies on match.files() reflecting this rewrite.
    match._files = repo._subdirlfs(match.files(), lfiles)

    # Case 2: user calls commit with specified patterns: refresh
    # any matching big files.
    smatcher = composestandinmatcher(repo, match)
    standins = repo.dirstate.walk(smatcher, [], False, False)

    # No matching big files: get out of the way and pass control to
    # the usual commit() method.
    if not standins:
        return match

    # Refresh all matching big files. It's possible that the
    # commit will end up failing, in which case the big files will
    # stay refreshed. No harm done: the user modified them and
    # asked to commit them, so sooner or later we're going to
    # refresh the standins. Might as well leave them refreshed.
    lfdirstate = openlfdirstate(ui, repo)
    for fstandin in standins:
        lfile = splitstandin(fstandin)
        # 'r' == scheduled for removal (see synclfdirstate): don't
        # resurrect a standin the user is deleting
        if lfdirstate[lfile] != 'r':
            updatestandin(repo, fstandin)

    # Cook up a new matcher that only matches regular files or
    # standins corresponding to the big files requested by the
    # user. Have to modify _files to prevent commit() from
    # complaining "not tracked" for big files.
    match = copy.copy(match)
    origmatchfn = match.matchfn

    # Check both the list of largefiles and the list of
    # standins because if a largefile was removed, it
    # won't be in the list of largefiles at this point
    match._files += sorted(standins)

    actualfiles = []
    for f in match._files:
        fstandin = standin(f)

        # For largefiles, only one of the normal and standin should be
        # committed (except if one of them is a remove). In the case of a
        # standin removal, drop the normal file if it is unknown to dirstate.
        # Thus, skip plain largefile names but keep the standin.
        if f in lfiles or fstandin in standins:
            if repo.dirstate[fstandin] != 'r':
                if repo.dirstate[f] != 'r':
                    continue
            elif repo.dirstate[f] == '?':
                continue

        actualfiles.append(f)
    match._files = actualfiles

    def matchfn(f):
        # regular files match as before (but largefile names are excluded);
        # largefiles themselves are represented only by their standins
        if origmatchfn(f):
            return f not in lfiles
        else:
            return f in standins

    match.matchfn = matchfn

    return match
629
629
class automatedcommithook(object):
    '''Stateful hook updating standins only at the 1st commit of resuming.

    Automated committing (rebase, transplant and so on) keeps standins
    up to date before committing, so refreshing them at every commit
    would be wasted work.  The first commit after resuming such an
    operation (e.g. ``rebase --continue``) is the exception: largefiles
    may have been modified manually in the meantime, so standins must be
    refreshed exactly once.
    '''
    def __init__(self, resuming):
        self.resuming = resuming

    def __call__(self, repo, match):
        if not self.resuming:
            return match
        # only the very first commit after resuming refreshes standins
        self.resuming = False
        return updatestandinsbymatch(repo, match)
650
650
def getstatuswriter(ui, repo, forcibly=None):
    '''Return the function used to write largefiles-specific status.

    With ``forcibly=None`` (the default), the last element of
    ``repo._lfstatuswriters`` is returned for a largefiles-enabled repo.
    Otherwise the returned writer unconditionally writes status
    (``forcibly`` true) or unconditionally ignores it (``forcibly``
    false).
    '''
    if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
        return repo._lfstatuswriters[-1]
    if forcibly:
        return ui.status  # forcibly WRITE OUT
    return lambda *msg, **opts: None  # forcibly IGNORE
@@ -1,3611 +1,3611 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help COMMAND` for more details)::
17 Common tasks (use :hg:`help COMMAND` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import
65 from __future__ import absolute_import
66
66
67 import errno
67 import errno
68 import os
68 import os
69 import re
69 import re
70 import shutil
70 import shutil
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import (
72 from mercurial.node import (
73 bin,
73 bin,
74 hex,
74 hex,
75 nullid,
75 nullid,
76 nullrev,
76 nullrev,
77 short,
77 short,
78 )
78 )
79 from mercurial import (
79 from mercurial import (
80 cmdutil,
80 cmdutil,
81 commands,
81 commands,
82 dirstateguard,
82 dirstateguard,
83 error,
83 error,
84 extensions,
84 extensions,
85 hg,
85 hg,
86 localrepo,
86 localrepo,
87 lock as lockmod,
87 lock as lockmod,
88 patch as patchmod,
88 patch as patchmod,
89 phases,
89 phases,
90 pycompat,
90 pycompat,
91 registrar,
91 registrar,
92 revsetlang,
92 revsetlang,
93 scmutil,
93 scmutil,
94 smartset,
94 smartset,
95 subrepo,
95 subrepo,
96 util,
96 util,
97 )
97 )
98
98
99 release = lockmod.release
99 release = lockmod.release
100 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
100 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
101
101
102 cmdtable = {}
102 cmdtable = {}
103 command = cmdutil.command(cmdtable)
103 command = cmdutil.command(cmdtable)
104 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
104 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
105 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
105 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
106 # be specifying the version(s) of Mercurial they are tested with, or
106 # be specifying the version(s) of Mercurial they are tested with, or
107 # leave the attribute unspecified.
107 # leave the attribute unspecified.
108 testedwith = 'ships-with-hg-core'
108 testedwith = 'ships-with-hg-core'
109
109
# force load strip extension formerly included in mq and import some utility
# (the strip command itself now lives in the strip extension; see the
# module docstring above)
try:
    stripext = extensions.find('strip')
except KeyError:
    # note: load is lazy so we could avoid the try-except,
    # but I (marmoute) prefer this explicit code.
    class dummyui(object):
        # minimal ui stand-in passed to extensions.load below; only a
        # debug() method is defined here
        def debug(self, msg):
            pass
    stripext = extensions.load(dummyui(), 'strip', '')

strip = stripext.strip
checksubstate = stripext.checksubstate
checklocalchanges = stripext.checklocalchanges
124
124
125
125
# Patch names look like unix file names.
# They must be joinable with the queue directory and result in the patch path.
normname = util.normpath
129
129
class statusentry(object):
    """A (node, patch name) pair, one entry of the mq status file."""

    def __init__(self, node, name):
        self.node = node
        self.name = name

    def __repr__(self):
        return '%s:%s' % (hex(self.node), self.name)
135
135
# The order of the headers in 'hg export' HG patches
# (inserthgheader() relies on this list's ordering to position headers):
HGHEADERS = [
    # '# HG changeset patch',
    '# User ',
    '# Date ',
    '# ',
    '# Branch ',
    '# Node ID ',
    '# Parent ', # can occur twice for merges - but that is not relevant for mq
]
# The order of headers in plain 'mail style' patches; a lower value means
# the header comes earlier (insertplainheader() uses these priorities):
PLAINHEADERS = {
    'from': 0,
    'date': 1,
    'subject': 2,
}
152
152
def inserthgheader(lines, header, value):
    """Assuming lines contains a HG patch header, add a header line with value.
    >>> try: inserthgheader([], '# Date ', 'z')
    ... except ValueError, inst: print "oops"
    oops
    >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z']
    >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
    ['# HG changeset patch', '# Date z', '']
    >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
    ['# HG changeset patch', '# User y', '# Date z']
    >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
    ... '# User ', 'z')
    ['# HG changeset patch', '# Date x', '# User z']
    >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z']
    >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z', '', '# Date y']
    >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
    ['# HG changeset patch', '# Date z', '# Parent y']
    """
    # scanning starts just after the marker line; list.index raises
    # ValueError if the marker is missing (see first doctest)
    start = lines.index('# HG changeset patch') + 1
    # position of the new header within the canonical HGHEADERS ordering
    newindex = HGHEADERS.index(header)
    # bestpos: leftmost acceptable insertion point found so far
    bestpos = len(lines)
    for i in range(start, len(lines)):
        line = lines[i]
        if not line.startswith('# '):
            # end of the header block - insert no later than here
            bestpos = min(bestpos, i)
            break
        for lineindex, h in enumerate(HGHEADERS):
            if line.startswith(h):
                if lineindex == newindex:
                    # same header already present: overwrite it in place
                    lines[i] = header + value
                    return lines
                if lineindex > newindex:
                    # first header sorting after ours - insert before it
                    bestpos = min(bestpos, i)
                break # next line
    lines.insert(bestpos, header + value)
    return lines
192
192
def insertplainheader(lines, header, value):
    """For lines containing a plain patch header, add a header line with value.
    >>> insertplainheader([], 'Date', 'z')
    ['Date: z']
    >>> insertplainheader([''], 'Date', 'z')
    ['Date: z', '']
    >>> insertplainheader(['x'], 'Date', 'z')
    ['Date: z', '', 'x']
    >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
    ['From: y', 'Date: z', '', 'x']
    >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
    [' date : x', 'From: z', '']
    >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
    ['Date: z', '', 'Date: y']
    >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
    ['From: y', 'foo: bar', 'DATE: z', '', 'x']
    """
    # priority of the new header within the plain-mail header ordering
    newprio = PLAINHEADERS[header.lower()]
    # bestpos: earliest position before which the new header may go
    bestpos = len(lines)
    for i, line in enumerate(lines):
        if ':' in line:
            lheader = line.split(':', 1)[0].strip().lower()
            # unknown header names sort after the one being inserted
            lprio = PLAINHEADERS.get(lheader, newprio + 1)
            if lprio == newprio:
                # same header already present: replace it in place
                lines[i] = '%s: %s' % (header, value)
                return lines
            if lprio > newprio and i < bestpos:
                bestpos = i
        else:
            if line:
                # first non-header, non-blank line starts the body; keep a
                # separating blank line between the headers and the body
                lines.insert(i, '')
                if i < bestpos:
                    bestpos = i
                break
    lines.insert(bestpos, '%s: %s' % (header, value))
    return lines
229
229
230 class patchheader(object):
230 class patchheader(object):
231 def __init__(self, pf, plainmode=False):
231 def __init__(self, pf, plainmode=False):
232 def eatdiff(lines):
232 def eatdiff(lines):
233 while lines:
233 while lines:
234 l = lines[-1]
234 l = lines[-1]
235 if (l.startswith("diff -") or
235 if (l.startswith("diff -") or
236 l.startswith("Index:") or
236 l.startswith("Index:") or
237 l.startswith("===========")):
237 l.startswith("===========")):
238 del lines[-1]
238 del lines[-1]
239 else:
239 else:
240 break
240 break
241 def eatempty(lines):
241 def eatempty(lines):
242 while lines:
242 while lines:
243 if not lines[-1].strip():
243 if not lines[-1].strip():
244 del lines[-1]
244 del lines[-1]
245 else:
245 else:
246 break
246 break
247
247
248 message = []
248 message = []
249 comments = []
249 comments = []
250 user = None
250 user = None
251 date = None
251 date = None
252 parent = None
252 parent = None
253 format = None
253 format = None
254 subject = None
254 subject = None
255 branch = None
255 branch = None
256 nodeid = None
256 nodeid = None
257 diffstart = 0
257 diffstart = 0
258
258
259 for line in file(pf):
259 for line in file(pf):
260 line = line.rstrip()
260 line = line.rstrip()
261 if (line.startswith('diff --git')
261 if (line.startswith('diff --git')
262 or (diffstart and line.startswith('+++ '))):
262 or (diffstart and line.startswith('+++ '))):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 diffstart = 0 # reset
265 diffstart = 0 # reset
266 if line.startswith("--- "):
266 if line.startswith("--- "):
267 diffstart = 1
267 diffstart = 1
268 continue
268 continue
269 elif format == "hgpatch":
269 elif format == "hgpatch":
270 # parse values when importing the result of an hg export
270 # parse values when importing the result of an hg export
271 if line.startswith("# User "):
271 if line.startswith("# User "):
272 user = line[7:]
272 user = line[7:]
273 elif line.startswith("# Date "):
273 elif line.startswith("# Date "):
274 date = line[7:]
274 date = line[7:]
275 elif line.startswith("# Parent "):
275 elif line.startswith("# Parent "):
276 parent = line[9:].lstrip() # handle double trailing space
276 parent = line[9:].lstrip() # handle double trailing space
277 elif line.startswith("# Branch "):
277 elif line.startswith("# Branch "):
278 branch = line[9:]
278 branch = line[9:]
279 elif line.startswith("# Node ID "):
279 elif line.startswith("# Node ID "):
280 nodeid = line[10:]
280 nodeid = line[10:]
281 elif not line.startswith("# ") and line:
281 elif not line.startswith("# ") and line:
282 message.append(line)
282 message.append(line)
283 format = None
283 format = None
284 elif line == '# HG changeset patch':
284 elif line == '# HG changeset patch':
285 message = []
285 message = []
286 format = "hgpatch"
286 format = "hgpatch"
287 elif (format != "tagdone" and (line.startswith("Subject: ") or
287 elif (format != "tagdone" and (line.startswith("Subject: ") or
288 line.startswith("subject: "))):
288 line.startswith("subject: "))):
289 subject = line[9:]
289 subject = line[9:]
290 format = "tag"
290 format = "tag"
291 elif (format != "tagdone" and (line.startswith("From: ") or
291 elif (format != "tagdone" and (line.startswith("From: ") or
292 line.startswith("from: "))):
292 line.startswith("from: "))):
293 user = line[6:]
293 user = line[6:]
294 format = "tag"
294 format = "tag"
295 elif (format != "tagdone" and (line.startswith("Date: ") or
295 elif (format != "tagdone" and (line.startswith("Date: ") or
296 line.startswith("date: "))):
296 line.startswith("date: "))):
297 date = line[6:]
297 date = line[6:]
298 format = "tag"
298 format = "tag"
299 elif format == "tag" and line == "":
299 elif format == "tag" and line == "":
300 # when looking for tags (subject: from: etc) they
300 # when looking for tags (subject: from: etc) they
301 # end once you find a blank line in the source
301 # end once you find a blank line in the source
302 format = "tagdone"
302 format = "tagdone"
303 elif message or line:
303 elif message or line:
304 message.append(line)
304 message.append(line)
305 comments.append(line)
305 comments.append(line)
306
306
307 eatdiff(message)
307 eatdiff(message)
308 eatdiff(comments)
308 eatdiff(comments)
309 # Remember the exact starting line of the patch diffs before consuming
309 # Remember the exact starting line of the patch diffs before consuming
310 # empty lines, for external use by TortoiseHg and others
310 # empty lines, for external use by TortoiseHg and others
311 self.diffstartline = len(comments)
311 self.diffstartline = len(comments)
312 eatempty(message)
312 eatempty(message)
313 eatempty(comments)
313 eatempty(comments)
314
314
315 # make sure message isn't empty
315 # make sure message isn't empty
316 if format and format.startswith("tag") and subject:
316 if format and format.startswith("tag") and subject:
317 message.insert(0, subject)
317 message.insert(0, subject)
318
318
319 self.message = message
319 self.message = message
320 self.comments = comments
320 self.comments = comments
321 self.user = user
321 self.user = user
322 self.date = date
322 self.date = date
323 self.parent = parent
323 self.parent = parent
324 # nodeid and branch are for external use by TortoiseHg and others
324 # nodeid and branch are for external use by TortoiseHg and others
325 self.nodeid = nodeid
325 self.nodeid = nodeid
326 self.branch = branch
326 self.branch = branch
327 self.haspatch = diffstart > 1
327 self.haspatch = diffstart > 1
328 self.plainmode = (plainmode or
328 self.plainmode = (plainmode or
329 '# HG changeset patch' not in self.comments and
329 '# HG changeset patch' not in self.comments and
330 any(c.startswith('Date: ') or
330 any(c.startswith('Date: ') or
331 c.startswith('From: ')
331 c.startswith('From: ')
332 for c in self.comments))
332 for c in self.comments))
333
333
334 def setuser(self, user):
334 def setuser(self, user):
335 try:
335 try:
336 inserthgheader(self.comments, '# User ', user)
336 inserthgheader(self.comments, '# User ', user)
337 except ValueError:
337 except ValueError:
338 if self.plainmode:
338 if self.plainmode:
339 insertplainheader(self.comments, 'From', user)
339 insertplainheader(self.comments, 'From', user)
340 else:
340 else:
341 tmp = ['# HG changeset patch', '# User ' + user]
341 tmp = ['# HG changeset patch', '# User ' + user]
342 self.comments = tmp + self.comments
342 self.comments = tmp + self.comments
343 self.user = user
343 self.user = user
344
344
345 def setdate(self, date):
345 def setdate(self, date):
346 try:
346 try:
347 inserthgheader(self.comments, '# Date ', date)
347 inserthgheader(self.comments, '# Date ', date)
348 except ValueError:
348 except ValueError:
349 if self.plainmode:
349 if self.plainmode:
350 insertplainheader(self.comments, 'Date', date)
350 insertplainheader(self.comments, 'Date', date)
351 else:
351 else:
352 tmp = ['# HG changeset patch', '# Date ' + date]
352 tmp = ['# HG changeset patch', '# Date ' + date]
353 self.comments = tmp + self.comments
353 self.comments = tmp + self.comments
354 self.date = date
354 self.date = date
355
355
356 def setparent(self, parent):
356 def setparent(self, parent):
357 try:
357 try:
358 inserthgheader(self.comments, '# Parent ', parent)
358 inserthgheader(self.comments, '# Parent ', parent)
359 except ValueError:
359 except ValueError:
360 if not self.plainmode:
360 if not self.plainmode:
361 tmp = ['# HG changeset patch', '# Parent ' + parent]
361 tmp = ['# HG changeset patch', '# Parent ' + parent]
362 self.comments = tmp + self.comments
362 self.comments = tmp + self.comments
363 self.parent = parent
363 self.parent = parent
364
364
365 def setmessage(self, message):
365 def setmessage(self, message):
366 if self.comments:
366 if self.comments:
367 self._delmsg()
367 self._delmsg()
368 self.message = [message]
368 self.message = [message]
369 if message:
369 if message:
370 if self.plainmode and self.comments and self.comments[-1]:
370 if self.plainmode and self.comments and self.comments[-1]:
371 self.comments.append('')
371 self.comments.append('')
372 self.comments.append(message)
372 self.comments.append(message)
373
373
374 def __str__(self):
374 def __str__(self):
375 s = '\n'.join(self.comments).rstrip()
375 s = '\n'.join(self.comments).rstrip()
376 if not s:
376 if not s:
377 return ''
377 return ''
378 return s + '\n\n'
378 return s + '\n\n'
379
379
380 def _delmsg(self):
380 def _delmsg(self):
381 '''Remove existing message, keeping the rest of the comments fields.
381 '''Remove existing message, keeping the rest of the comments fields.
382 If comments contains 'subject: ', message will prepend
382 If comments contains 'subject: ', message will prepend
383 the field and a blank line.'''
383 the field and a blank line.'''
384 if self.message:
384 if self.message:
385 subj = 'subject: ' + self.message[0].lower()
385 subj = 'subject: ' + self.message[0].lower()
386 for i in xrange(len(self.comments)):
386 for i in xrange(len(self.comments)):
387 if subj == self.comments[i].lower():
387 if subj == self.comments[i].lower():
388 del self.comments[i]
388 del self.comments[i]
389 self.message = self.message[2:]
389 self.message = self.message[2:]
390 break
390 break
391 ci = 0
391 ci = 0
392 for mi in self.message:
392 for mi in self.message:
393 while mi != self.comments[ci]:
393 while mi != self.comments[ci]:
394 ci += 1
394 ci += 1
395 del self.comments[ci]
395 del self.comments[ci]
396
396
def newcommit(repo, phase, *args, **kwargs):
    """helper dedicated to ensure a commit respect mq.secret setting

    It should be used instead of repo.commit inside the mq source for operation
    creating new changeset.
    """
    repo = repo.unfiltered()
    ui = repo.ui
    if phase is None and ui.configbool('mq', 'secret', False):
        phase = phases.secret
    if phase is not None:
        phasebackup = ui.backupconfig('phases', 'new-commit')
    allowemptybackup = ui.backupconfig('ui', 'allowemptycommit')
    try:
        if phase is not None:
            ui.setconfig('phases', 'new-commit', phase, 'mq')
        ui.setconfig('ui', 'allowemptycommit', True)
        return repo.commit(*args, **kwargs)
    finally:
        # restore in reverse order of the overrides above
        ui.restoreconfig(allowemptybackup)
        if phase is not None:
            ui.restoreconfig(phasebackup)
419
419
class AbortNoCleanup(error.Abort):
    # Abort variant handled specially by queue.apply(): the transaction is
    # closed and dirty state saved instead of being rolled back.
    pass
422
422
423 class queue(object):
423 class queue(object):
424 def __init__(self, ui, baseui, path, patchdir=None):
424 def __init__(self, ui, baseui, path, patchdir=None):
425 self.basepath = path
425 self.basepath = path
426 try:
426 try:
427 fh = open(os.path.join(path, 'patches.queue'))
427 fh = open(os.path.join(path, 'patches.queue'))
428 cur = fh.read().rstrip()
428 cur = fh.read().rstrip()
429 fh.close()
429 fh.close()
430 if not cur:
430 if not cur:
431 curpath = os.path.join(path, 'patches')
431 curpath = os.path.join(path, 'patches')
432 else:
432 else:
433 curpath = os.path.join(path, 'patches-' + cur)
433 curpath = os.path.join(path, 'patches-' + cur)
434 except IOError:
434 except IOError:
435 curpath = os.path.join(path, 'patches')
435 curpath = os.path.join(path, 'patches')
436 self.path = patchdir or curpath
436 self.path = patchdir or curpath
437 self.opener = scmutil.opener(self.path)
437 self.opener = scmutil.vfs(self.path)
438 self.ui = ui
438 self.ui = ui
439 self.baseui = baseui
439 self.baseui = baseui
440 self.applieddirty = False
440 self.applieddirty = False
441 self.seriesdirty = False
441 self.seriesdirty = False
442 self.added = []
442 self.added = []
443 self.seriespath = "series"
443 self.seriespath = "series"
444 self.statuspath = "status"
444 self.statuspath = "status"
445 self.guardspath = "guards"
445 self.guardspath = "guards"
446 self.activeguards = None
446 self.activeguards = None
447 self.guardsdirty = False
447 self.guardsdirty = False
448 # Handle mq.git as a bool with extended values
448 # Handle mq.git as a bool with extended values
449 try:
449 try:
450 gitmode = ui.configbool('mq', 'git', None)
450 gitmode = ui.configbool('mq', 'git', None)
451 if gitmode is None:
451 if gitmode is None:
452 raise error.ConfigError
452 raise error.ConfigError
453 if gitmode:
453 if gitmode:
454 self.gitmode = 'yes'
454 self.gitmode = 'yes'
455 else:
455 else:
456 self.gitmode = 'no'
456 self.gitmode = 'no'
457 except error.ConfigError:
457 except error.ConfigError:
458 # let's have check-config ignore the type mismatch
458 # let's have check-config ignore the type mismatch
459 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
459 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
460 # deprecated config: mq.plain
460 # deprecated config: mq.plain
461 self.plainmode = ui.configbool('mq', 'plain', False)
461 self.plainmode = ui.configbool('mq', 'plain', False)
462 self.checkapplied = True
462 self.checkapplied = True
463
463
464 @util.propertycache
464 @util.propertycache
465 def applied(self):
465 def applied(self):
466 def parselines(lines):
466 def parselines(lines):
467 for l in lines:
467 for l in lines:
468 entry = l.split(':', 1)
468 entry = l.split(':', 1)
469 if len(entry) > 1:
469 if len(entry) > 1:
470 n, name = entry
470 n, name = entry
471 yield statusentry(bin(n), name)
471 yield statusentry(bin(n), name)
472 elif l.strip():
472 elif l.strip():
473 self.ui.warn(_('malformated mq status line: %s\n') % entry)
473 self.ui.warn(_('malformated mq status line: %s\n') % entry)
474 # else we ignore empty lines
474 # else we ignore empty lines
475 try:
475 try:
476 lines = self.opener.read(self.statuspath).splitlines()
476 lines = self.opener.read(self.statuspath).splitlines()
477 return list(parselines(lines))
477 return list(parselines(lines))
478 except IOError as e:
478 except IOError as e:
479 if e.errno == errno.ENOENT:
479 if e.errno == errno.ENOENT:
480 return []
480 return []
481 raise
481 raise
482
482
483 @util.propertycache
483 @util.propertycache
484 def fullseries(self):
484 def fullseries(self):
485 try:
485 try:
486 return self.opener.read(self.seriespath).splitlines()
486 return self.opener.read(self.seriespath).splitlines()
487 except IOError as e:
487 except IOError as e:
488 if e.errno == errno.ENOENT:
488 if e.errno == errno.ENOENT:
489 return []
489 return []
490 raise
490 raise
491
491
492 @util.propertycache
492 @util.propertycache
493 def series(self):
493 def series(self):
494 self.parseseries()
494 self.parseseries()
495 return self.series
495 return self.series
496
496
497 @util.propertycache
497 @util.propertycache
498 def seriesguards(self):
498 def seriesguards(self):
499 self.parseseries()
499 self.parseseries()
500 return self.seriesguards
500 return self.seriesguards
501
501
502 def invalidate(self):
502 def invalidate(self):
503 for a in 'applied fullseries series seriesguards'.split():
503 for a in 'applied fullseries series seriesguards'.split():
504 if a in self.__dict__:
504 if a in self.__dict__:
505 delattr(self, a)
505 delattr(self, a)
506 self.applieddirty = False
506 self.applieddirty = False
507 self.seriesdirty = False
507 self.seriesdirty = False
508 self.guardsdirty = False
508 self.guardsdirty = False
509 self.activeguards = None
509 self.activeguards = None
510
510
511 def diffopts(self, opts=None, patchfn=None):
511 def diffopts(self, opts=None, patchfn=None):
512 diffopts = patchmod.diffopts(self.ui, opts)
512 diffopts = patchmod.diffopts(self.ui, opts)
513 if self.gitmode == 'auto':
513 if self.gitmode == 'auto':
514 diffopts.upgrade = True
514 diffopts.upgrade = True
515 elif self.gitmode == 'keep':
515 elif self.gitmode == 'keep':
516 pass
516 pass
517 elif self.gitmode in ('yes', 'no'):
517 elif self.gitmode in ('yes', 'no'):
518 diffopts.git = self.gitmode == 'yes'
518 diffopts.git = self.gitmode == 'yes'
519 else:
519 else:
520 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
520 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
521 ' got %s') % self.gitmode)
521 ' got %s') % self.gitmode)
522 if patchfn:
522 if patchfn:
523 diffopts = self.patchopts(diffopts, patchfn)
523 diffopts = self.patchopts(diffopts, patchfn)
524 return diffopts
524 return diffopts
525
525
526 def patchopts(self, diffopts, *patches):
526 def patchopts(self, diffopts, *patches):
527 """Return a copy of input diff options with git set to true if
527 """Return a copy of input diff options with git set to true if
528 referenced patch is a git patch and should be preserved as such.
528 referenced patch is a git patch and should be preserved as such.
529 """
529 """
530 diffopts = diffopts.copy()
530 diffopts = diffopts.copy()
531 if not diffopts.git and self.gitmode == 'keep':
531 if not diffopts.git and self.gitmode == 'keep':
532 for patchfn in patches:
532 for patchfn in patches:
533 patchf = self.opener(patchfn, 'r')
533 patchf = self.opener(patchfn, 'r')
534 # if the patch was a git patch, refresh it as a git patch
534 # if the patch was a git patch, refresh it as a git patch
535 for line in patchf:
535 for line in patchf:
536 if line.startswith('diff --git'):
536 if line.startswith('diff --git'):
537 diffopts.git = True
537 diffopts.git = True
538 break
538 break
539 patchf.close()
539 patchf.close()
540 return diffopts
540 return diffopts
541
541
542 def join(self, *p):
542 def join(self, *p):
543 return os.path.join(self.path, *p)
543 return os.path.join(self.path, *p)
544
544
545 def findseries(self, patch):
545 def findseries(self, patch):
546 def matchpatch(l):
546 def matchpatch(l):
547 l = l.split('#', 1)[0]
547 l = l.split('#', 1)[0]
548 return l.strip() == patch
548 return l.strip() == patch
549 for index, l in enumerate(self.fullseries):
549 for index, l in enumerate(self.fullseries):
550 if matchpatch(l):
550 if matchpatch(l):
551 return index
551 return index
552 return None
552 return None
553
553
554 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
554 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
555
555
556 def parseseries(self):
556 def parseseries(self):
557 self.series = []
557 self.series = []
558 self.seriesguards = []
558 self.seriesguards = []
559 for l in self.fullseries:
559 for l in self.fullseries:
560 h = l.find('#')
560 h = l.find('#')
561 if h == -1:
561 if h == -1:
562 patch = l
562 patch = l
563 comment = ''
563 comment = ''
564 elif h == 0:
564 elif h == 0:
565 continue
565 continue
566 else:
566 else:
567 patch = l[:h]
567 patch = l[:h]
568 comment = l[h:]
568 comment = l[h:]
569 patch = patch.strip()
569 patch = patch.strip()
570 if patch:
570 if patch:
571 if patch in self.series:
571 if patch in self.series:
572 raise error.Abort(_('%s appears more than once in %s') %
572 raise error.Abort(_('%s appears more than once in %s') %
573 (patch, self.join(self.seriespath)))
573 (patch, self.join(self.seriespath)))
574 self.series.append(patch)
574 self.series.append(patch)
575 self.seriesguards.append(self.guard_re.findall(comment))
575 self.seriesguards.append(self.guard_re.findall(comment))
576
576
577 def checkguard(self, guard):
577 def checkguard(self, guard):
578 if not guard:
578 if not guard:
579 return _('guard cannot be an empty string')
579 return _('guard cannot be an empty string')
580 bad_chars = '# \t\r\n\f'
580 bad_chars = '# \t\r\n\f'
581 first = guard[0]
581 first = guard[0]
582 if first in '-+':
582 if first in '-+':
583 return (_('guard %r starts with invalid character: %r') %
583 return (_('guard %r starts with invalid character: %r') %
584 (guard, first))
584 (guard, first))
585 for c in bad_chars:
585 for c in bad_chars:
586 if c in guard:
586 if c in guard:
587 return _('invalid character in guard %r: %r') % (guard, c)
587 return _('invalid character in guard %r: %r') % (guard, c)
588
588
589 def setactive(self, guards):
589 def setactive(self, guards):
590 for guard in guards:
590 for guard in guards:
591 bad = self.checkguard(guard)
591 bad = self.checkguard(guard)
592 if bad:
592 if bad:
593 raise error.Abort(bad)
593 raise error.Abort(bad)
594 guards = sorted(set(guards))
594 guards = sorted(set(guards))
595 self.ui.debug('active guards: %s\n' % ' '.join(guards))
595 self.ui.debug('active guards: %s\n' % ' '.join(guards))
596 self.activeguards = guards
596 self.activeguards = guards
597 self.guardsdirty = True
597 self.guardsdirty = True
598
598
599 def active(self):
599 def active(self):
600 if self.activeguards is None:
600 if self.activeguards is None:
601 self.activeguards = []
601 self.activeguards = []
602 try:
602 try:
603 guards = self.opener.read(self.guardspath).split()
603 guards = self.opener.read(self.guardspath).split()
604 except IOError as err:
604 except IOError as err:
605 if err.errno != errno.ENOENT:
605 if err.errno != errno.ENOENT:
606 raise
606 raise
607 guards = []
607 guards = []
608 for i, guard in enumerate(guards):
608 for i, guard in enumerate(guards):
609 bad = self.checkguard(guard)
609 bad = self.checkguard(guard)
610 if bad:
610 if bad:
611 self.ui.warn('%s:%d: %s\n' %
611 self.ui.warn('%s:%d: %s\n' %
612 (self.join(self.guardspath), i + 1, bad))
612 (self.join(self.guardspath), i + 1, bad))
613 else:
613 else:
614 self.activeguards.append(guard)
614 self.activeguards.append(guard)
615 return self.activeguards
615 return self.activeguards
616
616
617 def setguards(self, idx, guards):
617 def setguards(self, idx, guards):
618 for g in guards:
618 for g in guards:
619 if len(g) < 2:
619 if len(g) < 2:
620 raise error.Abort(_('guard %r too short') % g)
620 raise error.Abort(_('guard %r too short') % g)
621 if g[0] not in '-+':
621 if g[0] not in '-+':
622 raise error.Abort(_('guard %r starts with invalid char') % g)
622 raise error.Abort(_('guard %r starts with invalid char') % g)
623 bad = self.checkguard(g[1:])
623 bad = self.checkguard(g[1:])
624 if bad:
624 if bad:
625 raise error.Abort(bad)
625 raise error.Abort(bad)
626 drop = self.guard_re.sub('', self.fullseries[idx])
626 drop = self.guard_re.sub('', self.fullseries[idx])
627 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
627 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
628 self.parseseries()
628 self.parseseries()
629 self.seriesdirty = True
629 self.seriesdirty = True
630
630
631 def pushable(self, idx):
631 def pushable(self, idx):
632 if isinstance(idx, str):
632 if isinstance(idx, str):
633 idx = self.series.index(idx)
633 idx = self.series.index(idx)
634 patchguards = self.seriesguards[idx]
634 patchguards = self.seriesguards[idx]
635 if not patchguards:
635 if not patchguards:
636 return True, None
636 return True, None
637 guards = self.active()
637 guards = self.active()
638 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
638 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
639 if exactneg:
639 if exactneg:
640 return False, repr(exactneg[0])
640 return False, repr(exactneg[0])
641 pos = [g for g in patchguards if g[0] == '+']
641 pos = [g for g in patchguards if g[0] == '+']
642 exactpos = [g for g in pos if g[1:] in guards]
642 exactpos = [g for g in pos if g[1:] in guards]
643 if pos:
643 if pos:
644 if exactpos:
644 if exactpos:
645 return True, repr(exactpos[0])
645 return True, repr(exactpos[0])
646 return False, ' '.join(map(repr, pos))
646 return False, ' '.join(map(repr, pos))
647 return True, ''
647 return True, ''
648
648
649 def explainpushable(self, idx, all_patches=False):
649 def explainpushable(self, idx, all_patches=False):
650 if all_patches:
650 if all_patches:
651 write = self.ui.write
651 write = self.ui.write
652 else:
652 else:
653 write = self.ui.warn
653 write = self.ui.warn
654
654
655 if all_patches or self.ui.verbose:
655 if all_patches or self.ui.verbose:
656 if isinstance(idx, str):
656 if isinstance(idx, str):
657 idx = self.series.index(idx)
657 idx = self.series.index(idx)
658 pushable, why = self.pushable(idx)
658 pushable, why = self.pushable(idx)
659 if all_patches and pushable:
659 if all_patches and pushable:
660 if why is None:
660 if why is None:
661 write(_('allowing %s - no guards in effect\n') %
661 write(_('allowing %s - no guards in effect\n') %
662 self.series[idx])
662 self.series[idx])
663 else:
663 else:
664 if not why:
664 if not why:
665 write(_('allowing %s - no matching negative guards\n') %
665 write(_('allowing %s - no matching negative guards\n') %
666 self.series[idx])
666 self.series[idx])
667 else:
667 else:
668 write(_('allowing %s - guarded by %s\n') %
668 write(_('allowing %s - guarded by %s\n') %
669 (self.series[idx], why))
669 (self.series[idx], why))
670 if not pushable:
670 if not pushable:
671 if why:
671 if why:
672 write(_('skipping %s - guarded by %s\n') %
672 write(_('skipping %s - guarded by %s\n') %
673 (self.series[idx], why))
673 (self.series[idx], why))
674 else:
674 else:
675 write(_('skipping %s - no matching guards\n') %
675 write(_('skipping %s - no matching guards\n') %
676 self.series[idx])
676 self.series[idx])
677
677
678 def savedirty(self):
678 def savedirty(self):
679 def writelist(items, path):
679 def writelist(items, path):
680 fp = self.opener(path, 'w')
680 fp = self.opener(path, 'w')
681 for i in items:
681 for i in items:
682 fp.write("%s\n" % i)
682 fp.write("%s\n" % i)
683 fp.close()
683 fp.close()
684 if self.applieddirty:
684 if self.applieddirty:
685 writelist(map(str, self.applied), self.statuspath)
685 writelist(map(str, self.applied), self.statuspath)
686 self.applieddirty = False
686 self.applieddirty = False
687 if self.seriesdirty:
687 if self.seriesdirty:
688 writelist(self.fullseries, self.seriespath)
688 writelist(self.fullseries, self.seriespath)
689 self.seriesdirty = False
689 self.seriesdirty = False
690 if self.guardsdirty:
690 if self.guardsdirty:
691 writelist(self.activeguards, self.guardspath)
691 writelist(self.activeguards, self.guardspath)
692 self.guardsdirty = False
692 self.guardsdirty = False
693 if self.added:
693 if self.added:
694 qrepo = self.qrepo()
694 qrepo = self.qrepo()
695 if qrepo:
695 if qrepo:
696 qrepo[None].add(f for f in self.added if f not in qrepo[None])
696 qrepo[None].add(f for f in self.added if f not in qrepo[None])
697 self.added = []
697 self.added = []
698
698
699 def removeundo(self, repo):
699 def removeundo(self, repo):
700 undo = repo.sjoin('undo')
700 undo = repo.sjoin('undo')
701 if not os.path.exists(undo):
701 if not os.path.exists(undo):
702 return
702 return
703 try:
703 try:
704 os.unlink(undo)
704 os.unlink(undo)
705 except OSError as inst:
705 except OSError as inst:
706 self.ui.warn(_('error removing undo: %s\n') % str(inst))
706 self.ui.warn(_('error removing undo: %s\n') % str(inst))
707
707
708 def backup(self, repo, files, copy=False):
708 def backup(self, repo, files, copy=False):
709 # backup local changes in --force case
709 # backup local changes in --force case
710 for f in sorted(files):
710 for f in sorted(files):
711 absf = repo.wjoin(f)
711 absf = repo.wjoin(f)
712 if os.path.lexists(absf):
712 if os.path.lexists(absf):
713 self.ui.note(_('saving current version of %s as %s\n') %
713 self.ui.note(_('saving current version of %s as %s\n') %
714 (f, scmutil.origpath(self.ui, repo, f)))
714 (f, scmutil.origpath(self.ui, repo, f)))
715
715
716 absorig = scmutil.origpath(self.ui, repo, absf)
716 absorig = scmutil.origpath(self.ui, repo, absf)
717 if copy:
717 if copy:
718 util.copyfile(absf, absorig)
718 util.copyfile(absf, absorig)
719 else:
719 else:
720 util.rename(absf, absorig)
720 util.rename(absf, absorig)
721
721
722 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
722 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
723 fp=None, changes=None, opts={}):
723 fp=None, changes=None, opts={}):
724 stat = opts.get('stat')
724 stat = opts.get('stat')
725 m = scmutil.match(repo[node1], files, opts)
725 m = scmutil.match(repo[node1], files, opts)
726 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
726 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
727 changes, stat, fp)
727 changes, stat, fp)
728
728
729 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
729 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
730 # first try just applying the patch
730 # first try just applying the patch
731 (err, n) = self.apply(repo, [patch], update_status=False,
731 (err, n) = self.apply(repo, [patch], update_status=False,
732 strict=True, merge=rev)
732 strict=True, merge=rev)
733
733
734 if err == 0:
734 if err == 0:
735 return (err, n)
735 return (err, n)
736
736
737 if n is None:
737 if n is None:
738 raise error.Abort(_("apply failed for patch %s") % patch)
738 raise error.Abort(_("apply failed for patch %s") % patch)
739
739
740 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
740 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
741
741
742 # apply failed, strip away that rev and merge.
742 # apply failed, strip away that rev and merge.
743 hg.clean(repo, head)
743 hg.clean(repo, head)
744 strip(self.ui, repo, [n], update=False, backup=False)
744 strip(self.ui, repo, [n], update=False, backup=False)
745
745
746 ctx = repo[rev]
746 ctx = repo[rev]
747 ret = hg.merge(repo, rev)
747 ret = hg.merge(repo, rev)
748 if ret:
748 if ret:
749 raise error.Abort(_("update returned %d") % ret)
749 raise error.Abort(_("update returned %d") % ret)
750 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
750 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
751 if n is None:
751 if n is None:
752 raise error.Abort(_("repo commit failed"))
752 raise error.Abort(_("repo commit failed"))
753 try:
753 try:
754 ph = patchheader(mergeq.join(patch), self.plainmode)
754 ph = patchheader(mergeq.join(patch), self.plainmode)
755 except Exception:
755 except Exception:
756 raise error.Abort(_("unable to read %s") % patch)
756 raise error.Abort(_("unable to read %s") % patch)
757
757
758 diffopts = self.patchopts(diffopts, patch)
758 diffopts = self.patchopts(diffopts, patch)
759 patchf = self.opener(patch, "w")
759 patchf = self.opener(patch, "w")
760 comments = str(ph)
760 comments = str(ph)
761 if comments:
761 if comments:
762 patchf.write(comments)
762 patchf.write(comments)
763 self.printdiff(repo, diffopts, head, n, fp=patchf)
763 self.printdiff(repo, diffopts, head, n, fp=patchf)
764 patchf.close()
764 patchf.close()
765 self.removeundo(repo)
765 self.removeundo(repo)
766 return (0, n)
766 return (0, n)
767
767
768 def qparents(self, repo, rev=None):
768 def qparents(self, repo, rev=None):
769 """return the mq handled parent or p1
769 """return the mq handled parent or p1
770
770
771 In some case where mq get himself in being the parent of a merge the
771 In some case where mq get himself in being the parent of a merge the
772 appropriate parent may be p2.
772 appropriate parent may be p2.
773 (eg: an in progress merge started with mq disabled)
773 (eg: an in progress merge started with mq disabled)
774
774
775 If no parent are managed by mq, p1 is returned.
775 If no parent are managed by mq, p1 is returned.
776 """
776 """
777 if rev is None:
777 if rev is None:
778 (p1, p2) = repo.dirstate.parents()
778 (p1, p2) = repo.dirstate.parents()
779 if p2 == nullid:
779 if p2 == nullid:
780 return p1
780 return p1
781 if not self.applied:
781 if not self.applied:
782 return None
782 return None
783 return self.applied[-1].node
783 return self.applied[-1].node
784 p1, p2 = repo.changelog.parents(rev)
784 p1, p2 = repo.changelog.parents(rev)
785 if p2 != nullid and p2 in [x.node for x in self.applied]:
785 if p2 != nullid and p2 in [x.node for x in self.applied]:
786 return p2
786 return p2
787 return p1
787 return p1
788
788
789 def mergepatch(self, repo, mergeq, series, diffopts):
789 def mergepatch(self, repo, mergeq, series, diffopts):
790 if not self.applied:
790 if not self.applied:
791 # each of the patches merged in will have two parents. This
791 # each of the patches merged in will have two parents. This
792 # can confuse the qrefresh, qdiff, and strip code because it
792 # can confuse the qrefresh, qdiff, and strip code because it
793 # needs to know which parent is actually in the patch queue.
793 # needs to know which parent is actually in the patch queue.
794 # so, we insert a merge marker with only one parent. This way
794 # so, we insert a merge marker with only one parent. This way
795 # the first patch in the queue is never a merge patch
795 # the first patch in the queue is never a merge patch
796 #
796 #
797 pname = ".hg.patches.merge.marker"
797 pname = ".hg.patches.merge.marker"
798 n = newcommit(repo, None, '[mq]: merge marker', force=True)
798 n = newcommit(repo, None, '[mq]: merge marker', force=True)
799 self.removeundo(repo)
799 self.removeundo(repo)
800 self.applied.append(statusentry(n, pname))
800 self.applied.append(statusentry(n, pname))
801 self.applieddirty = True
801 self.applieddirty = True
802
802
803 head = self.qparents(repo)
803 head = self.qparents(repo)
804
804
805 for patch in series:
805 for patch in series:
806 patch = mergeq.lookup(patch, strict=True)
806 patch = mergeq.lookup(patch, strict=True)
807 if not patch:
807 if not patch:
808 self.ui.warn(_("patch %s does not exist\n") % patch)
808 self.ui.warn(_("patch %s does not exist\n") % patch)
809 return (1, None)
809 return (1, None)
810 pushable, reason = self.pushable(patch)
810 pushable, reason = self.pushable(patch)
811 if not pushable:
811 if not pushable:
812 self.explainpushable(patch, all_patches=True)
812 self.explainpushable(patch, all_patches=True)
813 continue
813 continue
814 info = mergeq.isapplied(patch)
814 info = mergeq.isapplied(patch)
815 if not info:
815 if not info:
816 self.ui.warn(_("patch %s is not applied\n") % patch)
816 self.ui.warn(_("patch %s is not applied\n") % patch)
817 return (1, None)
817 return (1, None)
818 rev = info[1]
818 rev = info[1]
819 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
819 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
820 if head:
820 if head:
821 self.applied.append(statusentry(head, patch))
821 self.applied.append(statusentry(head, patch))
822 self.applieddirty = True
822 self.applieddirty = True
823 if err:
823 if err:
824 return (err, head)
824 return (err, head)
825 self.savedirty()
825 self.savedirty()
826 return (0, head)
826 return (0, head)
827
827
def patch(self, repo, patchfile):
    """Apply patchfile to the working directory.

    patchfile: name of patch file

    Returns a (success, touched-files, fuzz) triple.  On failure the
    exception text is logged (verbosely traced with -v) and success
    is False.
    """
    touched = set()
    try:
        fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
                              files=touched, eolmode=None)
    except Exception as err:
        self.ui.note(str(err) + '\n')
        if not self.ui.verbose:
            self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
        self.ui.traceback()
        return (False, list(touched), False)
    return (True, list(touched), fuzz)
842
842
def apply(self, repo, series, list=False, update_status=True,
          strict=False, patchdir=None, merge=None, all_files=None,
          tobackup=None, keepchanges=False):
    """Apply a series of patches inside a single 'qpush' transaction.

    Locking/transaction wrapper around _apply(): takes the wlock, the
    store lock and a transaction, delegates to _apply(), then either
    commits the transaction and saves the queue state (success) or
    aborts and invalidates the in-memory state (failure).

    note: the 'list' parameter shadows the builtin; kept as-is for
    keyword-argument compatibility with existing callers.
    """
    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("qpush")
        try:
            ret = self._apply(repo, series, list, update_status,
                              strict, patchdir, merge, all_files=all_files,
                              tobackup=tobackup, keepchanges=keepchanges)
            tr.close()
            self.savedirty()
            return ret
        except AbortNoCleanup:
            # deliberate: commit what was done so far instead of
            # rolling back, then propagate the abort
            tr.close()
            self.savedirty()
            raise
        except: # re-raises
            try:
                tr.abort()
            finally:
                # in-memory queue state may no longer match disk
                self.invalidate()
            raise
    finally:
        release(tr, lock, wlock)
        self.removeundo(repo)
871
871
def _apply(self, repo, series, list=False, update_status=True,
           strict=False, patchdir=None, merge=None, all_files=None,
           tobackup=None, keepchanges=False):
    """returns (error, hash)

    error = 1 for unable to read, 2 for patch failed, 3 for patch
    fuzz. tobackup is None or a set of files to backup before they
    are modified by a patch.
    """
    # TODO unify with commands.py
    if not patchdir:
        patchdir = self.path
    err = 0
    n = None
    for patchname in series:
        # skip patches guarded off; explain why when not pushable
        pushable, reason = self.pushable(patchname)
        if not pushable:
            self.explainpushable(patchname, all_patches=True)
            continue
        self.ui.status(_("applying %s\n") % patchname)
        pf = os.path.join(patchdir, patchname)

        try:
            ph = patchheader(self.join(patchname), self.plainmode)
        except IOError:
            self.ui.warn(_("unable to read %s\n") % patchname)
            err = 1
            break

        message = ph.message
        if not message:
            # The commit message should not be translated
            message = "imported patch %s\n" % patchname
        else:
            if list:
                # The commit message should not be translated
                message.append("\nimported patch %s" % patchname)
            message = '\n'.join(message)

        if ph.haspatch:
            if tobackup:
                # back up only the tracked-to-be-touched files;
                # with --keep-changes any overlap is a hard stop
                touched = patchmod.changedfiles(self.ui, repo, pf)
                touched = set(touched) & tobackup
                if touched and keepchanges:
                    raise AbortNoCleanup(
                        _("conflicting local changes found"),
                        hint=_("did you forget to qrefresh?"))
                self.backup(repo, touched, copy=True)
                tobackup = tobackup - touched
            (patcherr, files, fuzz) = self.patch(repo, pf)
            if all_files is not None:
                all_files.update(files)
            patcherr = not patcherr
        else:
            self.ui.warn(_("patch %s is empty\n") % patchname)
            patcherr, files, fuzz = 0, [], 0

        if merge and files:
            # Mark as removed/merged and update dirstate parent info
            removed = []
            merged = []
            for f in files:
                if os.path.lexists(repo.wjoin(f)):
                    merged.append(f)
                else:
                    removed.append(f)
            repo.dirstate.beginparentchange()
            for f in removed:
                repo.dirstate.remove(f)
            for f in merged:
                repo.dirstate.merge(f)
            p1, p2 = repo.dirstate.parents()
            repo.setparents(p1, merge)
            repo.dirstate.endparentchange()

        if all_files and '.hgsubstate' in all_files:
            wctx = repo[None]
            pctx = repo['.']
            overwrite = False
            mergedsubstate = subrepo.submerge(repo, pctx, wctx, wctx,
                                              overwrite)
            files += mergedsubstate.keys()

        match = scmutil.matchfiles(repo, files or [])
        oldtip = repo['tip']
        n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                      force=True)
        # an unchanged tip means the commit duplicated an existing child
        if repo['tip'] == oldtip:
            raise error.Abort(_("qpush exactly duplicates child changeset"))
        if n is None:
            raise error.Abort(_("repository commit failed"))

        if update_status:
            self.applied.append(statusentry(n, patchname))

        if patcherr:
            self.ui.warn(_("patch failed, rejects left in working "
                           "directory\n"))
            err = 2
            break

        if fuzz and strict:
            self.ui.warn(_("fuzz found when applying patch, stopping\n"))
            err = 3
            break
    return (err, n)
978
978
def _cleanup(self, patches, numrevs, keep=False):
    """Drop patches from the series (and their files, unless keep).

    patches: patch names to remove from the series file.
    numrevs: number of leading self.applied entries covered by the
             operation; those entries are removed from the applied
             list and their nodes are returned.
    """
    if not keep:
        # delete the patch files and forget them in the queue repo
        r = self.qrepo()
        if r:
            r[None].forget(patches)
        for p in patches:
            try:
                os.unlink(self.join(p))
            except OSError as inst:
                # an already-missing patch file is fine
                if inst.errno != errno.ENOENT:
                    raise

    qfinished = []
    if numrevs:
        qfinished = self.applied[:numrevs]
        del self.applied[:numrevs]
        self.applieddirty = True

    unknown = []

    # delete highest indexes first so earlier indexes stay valid
    for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                         reverse=True):
        if i is not None:
            del self.fullseries[i]
        else:
            unknown.append(p)

    if unknown:
        if numrevs:
            rev = dict((entry.name, entry.node) for entry in qfinished)
            for p in unknown:
                msg = _('revision %s refers to unknown patches: %s\n')
                self.ui.warn(msg % (short(rev[p]), p))
        else:
            msg = _('unknown patches: %s\n')
            raise error.Abort(''.join(msg % p for p in unknown))

    self.parseseries()
    self.seriesdirty = True
    return [entry.node for entry in qfinished]
1019
1019
1020 def _revpatches(self, repo, revs):
1020 def _revpatches(self, repo, revs):
1021 firstrev = repo[self.applied[0].node].rev()
1021 firstrev = repo[self.applied[0].node].rev()
1022 patches = []
1022 patches = []
1023 for i, rev in enumerate(revs):
1023 for i, rev in enumerate(revs):
1024
1024
1025 if rev < firstrev:
1025 if rev < firstrev:
1026 raise error.Abort(_('revision %d is not managed') % rev)
1026 raise error.Abort(_('revision %d is not managed') % rev)
1027
1027
1028 ctx = repo[rev]
1028 ctx = repo[rev]
1029 base = self.applied[i].node
1029 base = self.applied[i].node
1030 if ctx.node() != base:
1030 if ctx.node() != base:
1031 msg = _('cannot delete revision %d above applied patches')
1031 msg = _('cannot delete revision %d above applied patches')
1032 raise error.Abort(msg % rev)
1032 raise error.Abort(msg % rev)
1033
1033
1034 patch = self.applied[i].name
1034 patch = self.applied[i].name
1035 for fmt in ('[mq]: %s', 'imported patch %s'):
1035 for fmt in ('[mq]: %s', 'imported patch %s'):
1036 if ctx.description() == fmt % patch:
1036 if ctx.description() == fmt % patch:
1037 msg = _('patch %s finalized without changeset message\n')
1037 msg = _('patch %s finalized without changeset message\n')
1038 repo.ui.status(msg % patch)
1038 repo.ui.status(msg % patch)
1039 break
1039 break
1040
1040
1041 patches.append(patch)
1041 patches.append(patch)
1042 return patches
1042 return patches
1043
1043
def finish(self, repo, revs):
    """Move the applied changesets in revs out of mq control.

    After removing the patches, advance the phase boundary so the
    finished changesets stop being secret when the mq.secret option
    is set.
    """
    # Manually trigger phase computation to ensure phasedefaults is
    # executed before we remove the patches.
    repo._phasecache
    patches = self._revpatches(repo, sorted(revs))
    qfinished = self._cleanup(patches, len(patches))
    if qfinished and repo.ui.configbool('mq', 'secret', False):
        # only use this logic when the secret option is added
        oldqbase = repo[qfinished[0]]
        tphase = repo.ui.config('phases', 'new-commit', phases.draft)
        if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
            with repo.transaction('qfinish') as tr:
                phases.advanceboundary(repo, tr, tphase, qfinished)
1057
1057
def delete(self, repo, patches, opts):
    """Delete unapplied patches by name and/or finished ones by --rev.

    patches: patch names (resolved strictly, deduplicated); applied
    patches and names absent from the series file abort.
    opts: 'rev' selects applied revisions to convert via
    _revpatches(); 'keep' leaves the patch files on disk.
    """
    if not patches and not opts.get('rev'):
        raise error.Abort(_('qdelete requires at least one revision or '
                            'patch name'))

    realpatches = []
    for patch in patches:
        patch = self.lookup(patch, strict=True)
        info = self.isapplied(patch)
        if info:
            raise error.Abort(_("cannot delete applied patch %s") % patch)
        if patch not in self.series:
            raise error.Abort(_("patch %s not in series file") % patch)
        if patch not in realpatches:
            realpatches.append(patch)

    numrevs = 0
    if opts.get('rev'):
        if not self.applied:
            raise error.Abort(_('no patches applied'))
        revs = scmutil.revrange(repo, opts.get('rev'))
        revs.sort()
        revpatches = self._revpatches(repo, revs)
        realpatches += revpatches
        numrevs = len(revpatches)

    self._cleanup(realpatches, numrevs, opts.get('keep'))
1085
1085
def checktoppatch(self, repo):
    """check that working directory is at qtip

    Returns (node, name) of the top applied patch, or (None, None)
    when no patch is applied.
    """
    if not self.applied:
        return None, None
    top = self.applied[-1]
    if repo.dirstate.p1() != top.node:
        raise error.Abort(_("working directory revision is not qtip"))
    return top.node, top.name
1095
1095
def putsubstate2changes(self, substatestate, changes):
    """File '.hgsubstate' into the right bucket of a status triple.

    substatestate: the dirstate character of '.hgsubstate'.
    changes: (modified, added, removed, ...) lists; mutated in place,
    but only when '.hgsubstate' is not already listed in the first
    three buckets.
    """
    if any('.hgsubstate' in files for files in changes[:3]):
        return  # already listed up
    # not yet listed up
    if substatestate in 'a?':
        bucket = changes[1]      # added / unknown
    elif substatestate in 'r':
        bucket = changes[2]      # removed
    else:
        bucket = changes[0]      # modified
    bucket.append('.hgsubstate')
1107
1107
def checklocalchanges(self, repo, force=False, refresh=True):
    """Check for local changes, delegating to the module-level
    checklocalchanges() helper (defined elsewhere in this file).

    When refresh is True the failure message suggests running
    qrefresh first.
    """
    excsuffix = ''
    if refresh:
        excsuffix = ', qrefresh first'
        # plain versions for i18n tool to detect them
        _("local changes found, qrefresh first")
        _("local changed subrepos found, qrefresh first")
    return checklocalchanges(repo, force, excsuffix)
1116
1116
_reserved = ('series', 'status', 'guards', '.', '..')
def checkreservedname(self, name):
    """Abort if ``name`` may not be used as a patch name: reserved
    queue file names, '.hg'/'.mq' prefixes, and characters that would
    corrupt the series/status files are rejected."""
    if name in self._reserved:
        raise error.Abort(_('"%s" cannot be used as the name of a patch')
                          % name)
    for prefix in ('.hg', '.mq'):
        if name.startswith(prefix):
            raise error.Abort(_('patch name cannot begin with "%s"')
                              % prefix)
    for ch in ('#', ':', '\r', '\n'):
        if ch in name:
            raise error.Abort(_('%r cannot be used in the name of a patch')
                              % ch)
1130
1130
def checkpatchname(self, name, force=False):
    """Abort unless ``name`` is acceptable for a new patch.

    Runs the reserved-name checks, then (unless force) refuses names
    whose patch file already exists.
    """
    self.checkreservedname(name)
    if force:
        return
    path = self.join(name)
    if os.path.exists(path):
        if os.path.isdir(path):
            raise error.Abort(_('"%s" already exists as a directory')
                              % name)
        raise error.Abort(_('patch "%s" already exists') % name)
1139
1139
def makepatchname(self, title, fallbackname):
    """Return a suitable filename for title, adding a suffix to make
    it unique in the existing list.

    title: free-form text to slugify (lowercased, runs of
    whitespace/punctuation collapsed to '_', truncated to 75 chars).
    fallbackname: used when the slug is empty or itself reserved.
    Appends '__<i>' until the name is absent from fullseries and
    passes checkpatchname().
    """
    # raw string: '\s'/'\W' are regex escapes, not string escapes;
    # the non-raw form is an invalid escape sequence in Python 3
    namebase = re.sub(r'[\s\W_]+', '_', title.lower()).strip('_')
    namebase = namebase[:75] # avoid too long name (issue5117)
    if namebase:
        try:
            self.checkreservedname(namebase)
        except error.Abort:
            namebase = fallbackname
    else:
        namebase = fallbackname
    name = namebase
    i = 0
    while True:
        if name not in self.fullseries:
            try:
                self.checkpatchname(name)
                break
            except error.Abort:
                pass
        i += 1
        name = '%s__%s' % (namebase, i)
    return name
1164
1164
def checkkeepchanges(self, keepchanges, force):
    """Abort when --force and --keep-changes are used together."""
    if not (force and keepchanges):
        return
    raise error.Abort(_('cannot use both --force and --keep-changes'))
1168
1168
def new(self, repo, patchfn, *pats, **opts):
    """Create a new patch named patchfn from the current changes.

    options:
    msg: a string or a no-argument function returning a string

    Commits the selected changes, records the patch in the series and
    applied lists, and writes the patch file.  On commit failure the
    repository is rolled back; on any later failure the patch file is
    unlinked.
    """
    msg = opts.get('msg')
    edit = opts.get('edit')
    editform = opts.get('editform', 'mq.qnew')
    user = opts.get('user')
    date = opts.get('date')
    if date:
        date = util.parsedate(date)
    diffopts = self.diffopts({'git': opts.get('git')})
    if opts.get('checkname', True):
        self.checkpatchname(patchfn)
    inclsubs = checksubstate(repo)
    if inclsubs:
        substatestate = repo.dirstate['.hgsubstate']
    if opts.get('include') or opts.get('exclude') or pats:
        # detect missing files in pats
        def badfn(f, msg):
            if f != '.hgsubstate': # .hgsubstate is auto-created
                raise error.Abort('%s: %s' % (f, msg))
        match = scmutil.match(repo[None], pats, opts, badfn=badfn)
        changes = repo.status(match=match)
    else:
        changes = self.checklocalchanges(repo, force=True)
    commitfiles = list(inclsubs)
    for files in changes[:3]:
        commitfiles.extend(files)
    match = scmutil.matchfiles(repo, commitfiles)
    if len(repo[None].parents()) > 1:
        raise error.Abort(_('cannot manage merge changesets'))
    self.checktoppatch(repo)
    insert = self.fullseriesend()
    with repo.wlock():
        try:
            # if patch file write fails, abort early
            p = self.opener(patchfn, "w")
        except IOError as e:
            raise error.Abort(_('cannot write patch "%s": %s')
                              % (patchfn, e.strerror))
        try:
            defaultmsg = "[mq]: %s" % patchfn
            editor = cmdutil.getcommiteditor(editform=editform)
            if edit:
                def finishdesc(desc):
                    # empty edited message falls back to the default
                    if desc.rstrip():
                        return desc
                    else:
                        return defaultmsg
                # i18n: this message is shown in editor with "HG: " prefix
                extramsg = _('Leave message empty to use default message.')
                editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
                commitmsg = msg
            else:
                commitmsg = msg or defaultmsg

            n = newcommit(repo, None, commitmsg, user, date, match=match,
                          force=True, editor=editor)
            if n is None:
                raise error.Abort(_("repo commit failed"))
            try:
                # record the new patch in queue state, then write the
                # patch header and diff to the patch file
                self.fullseries[insert:insert] = [patchfn]
                self.applied.append(statusentry(n, patchfn))
                self.parseseries()
                self.seriesdirty = True
                self.applieddirty = True
                nctx = repo[n]
                ph = patchheader(self.join(patchfn), self.plainmode)
                if user:
                    ph.setuser(user)
                if date:
                    ph.setdate('%s %s' % date)
                ph.setparent(hex(nctx.p1().node()))
                msg = nctx.description().strip()
                if msg == defaultmsg.strip():
                    msg = ''
                ph.setmessage(msg)
                p.write(str(ph))
                if commitfiles:
                    parent = self.qparents(repo, n)
                    if inclsubs:
                        self.putsubstate2changes(substatestate, changes)
                    chunks = patchmod.diff(repo, node1=parent, node2=n,
                                           changes=changes, opts=diffopts)
                    for chunk in chunks:
                        p.write(chunk)
                p.close()
                r = self.qrepo()
                if r:
                    r[None].add([patchfn])
            except: # re-raises
                repo.rollback()
                raise
        except Exception:
            # undo the early patch-file creation
            patchpath = self.join(patchfn)
            try:
                os.unlink(patchpath)
            except OSError:
                self.ui.warn(_('error unlinking %s\n') % patchpath)
            raise
    self.removeundo(repo)
1273
1273
def isapplied(self, patch):
    """Return (index, node, name) for an applied patch, else None."""
    return next(((idx, entry.node, entry.name)
                 for idx, entry in enumerate(self.applied)
                 if entry.name == patch), None)
1280
1280
# if the exact patch name does not exist, we try a few
# variations. If strict is passed, we try only #1
#
# 1) a number (as string) to indicate an offset in the series file
# 2) a unique substring of the patch name was given
# 3) patchname[-+]num to indicate an offset in the series file
def lookup(self, patch, strict=False):
    """Resolve a user-supplied patch identifier to a series entry.

    Raises error.Abort when nothing matches.
    """
    def partialname(s):
        # exact series entry, else a unique substring match, else the
        # qtip/qbase aliases; ambiguous substrings warn and return None
        if s in self.series:
            return s
        matches = [x for x in self.series if s in x]
        if len(matches) > 1:
            self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
            for m in matches:
                self.ui.warn(' %s\n' % m)
            return None
        if matches:
            return matches[0]
        if self.series and self.applied:
            if s == 'qtip':
                return self.series[self.seriesend(True) - 1]
            if s == 'qbase':
                return self.series[0]
        return None

    if patch in self.series:
        return patch

    # an existing file of that name short-circuits the fuzzy matching
    if not os.path.isfile(self.join(patch)):
        try:
            sno = int(patch)
        except (ValueError, OverflowError):
            pass
        else:
            # numeric lookup: index into the series (negative allowed)
            if -len(self.series) <= sno < len(self.series):
                return self.series[sno]

        if not strict:
            res = partialname(patch)
            if res:
                return res
            # name-minus-offset: step backwards from a resolved name
            minus = patch.rfind('-')
            if minus >= 0:
                res = partialname(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # name-plus-offset: step forwards from a resolved name
            plus = patch.rfind('+')
            if plus >= 0:
                res = partialname(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
    raise error.Abort(_("patch %s not in series") % patch)
1347
1347
1348 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1348 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1349 all=False, move=False, exact=False, nobackup=False,
1349 all=False, move=False, exact=False, nobackup=False,
1350 keepchanges=False):
1350 keepchanges=False):
1351 self.checkkeepchanges(keepchanges, force)
1351 self.checkkeepchanges(keepchanges, force)
1352 diffopts = self.diffopts()
1352 diffopts = self.diffopts()
1353 with repo.wlock():
1353 with repo.wlock():
1354 heads = []
1354 heads = []
1355 for hs in repo.branchmap().itervalues():
1355 for hs in repo.branchmap().itervalues():
1356 heads.extend(hs)
1356 heads.extend(hs)
1357 if not heads:
1357 if not heads:
1358 heads = [nullid]
1358 heads = [nullid]
1359 if repo.dirstate.p1() not in heads and not exact:
1359 if repo.dirstate.p1() not in heads and not exact:
1360 self.ui.status(_("(working directory not at a head)\n"))
1360 self.ui.status(_("(working directory not at a head)\n"))
1361
1361
1362 if not self.series:
1362 if not self.series:
1363 self.ui.warn(_('no patches in series\n'))
1363 self.ui.warn(_('no patches in series\n'))
1364 return 0
1364 return 0
1365
1365
1366 # Suppose our series file is: A B C and the current 'top'
1366 # Suppose our series file is: A B C and the current 'top'
1367 # patch is B. qpush C should be performed (moving forward)
1367 # patch is B. qpush C should be performed (moving forward)
1368 # qpush B is a NOP (no change) qpush A is an error (can't
1368 # qpush B is a NOP (no change) qpush A is an error (can't
1369 # go backwards with qpush)
1369 # go backwards with qpush)
1370 if patch:
1370 if patch:
1371 patch = self.lookup(patch)
1371 patch = self.lookup(patch)
1372 info = self.isapplied(patch)
1372 info = self.isapplied(patch)
1373 if info and info[0] >= len(self.applied) - 1:
1373 if info and info[0] >= len(self.applied) - 1:
1374 self.ui.warn(
1374 self.ui.warn(
1375 _('qpush: %s is already at the top\n') % patch)
1375 _('qpush: %s is already at the top\n') % patch)
1376 return 0
1376 return 0
1377
1377
1378 pushable, reason = self.pushable(patch)
1378 pushable, reason = self.pushable(patch)
1379 if pushable:
1379 if pushable:
1380 if self.series.index(patch) < self.seriesend():
1380 if self.series.index(patch) < self.seriesend():
1381 raise error.Abort(
1381 raise error.Abort(
1382 _("cannot push to a previous patch: %s") % patch)
1382 _("cannot push to a previous patch: %s") % patch)
1383 else:
1383 else:
1384 if reason:
1384 if reason:
1385 reason = _('guarded by %s') % reason
1385 reason = _('guarded by %s') % reason
1386 else:
1386 else:
1387 reason = _('no matching guards')
1387 reason = _('no matching guards')
1388 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1388 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1389 return 1
1389 return 1
1390 elif all:
1390 elif all:
1391 patch = self.series[-1]
1391 patch = self.series[-1]
1392 if self.isapplied(patch):
1392 if self.isapplied(patch):
1393 self.ui.warn(_('all patches are currently applied\n'))
1393 self.ui.warn(_('all patches are currently applied\n'))
1394 return 0
1394 return 0
1395
1395
1396 # Following the above example, starting at 'top' of B:
1396 # Following the above example, starting at 'top' of B:
1397 # qpush should be performed (pushes C), but a subsequent
1397 # qpush should be performed (pushes C), but a subsequent
1398 # qpush without an argument is an error (nothing to
1398 # qpush without an argument is an error (nothing to
1399 # apply). This allows a loop of "...while hg qpush..." to
1399 # apply). This allows a loop of "...while hg qpush..." to
1400 # work as it detects an error when done
1400 # work as it detects an error when done
1401 start = self.seriesend()
1401 start = self.seriesend()
1402 if start == len(self.series):
1402 if start == len(self.series):
1403 self.ui.warn(_('patch series already fully applied\n'))
1403 self.ui.warn(_('patch series already fully applied\n'))
1404 return 1
1404 return 1
1405 if not force and not keepchanges:
1405 if not force and not keepchanges:
1406 self.checklocalchanges(repo, refresh=self.applied)
1406 self.checklocalchanges(repo, refresh=self.applied)
1407
1407
1408 if exact:
1408 if exact:
1409 if keepchanges:
1409 if keepchanges:
1410 raise error.Abort(
1410 raise error.Abort(
1411 _("cannot use --exact and --keep-changes together"))
1411 _("cannot use --exact and --keep-changes together"))
1412 if move:
1412 if move:
1413 raise error.Abort(_('cannot use --exact and --move '
1413 raise error.Abort(_('cannot use --exact and --move '
1414 'together'))
1414 'together'))
1415 if self.applied:
1415 if self.applied:
1416 raise error.Abort(_('cannot push --exact with applied '
1416 raise error.Abort(_('cannot push --exact with applied '
1417 'patches'))
1417 'patches'))
1418 root = self.series[start]
1418 root = self.series[start]
1419 target = patchheader(self.join(root), self.plainmode).parent
1419 target = patchheader(self.join(root), self.plainmode).parent
1420 if not target:
1420 if not target:
1421 raise error.Abort(
1421 raise error.Abort(
1422 _("%s does not have a parent recorded") % root)
1422 _("%s does not have a parent recorded") % root)
1423 if not repo[target] == repo['.']:
1423 if not repo[target] == repo['.']:
1424 hg.update(repo, target)
1424 hg.update(repo, target)
1425
1425
1426 if move:
1426 if move:
1427 if not patch:
1427 if not patch:
1428 raise error.Abort(_("please specify the patch to move"))
1428 raise error.Abort(_("please specify the patch to move"))
1429 for fullstart, rpn in enumerate(self.fullseries):
1429 for fullstart, rpn in enumerate(self.fullseries):
1430 # strip markers for patch guards
1430 # strip markers for patch guards
1431 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1431 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1432 break
1432 break
1433 for i, rpn in enumerate(self.fullseries[fullstart:]):
1433 for i, rpn in enumerate(self.fullseries[fullstart:]):
1434 # strip markers for patch guards
1434 # strip markers for patch guards
1435 if self.guard_re.split(rpn, 1)[0] == patch:
1435 if self.guard_re.split(rpn, 1)[0] == patch:
1436 break
1436 break
1437 index = fullstart + i
1437 index = fullstart + i
1438 assert index < len(self.fullseries)
1438 assert index < len(self.fullseries)
1439 fullpatch = self.fullseries[index]
1439 fullpatch = self.fullseries[index]
1440 del self.fullseries[index]
1440 del self.fullseries[index]
1441 self.fullseries.insert(fullstart, fullpatch)
1441 self.fullseries.insert(fullstart, fullpatch)
1442 self.parseseries()
1442 self.parseseries()
1443 self.seriesdirty = True
1443 self.seriesdirty = True
1444
1444
1445 self.applieddirty = True
1445 self.applieddirty = True
1446 if start > 0:
1446 if start > 0:
1447 self.checktoppatch(repo)
1447 self.checktoppatch(repo)
1448 if not patch:
1448 if not patch:
1449 patch = self.series[start]
1449 patch = self.series[start]
1450 end = start + 1
1450 end = start + 1
1451 else:
1451 else:
1452 end = self.series.index(patch, start) + 1
1452 end = self.series.index(patch, start) + 1
1453
1453
1454 tobackup = set()
1454 tobackup = set()
1455 if (not nobackup and force) or keepchanges:
1455 if (not nobackup and force) or keepchanges:
1456 status = self.checklocalchanges(repo, force=True)
1456 status = self.checklocalchanges(repo, force=True)
1457 if keepchanges:
1457 if keepchanges:
1458 tobackup.update(status.modified + status.added +
1458 tobackup.update(status.modified + status.added +
1459 status.removed + status.deleted)
1459 status.removed + status.deleted)
1460 else:
1460 else:
1461 tobackup.update(status.modified + status.added)
1461 tobackup.update(status.modified + status.added)
1462
1462
1463 s = self.series[start:end]
1463 s = self.series[start:end]
1464 all_files = set()
1464 all_files = set()
1465 try:
1465 try:
1466 if mergeq:
1466 if mergeq:
1467 ret = self.mergepatch(repo, mergeq, s, diffopts)
1467 ret = self.mergepatch(repo, mergeq, s, diffopts)
1468 else:
1468 else:
1469 ret = self.apply(repo, s, list, all_files=all_files,
1469 ret = self.apply(repo, s, list, all_files=all_files,
1470 tobackup=tobackup, keepchanges=keepchanges)
1470 tobackup=tobackup, keepchanges=keepchanges)
1471 except AbortNoCleanup:
1471 except AbortNoCleanup:
1472 raise
1472 raise
1473 except: # re-raises
1473 except: # re-raises
1474 self.ui.warn(_('cleaning up working directory...\n'))
1474 self.ui.warn(_('cleaning up working directory...\n'))
1475 cmdutil.revert(self.ui, repo, repo['.'],
1475 cmdutil.revert(self.ui, repo, repo['.'],
1476 repo.dirstate.parents(), no_backup=True)
1476 repo.dirstate.parents(), no_backup=True)
1477 # only remove unknown files that we know we touched or
1477 # only remove unknown files that we know we touched or
1478 # created while patching
1478 # created while patching
1479 for f in all_files:
1479 for f in all_files:
1480 if f not in repo.dirstate:
1480 if f not in repo.dirstate:
1481 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1481 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1482 self.ui.warn(_('done\n'))
1482 self.ui.warn(_('done\n'))
1483 raise
1483 raise
1484
1484
1485 if not self.applied:
1485 if not self.applied:
1486 return ret[0]
1486 return ret[0]
1487 top = self.applied[-1].name
1487 top = self.applied[-1].name
1488 if ret[0] and ret[0] > 1:
1488 if ret[0] and ret[0] > 1:
1489 msg = _("errors during apply, please fix and qrefresh %s\n")
1489 msg = _("errors during apply, please fix and qrefresh %s\n")
1490 self.ui.write(msg % top)
1490 self.ui.write(msg % top)
1491 else:
1491 else:
1492 self.ui.write(_("now at: %s\n") % top)
1492 self.ui.write(_("now at: %s\n") % top)
1493 return ret[0]
1493 return ret[0]
1494
1494
1495 def pop(self, repo, patch=None, force=False, update=True, all=False,
1495 def pop(self, repo, patch=None, force=False, update=True, all=False,
1496 nobackup=False, keepchanges=False):
1496 nobackup=False, keepchanges=False):
1497 self.checkkeepchanges(keepchanges, force)
1497 self.checkkeepchanges(keepchanges, force)
1498 with repo.wlock():
1498 with repo.wlock():
1499 if patch:
1499 if patch:
1500 # index, rev, patch
1500 # index, rev, patch
1501 info = self.isapplied(patch)
1501 info = self.isapplied(patch)
1502 if not info:
1502 if not info:
1503 patch = self.lookup(patch)
1503 patch = self.lookup(patch)
1504 info = self.isapplied(patch)
1504 info = self.isapplied(patch)
1505 if not info:
1505 if not info:
1506 raise error.Abort(_("patch %s is not applied") % patch)
1506 raise error.Abort(_("patch %s is not applied") % patch)
1507
1507
1508 if not self.applied:
1508 if not self.applied:
1509 # Allow qpop -a to work repeatedly,
1509 # Allow qpop -a to work repeatedly,
1510 # but not qpop without an argument
1510 # but not qpop without an argument
1511 self.ui.warn(_("no patches applied\n"))
1511 self.ui.warn(_("no patches applied\n"))
1512 return not all
1512 return not all
1513
1513
1514 if all:
1514 if all:
1515 start = 0
1515 start = 0
1516 elif patch:
1516 elif patch:
1517 start = info[0] + 1
1517 start = info[0] + 1
1518 else:
1518 else:
1519 start = len(self.applied) - 1
1519 start = len(self.applied) - 1
1520
1520
1521 if start >= len(self.applied):
1521 if start >= len(self.applied):
1522 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1522 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1523 return
1523 return
1524
1524
1525 if not update:
1525 if not update:
1526 parents = repo.dirstate.parents()
1526 parents = repo.dirstate.parents()
1527 rr = [x.node for x in self.applied]
1527 rr = [x.node for x in self.applied]
1528 for p in parents:
1528 for p in parents:
1529 if p in rr:
1529 if p in rr:
1530 self.ui.warn(_("qpop: forcing dirstate update\n"))
1530 self.ui.warn(_("qpop: forcing dirstate update\n"))
1531 update = True
1531 update = True
1532 else:
1532 else:
1533 parents = [p.node() for p in repo[None].parents()]
1533 parents = [p.node() for p in repo[None].parents()]
1534 needupdate = False
1534 needupdate = False
1535 for entry in self.applied[start:]:
1535 for entry in self.applied[start:]:
1536 if entry.node in parents:
1536 if entry.node in parents:
1537 needupdate = True
1537 needupdate = True
1538 break
1538 break
1539 update = needupdate
1539 update = needupdate
1540
1540
1541 tobackup = set()
1541 tobackup = set()
1542 if update:
1542 if update:
1543 s = self.checklocalchanges(repo, force=force or keepchanges)
1543 s = self.checklocalchanges(repo, force=force or keepchanges)
1544 if force:
1544 if force:
1545 if not nobackup:
1545 if not nobackup:
1546 tobackup.update(s.modified + s.added)
1546 tobackup.update(s.modified + s.added)
1547 elif keepchanges:
1547 elif keepchanges:
1548 tobackup.update(s.modified + s.added +
1548 tobackup.update(s.modified + s.added +
1549 s.removed + s.deleted)
1549 s.removed + s.deleted)
1550
1550
1551 self.applieddirty = True
1551 self.applieddirty = True
1552 end = len(self.applied)
1552 end = len(self.applied)
1553 rev = self.applied[start].node
1553 rev = self.applied[start].node
1554
1554
1555 try:
1555 try:
1556 heads = repo.changelog.heads(rev)
1556 heads = repo.changelog.heads(rev)
1557 except error.LookupError:
1557 except error.LookupError:
1558 node = short(rev)
1558 node = short(rev)
1559 raise error.Abort(_('trying to pop unknown node %s') % node)
1559 raise error.Abort(_('trying to pop unknown node %s') % node)
1560
1560
1561 if heads != [self.applied[-1].node]:
1561 if heads != [self.applied[-1].node]:
1562 raise error.Abort(_("popping would remove a revision not "
1562 raise error.Abort(_("popping would remove a revision not "
1563 "managed by this patch queue"))
1563 "managed by this patch queue"))
1564 if not repo[self.applied[-1].node].mutable():
1564 if not repo[self.applied[-1].node].mutable():
1565 raise error.Abort(
1565 raise error.Abort(
1566 _("popping would remove a public revision"),
1566 _("popping would remove a public revision"),
1567 hint=_("see 'hg help phases' for details"))
1567 hint=_("see 'hg help phases' for details"))
1568
1568
1569 # we know there are no local changes, so we can make a simplified
1569 # we know there are no local changes, so we can make a simplified
1570 # form of hg.update.
1570 # form of hg.update.
1571 if update:
1571 if update:
1572 qp = self.qparents(repo, rev)
1572 qp = self.qparents(repo, rev)
1573 ctx = repo[qp]
1573 ctx = repo[qp]
1574 m, a, r, d = repo.status(qp, '.')[:4]
1574 m, a, r, d = repo.status(qp, '.')[:4]
1575 if d:
1575 if d:
1576 raise error.Abort(_("deletions found between repo revs"))
1576 raise error.Abort(_("deletions found between repo revs"))
1577
1577
1578 tobackup = set(a + m + r) & tobackup
1578 tobackup = set(a + m + r) & tobackup
1579 if keepchanges and tobackup:
1579 if keepchanges and tobackup:
1580 raise error.Abort(_("local changes found, qrefresh first"))
1580 raise error.Abort(_("local changes found, qrefresh first"))
1581 self.backup(repo, tobackup)
1581 self.backup(repo, tobackup)
1582 repo.dirstate.beginparentchange()
1582 repo.dirstate.beginparentchange()
1583 for f in a:
1583 for f in a:
1584 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1584 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1585 repo.dirstate.drop(f)
1585 repo.dirstate.drop(f)
1586 for f in m + r:
1586 for f in m + r:
1587 fctx = ctx[f]
1587 fctx = ctx[f]
1588 repo.wwrite(f, fctx.data(), fctx.flags())
1588 repo.wwrite(f, fctx.data(), fctx.flags())
1589 repo.dirstate.normal(f)
1589 repo.dirstate.normal(f)
1590 repo.setparents(qp, nullid)
1590 repo.setparents(qp, nullid)
1591 repo.dirstate.endparentchange()
1591 repo.dirstate.endparentchange()
1592 for patch in reversed(self.applied[start:end]):
1592 for patch in reversed(self.applied[start:end]):
1593 self.ui.status(_("popping %s\n") % patch.name)
1593 self.ui.status(_("popping %s\n") % patch.name)
1594 del self.applied[start:end]
1594 del self.applied[start:end]
1595 strip(self.ui, repo, [rev], update=False, backup=False)
1595 strip(self.ui, repo, [rev], update=False, backup=False)
1596 for s, state in repo['.'].substate.items():
1596 for s, state in repo['.'].substate.items():
1597 repo['.'].sub(s).get(state)
1597 repo['.'].sub(s).get(state)
1598 if self.applied:
1598 if self.applied:
1599 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1599 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1600 else:
1600 else:
1601 self.ui.write(_("patch queue now empty\n"))
1601 self.ui.write(_("patch queue now empty\n"))
1602
1602
1603 def diff(self, repo, pats, opts):
1603 def diff(self, repo, pats, opts):
1604 top, patch = self.checktoppatch(repo)
1604 top, patch = self.checktoppatch(repo)
1605 if not top:
1605 if not top:
1606 self.ui.write(_("no patches applied\n"))
1606 self.ui.write(_("no patches applied\n"))
1607 return
1607 return
1608 qp = self.qparents(repo, top)
1608 qp = self.qparents(repo, top)
1609 if opts.get('reverse'):
1609 if opts.get('reverse'):
1610 node1, node2 = None, qp
1610 node1, node2 = None, qp
1611 else:
1611 else:
1612 node1, node2 = qp, None
1612 node1, node2 = qp, None
1613 diffopts = self.diffopts(opts, patch)
1613 diffopts = self.diffopts(opts, patch)
1614 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1614 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1615
1615
1616 def refresh(self, repo, pats=None, **opts):
1616 def refresh(self, repo, pats=None, **opts):
1617 if not self.applied:
1617 if not self.applied:
1618 self.ui.write(_("no patches applied\n"))
1618 self.ui.write(_("no patches applied\n"))
1619 return 1
1619 return 1
1620 msg = opts.get('msg', '').rstrip()
1620 msg = opts.get('msg', '').rstrip()
1621 edit = opts.get('edit')
1621 edit = opts.get('edit')
1622 editform = opts.get('editform', 'mq.qrefresh')
1622 editform = opts.get('editform', 'mq.qrefresh')
1623 newuser = opts.get('user')
1623 newuser = opts.get('user')
1624 newdate = opts.get('date')
1624 newdate = opts.get('date')
1625 if newdate:
1625 if newdate:
1626 newdate = '%d %d' % util.parsedate(newdate)
1626 newdate = '%d %d' % util.parsedate(newdate)
1627 wlock = repo.wlock()
1627 wlock = repo.wlock()
1628
1628
1629 try:
1629 try:
1630 self.checktoppatch(repo)
1630 self.checktoppatch(repo)
1631 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1631 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1632 if repo.changelog.heads(top) != [top]:
1632 if repo.changelog.heads(top) != [top]:
1633 raise error.Abort(_("cannot qrefresh a revision with children"))
1633 raise error.Abort(_("cannot qrefresh a revision with children"))
1634 if not repo[top].mutable():
1634 if not repo[top].mutable():
1635 raise error.Abort(_("cannot qrefresh public revision"),
1635 raise error.Abort(_("cannot qrefresh public revision"),
1636 hint=_("see 'hg help phases' for details"))
1636 hint=_("see 'hg help phases' for details"))
1637
1637
1638 cparents = repo.changelog.parents(top)
1638 cparents = repo.changelog.parents(top)
1639 patchparent = self.qparents(repo, top)
1639 patchparent = self.qparents(repo, top)
1640
1640
1641 inclsubs = checksubstate(repo, hex(patchparent))
1641 inclsubs = checksubstate(repo, hex(patchparent))
1642 if inclsubs:
1642 if inclsubs:
1643 substatestate = repo.dirstate['.hgsubstate']
1643 substatestate = repo.dirstate['.hgsubstate']
1644
1644
1645 ph = patchheader(self.join(patchfn), self.plainmode)
1645 ph = patchheader(self.join(patchfn), self.plainmode)
1646 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1646 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1647 if newuser:
1647 if newuser:
1648 ph.setuser(newuser)
1648 ph.setuser(newuser)
1649 if newdate:
1649 if newdate:
1650 ph.setdate(newdate)
1650 ph.setdate(newdate)
1651 ph.setparent(hex(patchparent))
1651 ph.setparent(hex(patchparent))
1652
1652
1653 # only commit new patch when write is complete
1653 # only commit new patch when write is complete
1654 patchf = self.opener(patchfn, 'w', atomictemp=True)
1654 patchf = self.opener(patchfn, 'w', atomictemp=True)
1655
1655
1656 # update the dirstate in place, strip off the qtip commit
1656 # update the dirstate in place, strip off the qtip commit
1657 # and then commit.
1657 # and then commit.
1658 #
1658 #
1659 # this should really read:
1659 # this should really read:
1660 # mm, dd, aa = repo.status(top, patchparent)[:3]
1660 # mm, dd, aa = repo.status(top, patchparent)[:3]
1661 # but we do it backwards to take advantage of manifest/changelog
1661 # but we do it backwards to take advantage of manifest/changelog
1662 # caching against the next repo.status call
1662 # caching against the next repo.status call
1663 mm, aa, dd = repo.status(patchparent, top)[:3]
1663 mm, aa, dd = repo.status(patchparent, top)[:3]
1664 changes = repo.changelog.read(top)
1664 changes = repo.changelog.read(top)
1665 man = repo.manifestlog[changes[0]].read()
1665 man = repo.manifestlog[changes[0]].read()
1666 aaa = aa[:]
1666 aaa = aa[:]
1667 matchfn = scmutil.match(repo[None], pats, opts)
1667 matchfn = scmutil.match(repo[None], pats, opts)
1668 # in short mode, we only diff the files included in the
1668 # in short mode, we only diff the files included in the
1669 # patch already plus specified files
1669 # patch already plus specified files
1670 if opts.get('short'):
1670 if opts.get('short'):
1671 # if amending a patch, we start with existing
1671 # if amending a patch, we start with existing
1672 # files plus specified files - unfiltered
1672 # files plus specified files - unfiltered
1673 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1673 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1674 # filter with include/exclude options
1674 # filter with include/exclude options
1675 matchfn = scmutil.match(repo[None], opts=opts)
1675 matchfn = scmutil.match(repo[None], opts=opts)
1676 else:
1676 else:
1677 match = scmutil.matchall(repo)
1677 match = scmutil.matchall(repo)
1678 m, a, r, d = repo.status(match=match)[:4]
1678 m, a, r, d = repo.status(match=match)[:4]
1679 mm = set(mm)
1679 mm = set(mm)
1680 aa = set(aa)
1680 aa = set(aa)
1681 dd = set(dd)
1681 dd = set(dd)
1682
1682
1683 # we might end up with files that were added between
1683 # we might end up with files that were added between
1684 # qtip and the dirstate parent, but then changed in the
1684 # qtip and the dirstate parent, but then changed in the
1685 # local dirstate. in this case, we want them to only
1685 # local dirstate. in this case, we want them to only
1686 # show up in the added section
1686 # show up in the added section
1687 for x in m:
1687 for x in m:
1688 if x not in aa:
1688 if x not in aa:
1689 mm.add(x)
1689 mm.add(x)
1690 # we might end up with files added by the local dirstate that
1690 # we might end up with files added by the local dirstate that
1691 # were deleted by the patch. In this case, they should only
1691 # were deleted by the patch. In this case, they should only
1692 # show up in the changed section.
1692 # show up in the changed section.
1693 for x in a:
1693 for x in a:
1694 if x in dd:
1694 if x in dd:
1695 dd.remove(x)
1695 dd.remove(x)
1696 mm.add(x)
1696 mm.add(x)
1697 else:
1697 else:
1698 aa.add(x)
1698 aa.add(x)
1699 # make sure any files deleted in the local dirstate
1699 # make sure any files deleted in the local dirstate
1700 # are not in the add or change column of the patch
1700 # are not in the add or change column of the patch
1701 forget = []
1701 forget = []
1702 for x in d + r:
1702 for x in d + r:
1703 if x in aa:
1703 if x in aa:
1704 aa.remove(x)
1704 aa.remove(x)
1705 forget.append(x)
1705 forget.append(x)
1706 continue
1706 continue
1707 else:
1707 else:
1708 mm.discard(x)
1708 mm.discard(x)
1709 dd.add(x)
1709 dd.add(x)
1710
1710
1711 m = list(mm)
1711 m = list(mm)
1712 r = list(dd)
1712 r = list(dd)
1713 a = list(aa)
1713 a = list(aa)
1714
1714
1715 # create 'match' that includes the files to be recommitted.
1715 # create 'match' that includes the files to be recommitted.
1716 # apply matchfn via repo.status to ensure correct case handling.
1716 # apply matchfn via repo.status to ensure correct case handling.
1717 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1717 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1718 allmatches = set(cm + ca + cr + cd)
1718 allmatches = set(cm + ca + cr + cd)
1719 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1719 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1720
1720
1721 files = set(inclsubs)
1721 files = set(inclsubs)
1722 for x in refreshchanges:
1722 for x in refreshchanges:
1723 files.update(x)
1723 files.update(x)
1724 match = scmutil.matchfiles(repo, files)
1724 match = scmutil.matchfiles(repo, files)
1725
1725
1726 bmlist = repo[top].bookmarks()
1726 bmlist = repo[top].bookmarks()
1727
1727
1728 dsguard = None
1728 dsguard = None
1729 try:
1729 try:
1730 dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
1730 dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
1731 if diffopts.git or diffopts.upgrade:
1731 if diffopts.git or diffopts.upgrade:
1732 copies = {}
1732 copies = {}
1733 for dst in a:
1733 for dst in a:
1734 src = repo.dirstate.copied(dst)
1734 src = repo.dirstate.copied(dst)
1735 # during qfold, the source file for copies may
1735 # during qfold, the source file for copies may
1736 # be removed. Treat this as a simple add.
1736 # be removed. Treat this as a simple add.
1737 if src is not None and src in repo.dirstate:
1737 if src is not None and src in repo.dirstate:
1738 copies.setdefault(src, []).append(dst)
1738 copies.setdefault(src, []).append(dst)
1739 repo.dirstate.add(dst)
1739 repo.dirstate.add(dst)
1740 # remember the copies between patchparent and qtip
1740 # remember the copies between patchparent and qtip
1741 for dst in aaa:
1741 for dst in aaa:
1742 f = repo.file(dst)
1742 f = repo.file(dst)
1743 src = f.renamed(man[dst])
1743 src = f.renamed(man[dst])
1744 if src:
1744 if src:
1745 copies.setdefault(src[0], []).extend(
1745 copies.setdefault(src[0], []).extend(
1746 copies.get(dst, []))
1746 copies.get(dst, []))
1747 if dst in a:
1747 if dst in a:
1748 copies[src[0]].append(dst)
1748 copies[src[0]].append(dst)
1749 # we can't copy a file created by the patch itself
1749 # we can't copy a file created by the patch itself
1750 if dst in copies:
1750 if dst in copies:
1751 del copies[dst]
1751 del copies[dst]
1752 for src, dsts in copies.iteritems():
1752 for src, dsts in copies.iteritems():
1753 for dst in dsts:
1753 for dst in dsts:
1754 repo.dirstate.copy(src, dst)
1754 repo.dirstate.copy(src, dst)
1755 else:
1755 else:
1756 for dst in a:
1756 for dst in a:
1757 repo.dirstate.add(dst)
1757 repo.dirstate.add(dst)
1758 # Drop useless copy information
1758 # Drop useless copy information
1759 for f in list(repo.dirstate.copies()):
1759 for f in list(repo.dirstate.copies()):
1760 repo.dirstate.copy(None, f)
1760 repo.dirstate.copy(None, f)
1761 for f in r:
1761 for f in r:
1762 repo.dirstate.remove(f)
1762 repo.dirstate.remove(f)
1763 # if the patch excludes a modified file, mark that
1763 # if the patch excludes a modified file, mark that
1764 # file with mtime=0 so status can see it.
1764 # file with mtime=0 so status can see it.
1765 mm = []
1765 mm = []
1766 for i in xrange(len(m) - 1, -1, -1):
1766 for i in xrange(len(m) - 1, -1, -1):
1767 if not matchfn(m[i]):
1767 if not matchfn(m[i]):
1768 mm.append(m[i])
1768 mm.append(m[i])
1769 del m[i]
1769 del m[i]
1770 for f in m:
1770 for f in m:
1771 repo.dirstate.normal(f)
1771 repo.dirstate.normal(f)
1772 for f in mm:
1772 for f in mm:
1773 repo.dirstate.normallookup(f)
1773 repo.dirstate.normallookup(f)
1774 for f in forget:
1774 for f in forget:
1775 repo.dirstate.drop(f)
1775 repo.dirstate.drop(f)
1776
1776
1777 user = ph.user or changes[1]
1777 user = ph.user or changes[1]
1778
1778
1779 oldphase = repo[top].phase()
1779 oldphase = repo[top].phase()
1780
1780
1781 # assumes strip can roll itself back if interrupted
1781 # assumes strip can roll itself back if interrupted
1782 repo.setparents(*cparents)
1782 repo.setparents(*cparents)
1783 self.applied.pop()
1783 self.applied.pop()
1784 self.applieddirty = True
1784 self.applieddirty = True
1785 strip(self.ui, repo, [top], update=False, backup=False)
1785 strip(self.ui, repo, [top], update=False, backup=False)
1786 dsguard.close()
1786 dsguard.close()
1787 finally:
1787 finally:
1788 release(dsguard)
1788 release(dsguard)
1789
1789
1790 try:
1790 try:
1791 # might be nice to attempt to roll back strip after this
1791 # might be nice to attempt to roll back strip after this
1792
1792
1793 defaultmsg = "[mq]: %s" % patchfn
1793 defaultmsg = "[mq]: %s" % patchfn
1794 editor = cmdutil.getcommiteditor(editform=editform)
1794 editor = cmdutil.getcommiteditor(editform=editform)
1795 if edit:
1795 if edit:
1796 def finishdesc(desc):
1796 def finishdesc(desc):
1797 if desc.rstrip():
1797 if desc.rstrip():
1798 ph.setmessage(desc)
1798 ph.setmessage(desc)
1799 return desc
1799 return desc
1800 return defaultmsg
1800 return defaultmsg
1801 # i18n: this message is shown in editor with "HG: " prefix
1801 # i18n: this message is shown in editor with "HG: " prefix
1802 extramsg = _('Leave message empty to use default message.')
1802 extramsg = _('Leave message empty to use default message.')
1803 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1803 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1804 extramsg=extramsg,
1804 extramsg=extramsg,
1805 editform=editform)
1805 editform=editform)
1806 message = msg or "\n".join(ph.message)
1806 message = msg or "\n".join(ph.message)
1807 elif not msg:
1807 elif not msg:
1808 if not ph.message:
1808 if not ph.message:
1809 message = defaultmsg
1809 message = defaultmsg
1810 else:
1810 else:
1811 message = "\n".join(ph.message)
1811 message = "\n".join(ph.message)
1812 else:
1812 else:
1813 message = msg
1813 message = msg
1814 ph.setmessage(msg)
1814 ph.setmessage(msg)
1815
1815
1816 # Ensure we create a new changeset in the same phase than
1816 # Ensure we create a new changeset in the same phase than
1817 # the old one.
1817 # the old one.
1818 lock = tr = None
1818 lock = tr = None
1819 try:
1819 try:
1820 lock = repo.lock()
1820 lock = repo.lock()
1821 tr = repo.transaction('mq')
1821 tr = repo.transaction('mq')
1822 n = newcommit(repo, oldphase, message, user, ph.date,
1822 n = newcommit(repo, oldphase, message, user, ph.date,
1823 match=match, force=True, editor=editor)
1823 match=match, force=True, editor=editor)
1824 # only write patch after a successful commit
1824 # only write patch after a successful commit
1825 c = [list(x) for x in refreshchanges]
1825 c = [list(x) for x in refreshchanges]
1826 if inclsubs:
1826 if inclsubs:
1827 self.putsubstate2changes(substatestate, c)
1827 self.putsubstate2changes(substatestate, c)
1828 chunks = patchmod.diff(repo, patchparent,
1828 chunks = patchmod.diff(repo, patchparent,
1829 changes=c, opts=diffopts)
1829 changes=c, opts=diffopts)
1830 comments = str(ph)
1830 comments = str(ph)
1831 if comments:
1831 if comments:
1832 patchf.write(comments)
1832 patchf.write(comments)
1833 for chunk in chunks:
1833 for chunk in chunks:
1834 patchf.write(chunk)
1834 patchf.write(chunk)
1835 patchf.close()
1835 patchf.close()
1836
1836
1837 marks = repo._bookmarks
1837 marks = repo._bookmarks
1838 for bm in bmlist:
1838 for bm in bmlist:
1839 marks[bm] = n
1839 marks[bm] = n
1840 marks.recordchange(tr)
1840 marks.recordchange(tr)
1841 tr.close()
1841 tr.close()
1842
1842
1843 self.applied.append(statusentry(n, patchfn))
1843 self.applied.append(statusentry(n, patchfn))
1844 finally:
1844 finally:
1845 lockmod.release(tr, lock)
1845 lockmod.release(tr, lock)
1846 except: # re-raises
1846 except: # re-raises
1847 ctx = repo[cparents[0]]
1847 ctx = repo[cparents[0]]
1848 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1848 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1849 self.savedirty()
1849 self.savedirty()
1850 self.ui.warn(_('qrefresh interrupted while patch was popped! '
1850 self.ui.warn(_('qrefresh interrupted while patch was popped! '
1851 '(revert --all, qpush to recover)\n'))
1851 '(revert --all, qpush to recover)\n'))
1852 raise
1852 raise
1853 finally:
1853 finally:
1854 wlock.release()
1854 wlock.release()
1855 self.removeundo(repo)
1855 self.removeundo(repo)
1856
1856
1857 def init(self, repo, create=False):
1857 def init(self, repo, create=False):
1858 if not create and os.path.isdir(self.path):
1858 if not create and os.path.isdir(self.path):
1859 raise error.Abort(_("patch queue directory already exists"))
1859 raise error.Abort(_("patch queue directory already exists"))
1860 try:
1860 try:
1861 os.mkdir(self.path)
1861 os.mkdir(self.path)
1862 except OSError as inst:
1862 except OSError as inst:
1863 if inst.errno != errno.EEXIST or not create:
1863 if inst.errno != errno.EEXIST or not create:
1864 raise
1864 raise
1865 if create:
1865 if create:
1866 return self.qrepo(create=True)
1866 return self.qrepo(create=True)
1867
1867
1868 def unapplied(self, repo, patch=None):
1868 def unapplied(self, repo, patch=None):
1869 if patch and patch not in self.series:
1869 if patch and patch not in self.series:
1870 raise error.Abort(_("patch %s is not in series file") % patch)
1870 raise error.Abort(_("patch %s is not in series file") % patch)
1871 if not patch:
1871 if not patch:
1872 start = self.seriesend()
1872 start = self.seriesend()
1873 else:
1873 else:
1874 start = self.series.index(patch) + 1
1874 start = self.series.index(patch) + 1
1875 unapplied = []
1875 unapplied = []
1876 for i in xrange(start, len(self.series)):
1876 for i in xrange(start, len(self.series)):
1877 pushable, reason = self.pushable(i)
1877 pushable, reason = self.pushable(i)
1878 if pushable:
1878 if pushable:
1879 unapplied.append((i, self.series[i]))
1879 unapplied.append((i, self.series[i]))
1880 self.explainpushable(i)
1880 self.explainpushable(i)
1881 return unapplied
1881 return unapplied
1882
1882
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print (a slice of) the patch series to the ui.

        missing -- when set, list files found in the patch directory that
                   are not tracked by the series file, instead of the series.
        start/length -- slice of the series to print; length defaults to
                   the remainder of the series.
        status -- only print patches whose state char matches
                   ('A'/'U'/'G'); ignored in verbose mode.
        summary -- also print the first line of each patch's message.
        """
        def displayname(pfx, patchname, state):
            # Emit one output line: optional prefix, labeled name and,
            # with summary, the (possibly truncated) first message line.
            if pfx:
                self.ui.write(pfx)
            if summary:
                ph = patchheader(self.join(patchname), self.plainmode)
                if ph.message:
                    msg = ph.message[0]
                else:
                    msg = ''

                if self.ui.formatted():
                    # Trim the summary so the whole line fits the terminal.
                    width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                    if width > 0:
                        msg = util.ellipsis(msg, width)
                    else:
                        msg = ''
                self.ui.write(patchname, label='qseries.' + state)
                self.ui.write(': ')
                self.ui.write(msg, label='qseries.message.' + state)
            else:
                self.ui.write(patchname, label='qseries.' + state)
            self.ui.write('\n')

        applied = set([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            if self.ui.verbose:
                # Width of the largest index we will print, for alignment.
                idxwidth = len(str(start + length - 1))
            for i in xrange(start, start + length):
                patch = self.series[i]
                if patch in applied:
                    char, state = 'A', 'applied'
                elif self.pushable(i)[0]:
                    char, state = 'U', 'unapplied'
                else:
                    char, state = 'G', 'guarded'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%*d %s ' % (idxwidth, i, char)
                elif status and status != char:
                    # Non-verbose mode filters on the requested state char.
                    continue
                displayname(pfx, patch, state)
        else:
            # Walk the patch directory for files unknown to the series,
            # skipping mq's own bookkeeping files and hidden entries.
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.statuspath, self.seriespath,
                                   self.guardspath)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            for x in sorted(msng_list):
                pfx = self.ui.verbose and ('D ') or ''
                displayname(pfx, x, 'missing')
1942
1942
1943 def issaveline(self, l):
1943 def issaveline(self, l):
1944 if l.name == '.hg.patches.save.line':
1944 if l.name == '.hg.patches.save.line':
1945 return True
1945 return True
1946
1946
1947 def qrepo(self, create=False):
1947 def qrepo(self, create=False):
1948 ui = self.baseui.copy()
1948 ui = self.baseui.copy()
1949 if create or os.path.isdir(self.join(".hg")):
1949 if create or os.path.isdir(self.join(".hg")):
1950 return hg.repository(ui, path=self.path, create=create)
1950 return hg.repository(ui, path=self.path, create=create)
1951
1951
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Rebuild mq state from a qsave changeset *rev*.

        The save changeset's description embeds the applied/series state
        (the format written by save()).  With *delete*, strip the save
        changeset afterwards when it is a head; with *qupdate*, also
        update the versioned patch queue repository to the recorded
        parents.  Returns 1 on failure, None on success.
        """
        desc = repo[rev].description().strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i, line in enumerate(lines):
            if line == 'Patch Data:':
                datastart = i + 1
            elif line.startswith('Dirstate:'):
                # Queue repository parents recorded as two hex nodes.
                l = line.rstrip()
                l = l[10:].split(' ')
                qpp = [bin(x) for x in l]
            elif datastart is not None:
                # After 'Patch Data:' each line is either 'hexnode:name'
                # (an applied patch) or ':name' (series-only entry).
                l = line.rstrip()
                n, name = l.split(':', 1)
                if n:
                    applied.append(statusentry(bin(n), name))
                else:
                    series.append(l)
        if datastart is None:
            self.ui.warn(_("no saved patch data found\n"))
            return 1
        self.ui.warn(_("restoring status: %s\n") % lines[0])
        self.fullseries = series
        self.applied = applied
        self.parseseries()
        self.seriesdirty = True
        self.applieddirty = True
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                # Stripping would take descendants with it; refuse.
                self.ui.warn(_("save entry has children, leaving it alone\n"))
            else:
                self.ui.warn(_("removing save entry %s\n") % short(rev))
                pp = repo.dirstate.parents()
                # Only move the working directory if it sat on the save rev.
                if rev in pp:
                    update = True
                else:
                    update = False
                strip(self.ui, repo, [rev], update=update, backup=False)
        if qpp:
            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                         (short(qpp[0]), short(qpp[1])))
            if qupdate:
                self.ui.status(_("updating queue directory\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn(_("unable to load queue repository\n"))
                    return 1
                hg.clean(r, qpp[0])
2005
2005
    def save(self, repo, msg=None):
        """Checkpoint the current mq state in a dedicated commit.

        Encodes the applied patches and the full series into the commit
        message (the format parsed by restore()) and appends a
        '.hg.patches.save.line' marker entry to the applied list.
        Returns 1 on failure, None on success.
        """
        if not self.applied:
            self.ui.warn(_("save: no patches applied, exiting\n"))
            return 1
        if self.issaveline(self.applied[-1]):
            # Last applied entry is already a save marker; nothing to do.
            self.ui.warn(_("status is already saved\n"))
            return 1

        if not msg:
            msg = _("hg patches saved state")
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            # Also record the patch-queue repository's dirstate parents.
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
        # Applied entries serialize as 'hexnode:name'; series-only
        # entries as ':name' — restore() relies on this distinction.
        msg += "\n\nPatch Data:\n"
        msg += ''.join('%s\n' % x for x in self.applied)
        msg += ''.join(':%s\n' % x for x in self.fullseries)
        n = repo.commit(msg, force=True)
        if not n:
            self.ui.warn(_("repo commit failed\n"))
            return 1
        self.applied.append(statusentry(n, '.hg.patches.save.line'))
        self.applieddirty = True
        self.removeundo(repo)
2032
2032
2033 def fullseriesend(self):
2033 def fullseriesend(self):
2034 if self.applied:
2034 if self.applied:
2035 p = self.applied[-1].name
2035 p = self.applied[-1].name
2036 end = self.findseries(p)
2036 end = self.findseries(p)
2037 if end is None:
2037 if end is None:
2038 return len(self.fullseries)
2038 return len(self.fullseries)
2039 return end + 1
2039 return end + 1
2040 return 0
2040 return 0
2041
2041
2042 def seriesend(self, all_patches=False):
2042 def seriesend(self, all_patches=False):
2043 """If all_patches is False, return the index of the next pushable patch
2043 """If all_patches is False, return the index of the next pushable patch
2044 in the series, or the series length. If all_patches is True, return the
2044 in the series, or the series length. If all_patches is True, return the
2045 index of the first patch past the last applied one.
2045 index of the first patch past the last applied one.
2046 """
2046 """
2047 end = 0
2047 end = 0
2048 def nextpatch(start):
2048 def nextpatch(start):
2049 if all_patches or start >= len(self.series):
2049 if all_patches or start >= len(self.series):
2050 return start
2050 return start
2051 for i in xrange(start, len(self.series)):
2051 for i in xrange(start, len(self.series)):
2052 p, reason = self.pushable(i)
2052 p, reason = self.pushable(i)
2053 if p:
2053 if p:
2054 return i
2054 return i
2055 self.explainpushable(i)
2055 self.explainpushable(i)
2056 return len(self.series)
2056 return len(self.series)
2057 if self.applied:
2057 if self.applied:
2058 p = self.applied[-1].name
2058 p = self.applied[-1].name
2059 try:
2059 try:
2060 end = self.series.index(p)
2060 end = self.series.index(p)
2061 except ValueError:
2061 except ValueError:
2062 return 0
2062 return 0
2063 return nextpatch(end + 1)
2063 return nextpatch(end + 1)
2064 return nextpatch(end)
2064 return nextpatch(end)
2065
2065
2066 def appliedname(self, index):
2066 def appliedname(self, index):
2067 pname = self.applied[index].name
2067 pname = self.applied[index].name
2068 if not self.ui.verbose:
2068 if not self.ui.verbose:
2069 p = pname
2069 p = pname
2070 else:
2070 else:
2071 p = str(self.series.index(pname)) + " " + pname
2071 p = str(self.series.index(pname)) + " " + pname
2072 return p
2072 return p
2073
2073
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patches from *files* or place revisions *rev* under mq.

        files -- patch file paths ('-' reads from self.ui.fin); mutually
                 exclusive with rev.
        patchname -- name for the (single) imported patch.
        rev -- revisions to convert into managed patches; must form a
               linear path (to qbase when patches are applied, to a head
               otherwise).
        existing -- register files already inside the patch directory.
        force -- overwrite an existing patch of the same name.
        git -- use git extended diff format for --rev exports.
        Returns the list of imported patch names.

        NOTE(review): the len(rev) check below assumes *rev* is a list
        (possibly empty), not None, when *files* is given — callers pass
        the command table's [] default; confirm before reusing directly.
        """
        def checkseries(patchname):
            if patchname in self.series:
                raise error.Abort(_('patch %s is already in the series file')
                                  % patchname)

        if rev:
            if files:
                raise error.Abort(_('option "-r" not valid when importing '
                                    'files'))
            rev = scmutil.revrange(repo, rev)
            # Import from tip downwards so inserts at index 0 end up in order.
            rev.sort(reverse=True)
        elif not files:
            raise error.Abort(_('no files or revisions specified'))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise error.Abort(_('option "-n" not valid when importing multiple '
                                'patches'))
        imported = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev.first()))
            if len(heads) > 1:
                raise error.Abort(_('revision %d is the root of more than one '
                                    'branch') % rev.last())
            if self.applied:
                base = repo.changelog.node(rev.first())
                if base in [n.node for n in self.applied]:
                    raise error.Abort(_('revision %d is already managed')
                                      % rev.first())
                if heads != [self.applied[-1].node]:
                    raise error.Abort(_('revision %d is not the parent of '
                                        'the queue') % rev.first())
                base = repo.changelog.rev(self.applied[0].node)
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev.first())]:
                    raise error.Abort(_('revision %d has unmanaged children')
                                      % rev.first())
                lastparent = None

            diffopts = self.diffopts({'git': git})
            with repo.transaction('qimport') as tr:
                for r in rev:
                    if not repo[r].mutable():
                        raise error.Abort(_('revision %d is not mutable') % r,
                                          hint=_("see 'hg help phases' "
                                                 'for details'))
                    p1, p2 = repo.changelog.parentrevs(r)
                    n = repo.changelog.node(r)
                    if p2 != nullrev:
                        raise error.Abort(_('cannot import merge revision %d')
                                          % r)
                    # Enforce linearity: each revision must be the first
                    # parent of the previously imported one.
                    if lastparent and lastparent != r:
                        raise error.Abort(_('revision %d is not the parent of '
                                            '%d')
                                          % (r, lastparent))
                    lastparent = p1

                    if not patchname:
                        # Derive a name from the first description line.
                        patchname = self.makepatchname(
                            repo[r].description().split('\n', 1)[0],
                            '%d.diff' % r)
                    checkseries(patchname)
                    self.checkpatchname(patchname, force)
                    self.fullseries.insert(0, patchname)

                    # Write the revision out as a patch file.
                    patchf = self.opener(patchname, "w")
                    cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                    patchf.close()

                    se = statusentry(n, patchname)
                    self.applied.insert(0, se)

                    self.added.append(patchname)
                    imported.append(patchname)
                    patchname = None
                if rev and repo.ui.configbool('mq', 'secret', False):
                    # if we added anything with --rev, move the secret root
                    phases.retractboundary(repo, tr, phases.secret, [n])
            self.parseseries()
            self.applieddirty = True
            self.seriesdirty = True

        for i, filename in enumerate(files):
            if existing:
                # Register a file that already lives in the patch directory.
                if filename == '-':
                    raise error.Abort(_('-e is incompatible with import from -')
                                      )
                filename = normname(filename)
                self.checkreservedname(filename)
                if util.url(filename).islocal():
                    originpath = self.join(filename)
                    if not os.path.isfile(originpath):
                        raise error.Abort(
                            _("patch %s does not exist") % filename)

                if patchname:
                    self.checkpatchname(patchname, force)

                    self.ui.write(_('renaming %s to %s\n')
                                  % (filename, patchname))
                    util.rename(originpath, self.join(patchname))
                else:
                    patchname = filename

            else:
                # Copy patch content from stdin or an external path.
                if filename == '-' and not patchname:
                    raise error.Abort(_('need --name to import a patch from -'))
                elif not patchname:
                    patchname = normname(os.path.basename(filename.rstrip('/')))
                self.checkpatchname(patchname, force)
                try:
                    if filename == '-':
                        text = self.ui.fin.read()
                    else:
                        fp = hg.openpath(self.ui, filename)
                        text = fp.read()
                        fp.close()
                except (OSError, IOError):
                    raise error.Abort(_("unable to read file %s") % filename)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
                patchf.close()
            if not force:
                checkseries(patchname)
            if patchname not in self.series:
                # Insert after the last applied patch, keeping file order.
                index = self.fullseriesend() + i
                self.fullseries[index:index] = [patchname]
            self.parseseries()
            self.seriesdirty = True
            self.ui.warn(_("adding %s to series file\n") % patchname)
            self.added.append(patchname)
            imported.append(patchname)
            patchname = None

        self.removeundo(repo)
        return imported
2214
2214
def fixkeepchangesopts(ui, opts):
    """Inject --keep-changes into *opts* when mq.keepchanges is set.

    Returns *opts* untouched when the config knob is off or the command
    was run with --force/--exact; otherwise returns a copy with
    'keep_changes' enabled, so the caller's dict is never mutated.
    """
    keep = ui.configbool('mq', 'keepchanges')
    if not keep or opts.get('force') or opts.get('exact'):
        return opts
    newopts = dict(opts)
    newopts['keep_changes'] = True
    return newopts
2222
2222
@command("qdelete|qremove|qrm",
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [],
           _('stop managing a revision (DEPRECATED)'), _('REV'))],
         _('hg qdelete [-k] [PATCH]...'))
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # Delegate to the queue object, then persist its state files.
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.savedirty()
    return 0
2241
2241
@command("qapplied",
         [('1', 'last', None, _('show only the preceding applied patch'))
         ] + seriesopts,
         _('hg qapplied [-1] [-s] [PATCH]'))
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    mq = repo.mq

    if patch:
        if patch not in mq.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    else:
        end = mq.seriesend(True)

    start = 0
    if opts.get('last'):
        # --last shows only the patch applied before the current one.
        if not end:
            ui.write(_("no patches applied\n"))
            return 1
        if end == 1:
            ui.write(_("only one patch applied\n"))
            return 1
        start = end - 2
        end = 1

    mq.qseries(repo, length=end, start=start, status='A',
               summary=opts.get('summary'))
2274
2274
2275
2275
@command("qunapplied",
         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
         _('hg qunapplied [-1] [-s] [PATCH]'))
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    mq = repo.mq
    if patch:
        if patch not in mq.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        first = mq.series.index(patch) + 1
    else:
        first = mq.seriesend(True)

    firstonly = bool(opts.get('first'))
    if firstonly and first == len(mq.series):
        ui.write(_("all patches applied\n"))
        return 1

    # --first limits the listing to a single entry.
    length = 1 if firstonly else None
    mq.qseries(repo, start=first, length=length, status='U',
               summary=opts.get('summary'))
2302
2302
@command("qimport",
         [('e', 'existing', None, _('import file in patch directory')),
          ('n', 'name', '',
           _('name of patch file'), _('NAME')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [],
           _('place existing revisions under mq control'), _('REV')),
          ('g', 'git', None, _('use git extended diff format')),
          ('P', 'push', None, _('qpush after importing'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
def qimport(ui, repo, *filename, **opts):
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev . -n patch will place the current revision
    under mq control). With -g/--git, patches imported with --rev will
    use the git diff format. See the diffs help topic for information
    on why this is important for preserving rename/copy information
    and permission changes. Use :hg:`qfinish` to remove changesets
    from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    with repo.lock(): # cause this may move phase
        mq = repo.mq
        try:
            newpatches = mq.qimport(
                repo, filename, patchname=opts.get('name'),
                existing=opts.get('existing'), force=opts.get('force'),
                rev=opts.get('rev'), git=opts.get('git'))
        finally:
            # Persist series/status even when the import aborts midway.
            mq.savedirty()

        if newpatches and opts.get('push') and not opts.get('rev'):
            return mq.push(repo, newpatches[-1])
    return 0
2360
2360
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    r = q.init(repo, create)
    q.savedirty()
    if r:
        # A versioned patch repo was created: seed it with an .hgignore
        # excluding mq's transient bookkeeping files, and an empty series.
        if not os.path.exists(r.wjoin('.hgignore')):
            fp = r.wvfs('.hgignore', 'w')
            fp.write('^\\.hg\n')
            fp.write('^\\.mq\n')
            fp.write('syntax: glob\n')
            fp.write('status\n')
            fp.write('guards\n')
            fp.close()
        if not os.path.exists(r.wjoin('series')):
            r.wvfs('series', 'w').close()
        r[None].add(['.hgignore', 'series'])
        commands.add(ui, r)
    return 0
2386
2386
@command("^qinit",
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]'))
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # Thin command wrapper: all the work happens in the module-level qinit().
    return qinit(ui, repo, create=opts.get('create_repo'))
2402
2402
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
         ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'),
         norepo=True)
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.peer(ui, opts, ui.expandpath(source))

    # patches repo (source only)
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    try:
        hg.peer(ui, opts, patchespath)
    except error.RepoError:
        raise error.Abort(_('versioned patch repository not found'
                            ' (see init --mq)'))
    qbase, destrev = None, None
    if sr.local():
        repo = sr.local()
        # NOTE(review): qbase is still None here, so repo[qbase] is the
        # working-directory context at this point — confirm intended.
        if repo.mq.applied and repo[qbase].phase() != phases.secret:
            qbase = repo.mq.applied[0].node
            if not hg.islocal(dest):
                heads = set(repo.heads())
                destrev = list(heads.difference(repo.heads(qbase)))
                destrev.append(repo.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            # Remote has no qbase tag: nothing needs stripping later.
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        repo = dr.local()
        if qbase:
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            strip(ui, repo, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(repo, repo.changelog.tip())
2489
2489
@command("qcommit|qci",
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'),
         inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    q = repo.mq
    r = q.qrepo()
    if not r:
        raise error.Abort('no queue repository')
    # Delegate to the normal commit command, but run it against the
    # nested patch repository instead of the main one.
    commands.commit(r.ui, r, *pats, **opts)
2503
2503
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    repo.mq.qseries(repo, missing=opts.get('missing'),
                    summary=opts.get('summary'))
    return 0
2515
2515
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    if q.applied:
        # Index one past the last applied patch in the series.
        t = q.seriesend(True)
    else:
        t = 0

    if t:
        q.qseries(repo, start=t - 1, length=1, status='A',
                  summary=opts.get('summary'))
    else:
        ui.write(_("no patches applied\n"))
        return 1
2533
2533
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    end = q.seriesend()
    if end == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2545
2545
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    l = len(q.applied)
    if l == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not l:
        ui.write(_("no patches applied\n"))
        return 1
    # Second-to-last applied patch is the one "before" the current top.
    idx = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=idx, length=1, status='A',
              summary=opts.get('summary'))
2562
2562
def setupheaderopts(ui, opts):
    """Fill in 'user'/'date' in opts from --currentuser/--currentdate.

    Mutates opts in place; explicit -u/-d values take precedence."""
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()
2568
2568
@command("^qnew",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
         ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
         inferrepo=True)
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    msg = cmdutil.logmessage(ui, opts)
    q = repo.mq
    opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2614
2614
@command("^qrefresh",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
         ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
         inferrepo=True)
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    with repo.wlock():
        ret = q.refresh(repo, pats, msg=message, **opts)
        q.savedirty()
        return ret
2659
2659
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'),
         inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    ui.pager('qdiff')
    repo.mq.diff(repo, pats, opts)
    return 0
2682
2682
@command('qfold',
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    if not files:
        raise error.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise error.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # Duplicate on the command line (or the current patch itself):
            # warn, but note it is still checked/appended below.
            ui.warn(_('skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise error.Abort(_('qfold cannot fold already applied patch %s')
                              % p)
        patches.append(p)

    for p in patches:
        if not message:
            # No explicit -m/-l: collect each folded patch's header message
            # so they can be concatenated into the refreshed patch below.
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise error.Abort(_('error folding patch %s') % p)

    if not message:
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append('* * *')
                message.extend(msg)
        message = '\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    with repo.wlock():
        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
                  editform='mq.qfold')
        q.delete(repo, patches, opts)
        q.savedirty()
2749
2749
@command("qgoto",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixkeepchangesopts(ui, opts)
    q = repo.mq
    patch = q.lookup(patch)
    nobackup = opts.get('no_backup')
    keepchanges = opts.get('keep_changes')
    # Pop if the target is already applied (it becomes the new top),
    # otherwise push up to and including it.
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
                    keepchanges=keepchanges)
    else:
        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
                     keepchanges=keepchanges)
    q.savedirty()
    return ret
2773
2773
2774 @command("qguard",
2774 @command("qguard",
2775 [('l', 'list', None, _('list all patches and guards')),
2775 [('l', 'list', None, _('list all patches and guards')),
2776 ('n', 'none', None, _('drop all guards'))],
2776 ('n', 'none', None, _('drop all guards'))],
2777 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2777 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
2778 def guard(ui, repo, *args, **opts):
2778 def guard(ui, repo, *args, **opts):
2779 '''set or print guards for a patch
2779 '''set or print guards for a patch
2780
2780
2781 Guards control whether a patch can be pushed. A patch with no
2781 Guards control whether a patch can be pushed. A patch with no
2782 guards is always pushed. A patch with a positive guard ("+foo") is
2782 guards is always pushed. A patch with a positive guard ("+foo") is
2783 pushed only if the :hg:`qselect` command has activated it. A patch with
2783 pushed only if the :hg:`qselect` command has activated it. A patch with
2784 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2784 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2785 has activated it.
2785 has activated it.
2786
2786
2787 With no arguments, print the currently active guards.
2787 With no arguments, print the currently active guards.
2788 With arguments, set guards for the named patch.
2788 With arguments, set guards for the named patch.
2789
2789
2790 .. note::
2790 .. note::
2791
2791
2792 Specifying negative guards now requires '--'.
2792 Specifying negative guards now requires '--'.
2793
2793
2794 To set guards on another patch::
2794 To set guards on another patch::
2795
2795
2796 hg qguard other.patch -- +2.6.17 -stable
2796 hg qguard other.patch -- +2.6.17 -stable
2797
2797
2798 Returns 0 on success.
2798 Returns 0 on success.
2799 '''
2799 '''
2800 def status(idx):
2800 def status(idx):
2801 guards = q.seriesguards[idx] or ['unguarded']
2801 guards = q.seriesguards[idx] or ['unguarded']
2802 if q.series[idx] in applied:
2802 if q.series[idx] in applied:
2803 state = 'applied'
2803 state = 'applied'
2804 elif q.pushable(idx)[0]:
2804 elif q.pushable(idx)[0]:
2805 state = 'unapplied'
2805 state = 'unapplied'
2806 else:
2806 else:
2807 state = 'guarded'
2807 state = 'guarded'
2808 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2808 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2809 ui.write('%s: ' % ui.label(q.series[idx], label))
2809 ui.write('%s: ' % ui.label(q.series[idx], label))
2810
2810
2811 for i, guard in enumerate(guards):
2811 for i, guard in enumerate(guards):
2812 if guard.startswith('+'):
2812 if guard.startswith('+'):
2813 ui.write(guard, label='qguard.positive')
2813 ui.write(guard, label='qguard.positive')
2814 elif guard.startswith('-'):
2814 elif guard.startswith('-'):
2815 ui.write(guard, label='qguard.negative')
2815 ui.write(guard, label='qguard.negative')
2816 else:
2816 else:
2817 ui.write(guard, label='qguard.unguarded')
2817 ui.write(guard, label='qguard.unguarded')
2818 if i != len(guards) - 1:
2818 if i != len(guards) - 1:
2819 ui.write(' ')
2819 ui.write(' ')
2820 ui.write('\n')
2820 ui.write('\n')
2821 q = repo.mq
2821 q = repo.mq
2822 applied = set(p.name for p in q.applied)
2822 applied = set(p.name for p in q.applied)
2823 patch = None
2823 patch = None
2824 args = list(args)
2824 args = list(args)
2825 if opts.get('list'):
2825 if opts.get('list'):
2826 if args or opts.get('none'):
2826 if args or opts.get('none'):
2827 raise error.Abort(_('cannot mix -l/--list with options or '
2827 raise error.Abort(_('cannot mix -l/--list with options or '
2828 'arguments'))
2828 'arguments'))
2829 for i in xrange(len(q.series)):
2829 for i in xrange(len(q.series)):
2830 status(i)
2830 status(i)
2831 return
2831 return
2832 if not args or args[0][0:1] in '-+':
2832 if not args or args[0][0:1] in '-+':
2833 if not q.applied:
2833 if not q.applied:
2834 raise error.Abort(_('no patches applied'))
2834 raise error.Abort(_('no patches applied'))
2835 patch = q.applied[-1].name
2835 patch = q.applied[-1].name
2836 if patch is None and args[0][0:1] not in '-+':
2836 if patch is None and args[0][0:1] not in '-+':
2837 patch = args.pop(0)
2837 patch = args.pop(0)
2838 if patch is None:
2838 if patch is None:
2839 raise error.Abort(_('no patch to work with'))
2839 raise error.Abort(_('no patch to work with'))
2840 if args or opts.get('none'):
2840 if args or opts.get('none'):
2841 idx = q.findseries(patch)
2841 idx = q.findseries(patch)
2842 if idx is None:
2842 if idx is None:
2843 raise error.Abort(_('no patch named %s') % patch)
2843 raise error.Abort(_('no patch named %s') % patch)
2844 q.setguards(idx, args)
2844 q.setguards(idx, args)
2845 q.savedirty()
2845 q.savedirty()
2846 else:
2846 else:
2847 status(q.series.index(q.lookup(patch)))
2847 status(q.series.index(q.lookup(patch)))
2848
2848
2849 @command("qheader", [], _('hg qheader [PATCH]'))
2849 @command("qheader", [], _('hg qheader [PATCH]'))
2850 def header(ui, repo, patch=None):
2850 def header(ui, repo, patch=None):
2851 """print the header of the topmost or specified patch
2851 """print the header of the topmost or specified patch
2852
2852
2853 Returns 0 on success."""
2853 Returns 0 on success."""
2854 q = repo.mq
2854 q = repo.mq
2855
2855
2856 if patch:
2856 if patch:
2857 patch = q.lookup(patch)
2857 patch = q.lookup(patch)
2858 else:
2858 else:
2859 if not q.applied:
2859 if not q.applied:
2860 ui.write(_('no patches applied\n'))
2860 ui.write(_('no patches applied\n'))
2861 return 1
2861 return 1
2862 patch = q.lookup('qtip')
2862 patch = q.lookup('qtip')
2863 ph = patchheader(q.join(patch), q.plainmode)
2863 ph = patchheader(q.join(patch), q.plainmode)
2864
2864
2865 ui.write('\n'.join(ph.message) + '\n')
2865 ui.write('\n'.join(ph.message) + '\n')
2866
2866
2867 def lastsavename(path):
2867 def lastsavename(path):
2868 (directory, base) = os.path.split(path)
2868 (directory, base) = os.path.split(path)
2869 names = os.listdir(directory)
2869 names = os.listdir(directory)
2870 namere = re.compile("%s.([0-9]+)" % base)
2870 namere = re.compile("%s.([0-9]+)" % base)
2871 maxindex = None
2871 maxindex = None
2872 maxname = None
2872 maxname = None
2873 for f in names:
2873 for f in names:
2874 m = namere.match(f)
2874 m = namere.match(f)
2875 if m:
2875 if m:
2876 index = int(m.group(1))
2876 index = int(m.group(1))
2877 if maxindex is None or index > maxindex:
2877 if maxindex is None or index > maxindex:
2878 maxindex = index
2878 maxindex = index
2879 maxname = f
2879 maxname = f
2880 if maxname:
2880 if maxname:
2881 return (os.path.join(directory, maxname), maxindex)
2881 return (os.path.join(directory, maxname), maxindex)
2882 return (None, None)
2882 return (None, None)
2883
2883
2884 def savename(path):
2884 def savename(path):
2885 (last, index) = lastsavename(path)
2885 (last, index) = lastsavename(path)
2886 if last is None:
2886 if last is None:
2887 index = 0
2887 index = 0
2888 newpath = path + ".%d" % (index + 1)
2888 newpath = path + ".%d" % (index + 1)
2889 return newpath
2889 return newpath
2890
2890
2891 @command("^qpush",
2891 @command("^qpush",
2892 [('', 'keep-changes', None,
2892 [('', 'keep-changes', None,
2893 _('tolerate non-conflicting local changes')),
2893 _('tolerate non-conflicting local changes')),
2894 ('f', 'force', None, _('apply on top of local changes')),
2894 ('f', 'force', None, _('apply on top of local changes')),
2895 ('e', 'exact', None,
2895 ('e', 'exact', None,
2896 _('apply the target patch to its recorded parent')),
2896 _('apply the target patch to its recorded parent')),
2897 ('l', 'list', None, _('list patch name in commit text')),
2897 ('l', 'list', None, _('list patch name in commit text')),
2898 ('a', 'all', None, _('apply all patches')),
2898 ('a', 'all', None, _('apply all patches')),
2899 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2899 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2900 ('n', 'name', '',
2900 ('n', 'name', '',
2901 _('merge queue name (DEPRECATED)'), _('NAME')),
2901 _('merge queue name (DEPRECATED)'), _('NAME')),
2902 ('', 'move', None,
2902 ('', 'move', None,
2903 _('reorder patch series and apply only the patch')),
2903 _('reorder patch series and apply only the patch')),
2904 ('', 'no-backup', None, _('do not save backup copies of files'))],
2904 ('', 'no-backup', None, _('do not save backup copies of files'))],
2905 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2905 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
2906 def push(ui, repo, patch=None, **opts):
2906 def push(ui, repo, patch=None, **opts):
2907 """push the next patch onto the stack
2907 """push the next patch onto the stack
2908
2908
2909 By default, abort if the working directory contains uncommitted
2909 By default, abort if the working directory contains uncommitted
2910 changes. With --keep-changes, abort only if the uncommitted files
2910 changes. With --keep-changes, abort only if the uncommitted files
2911 overlap with patched files. With -f/--force, backup and patch over
2911 overlap with patched files. With -f/--force, backup and patch over
2912 uncommitted changes.
2912 uncommitted changes.
2913
2913
2914 Return 0 on success.
2914 Return 0 on success.
2915 """
2915 """
2916 q = repo.mq
2916 q = repo.mq
2917 mergeq = None
2917 mergeq = None
2918
2918
2919 opts = fixkeepchangesopts(ui, opts)
2919 opts = fixkeepchangesopts(ui, opts)
2920 if opts.get('merge'):
2920 if opts.get('merge'):
2921 if opts.get('name'):
2921 if opts.get('name'):
2922 newpath = repo.join(opts.get('name'))
2922 newpath = repo.join(opts.get('name'))
2923 else:
2923 else:
2924 newpath, i = lastsavename(q.path)
2924 newpath, i = lastsavename(q.path)
2925 if not newpath:
2925 if not newpath:
2926 ui.warn(_("no saved queues found, please use -n\n"))
2926 ui.warn(_("no saved queues found, please use -n\n"))
2927 return 1
2927 return 1
2928 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2928 mergeq = queue(ui, repo.baseui, repo.path, newpath)
2929 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2929 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2930 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2930 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2931 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2931 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2932 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2932 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
2933 keepchanges=opts.get('keep_changes'))
2933 keepchanges=opts.get('keep_changes'))
2934 return ret
2934 return ret
2935
2935
2936 @command("^qpop",
2936 @command("^qpop",
2937 [('a', 'all', None, _('pop all patches')),
2937 [('a', 'all', None, _('pop all patches')),
2938 ('n', 'name', '',
2938 ('n', 'name', '',
2939 _('queue name to pop (DEPRECATED)'), _('NAME')),
2939 _('queue name to pop (DEPRECATED)'), _('NAME')),
2940 ('', 'keep-changes', None,
2940 ('', 'keep-changes', None,
2941 _('tolerate non-conflicting local changes')),
2941 _('tolerate non-conflicting local changes')),
2942 ('f', 'force', None, _('forget any local changes to patched files')),
2942 ('f', 'force', None, _('forget any local changes to patched files')),
2943 ('', 'no-backup', None, _('do not save backup copies of files'))],
2943 ('', 'no-backup', None, _('do not save backup copies of files'))],
2944 _('hg qpop [-a] [-f] [PATCH | INDEX]'))
2944 _('hg qpop [-a] [-f] [PATCH | INDEX]'))
2945 def pop(ui, repo, patch=None, **opts):
2945 def pop(ui, repo, patch=None, **opts):
2946 """pop the current patch off the stack
2946 """pop the current patch off the stack
2947
2947
2948 Without argument, pops off the top of the patch stack. If given a
2948 Without argument, pops off the top of the patch stack. If given a
2949 patch name, keeps popping off patches until the named patch is at
2949 patch name, keeps popping off patches until the named patch is at
2950 the top of the stack.
2950 the top of the stack.
2951
2951
2952 By default, abort if the working directory contains uncommitted
2952 By default, abort if the working directory contains uncommitted
2953 changes. With --keep-changes, abort only if the uncommitted files
2953 changes. With --keep-changes, abort only if the uncommitted files
2954 overlap with patched files. With -f/--force, backup and discard
2954 overlap with patched files. With -f/--force, backup and discard
2955 changes made to such files.
2955 changes made to such files.
2956
2956
2957 Return 0 on success.
2957 Return 0 on success.
2958 """
2958 """
2959 opts = fixkeepchangesopts(ui, opts)
2959 opts = fixkeepchangesopts(ui, opts)
2960 localupdate = True
2960 localupdate = True
2961 if opts.get('name'):
2961 if opts.get('name'):
2962 q = queue(ui, repo.baseui, repo.path, repo.join(opts.get('name')))
2962 q = queue(ui, repo.baseui, repo.path, repo.join(opts.get('name')))
2963 ui.warn(_('using patch queue: %s\n') % q.path)
2963 ui.warn(_('using patch queue: %s\n') % q.path)
2964 localupdate = False
2964 localupdate = False
2965 else:
2965 else:
2966 q = repo.mq
2966 q = repo.mq
2967 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2967 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2968 all=opts.get('all'), nobackup=opts.get('no_backup'),
2968 all=opts.get('all'), nobackup=opts.get('no_backup'),
2969 keepchanges=opts.get('keep_changes'))
2969 keepchanges=opts.get('keep_changes'))
2970 q.savedirty()
2970 q.savedirty()
2971 return ret
2971 return ret
2972
2972
2973 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2973 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2974 def rename(ui, repo, patch, name=None, **opts):
2974 def rename(ui, repo, patch, name=None, **opts):
2975 """rename a patch
2975 """rename a patch
2976
2976
2977 With one argument, renames the current patch to PATCH1.
2977 With one argument, renames the current patch to PATCH1.
2978 With two arguments, renames PATCH1 to PATCH2.
2978 With two arguments, renames PATCH1 to PATCH2.
2979
2979
2980 Returns 0 on success."""
2980 Returns 0 on success."""
2981 q = repo.mq
2981 q = repo.mq
2982 if not name:
2982 if not name:
2983 name = patch
2983 name = patch
2984 patch = None
2984 patch = None
2985
2985
2986 if patch:
2986 if patch:
2987 patch = q.lookup(patch)
2987 patch = q.lookup(patch)
2988 else:
2988 else:
2989 if not q.applied:
2989 if not q.applied:
2990 ui.write(_('no patches applied\n'))
2990 ui.write(_('no patches applied\n'))
2991 return
2991 return
2992 patch = q.lookup('qtip')
2992 patch = q.lookup('qtip')
2993 absdest = q.join(name)
2993 absdest = q.join(name)
2994 if os.path.isdir(absdest):
2994 if os.path.isdir(absdest):
2995 name = normname(os.path.join(name, os.path.basename(patch)))
2995 name = normname(os.path.join(name, os.path.basename(patch)))
2996 absdest = q.join(name)
2996 absdest = q.join(name)
2997 q.checkpatchname(name)
2997 q.checkpatchname(name)
2998
2998
2999 ui.note(_('renaming %s to %s\n') % (patch, name))
2999 ui.note(_('renaming %s to %s\n') % (patch, name))
3000 i = q.findseries(patch)
3000 i = q.findseries(patch)
3001 guards = q.guard_re.findall(q.fullseries[i])
3001 guards = q.guard_re.findall(q.fullseries[i])
3002 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
3002 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
3003 q.parseseries()
3003 q.parseseries()
3004 q.seriesdirty = True
3004 q.seriesdirty = True
3005
3005
3006 info = q.isapplied(patch)
3006 info = q.isapplied(patch)
3007 if info:
3007 if info:
3008 q.applied[info[0]] = statusentry(info[1], name)
3008 q.applied[info[0]] = statusentry(info[1], name)
3009 q.applieddirty = True
3009 q.applieddirty = True
3010
3010
3011 destdir = os.path.dirname(absdest)
3011 destdir = os.path.dirname(absdest)
3012 if not os.path.isdir(destdir):
3012 if not os.path.isdir(destdir):
3013 os.makedirs(destdir)
3013 os.makedirs(destdir)
3014 util.rename(q.join(patch), absdest)
3014 util.rename(q.join(patch), absdest)
3015 r = q.qrepo()
3015 r = q.qrepo()
3016 if r and patch in r.dirstate:
3016 if r and patch in r.dirstate:
3017 wctx = r[None]
3017 wctx = r[None]
3018 with r.wlock():
3018 with r.wlock():
3019 if r.dirstate[patch] == 'a':
3019 if r.dirstate[patch] == 'a':
3020 r.dirstate.drop(patch)
3020 r.dirstate.drop(patch)
3021 r.dirstate.add(name)
3021 r.dirstate.add(name)
3022 else:
3022 else:
3023 wctx.copy(patch, name)
3023 wctx.copy(patch, name)
3024 wctx.forget([patch])
3024 wctx.forget([patch])
3025
3025
3026 q.savedirty()
3026 q.savedirty()
3027
3027
3028 @command("qrestore",
3028 @command("qrestore",
3029 [('d', 'delete', None, _('delete save entry')),
3029 [('d', 'delete', None, _('delete save entry')),
3030 ('u', 'update', None, _('update queue working directory'))],
3030 ('u', 'update', None, _('update queue working directory'))],
3031 _('hg qrestore [-d] [-u] REV'))
3031 _('hg qrestore [-d] [-u] REV'))
3032 def restore(ui, repo, rev, **opts):
3032 def restore(ui, repo, rev, **opts):
3033 """restore the queue state saved by a revision (DEPRECATED)
3033 """restore the queue state saved by a revision (DEPRECATED)
3034
3034
3035 This command is deprecated, use :hg:`rebase` instead."""
3035 This command is deprecated, use :hg:`rebase` instead."""
3036 rev = repo.lookup(rev)
3036 rev = repo.lookup(rev)
3037 q = repo.mq
3037 q = repo.mq
3038 q.restore(repo, rev, delete=opts.get('delete'),
3038 q.restore(repo, rev, delete=opts.get('delete'),
3039 qupdate=opts.get('update'))
3039 qupdate=opts.get('update'))
3040 q.savedirty()
3040 q.savedirty()
3041 return 0
3041 return 0
3042
3042
3043 @command("qsave",
3043 @command("qsave",
3044 [('c', 'copy', None, _('copy patch directory')),
3044 [('c', 'copy', None, _('copy patch directory')),
3045 ('n', 'name', '',
3045 ('n', 'name', '',
3046 _('copy directory name'), _('NAME')),
3046 _('copy directory name'), _('NAME')),
3047 ('e', 'empty', None, _('clear queue status file')),
3047 ('e', 'empty', None, _('clear queue status file')),
3048 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3048 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3049 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
3049 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
3050 def save(ui, repo, **opts):
3050 def save(ui, repo, **opts):
3051 """save current queue state (DEPRECATED)
3051 """save current queue state (DEPRECATED)
3052
3052
3053 This command is deprecated, use :hg:`rebase` instead."""
3053 This command is deprecated, use :hg:`rebase` instead."""
3054 q = repo.mq
3054 q = repo.mq
3055 message = cmdutil.logmessage(ui, opts)
3055 message = cmdutil.logmessage(ui, opts)
3056 ret = q.save(repo, msg=message)
3056 ret = q.save(repo, msg=message)
3057 if ret:
3057 if ret:
3058 return ret
3058 return ret
3059 q.savedirty() # save to .hg/patches before copying
3059 q.savedirty() # save to .hg/patches before copying
3060 if opts.get('copy'):
3060 if opts.get('copy'):
3061 path = q.path
3061 path = q.path
3062 if opts.get('name'):
3062 if opts.get('name'):
3063 newpath = os.path.join(q.basepath, opts.get('name'))
3063 newpath = os.path.join(q.basepath, opts.get('name'))
3064 if os.path.exists(newpath):
3064 if os.path.exists(newpath):
3065 if not os.path.isdir(newpath):
3065 if not os.path.isdir(newpath):
3066 raise error.Abort(_('destination %s exists and is not '
3066 raise error.Abort(_('destination %s exists and is not '
3067 'a directory') % newpath)
3067 'a directory') % newpath)
3068 if not opts.get('force'):
3068 if not opts.get('force'):
3069 raise error.Abort(_('destination %s exists, '
3069 raise error.Abort(_('destination %s exists, '
3070 'use -f to force') % newpath)
3070 'use -f to force') % newpath)
3071 else:
3071 else:
3072 newpath = savename(path)
3072 newpath = savename(path)
3073 ui.warn(_("copy %s to %s\n") % (path, newpath))
3073 ui.warn(_("copy %s to %s\n") % (path, newpath))
3074 util.copyfiles(path, newpath)
3074 util.copyfiles(path, newpath)
3075 if opts.get('empty'):
3075 if opts.get('empty'):
3076 del q.applied[:]
3076 del q.applied[:]
3077 q.applieddirty = True
3077 q.applieddirty = True
3078 q.savedirty()
3078 q.savedirty()
3079 return 0
3079 return 0
3080
3080
3081
3081
3082 @command("qselect",
3082 @command("qselect",
3083 [('n', 'none', None, _('disable all guards')),
3083 [('n', 'none', None, _('disable all guards')),
3084 ('s', 'series', None, _('list all guards in series file')),
3084 ('s', 'series', None, _('list all guards in series file')),
3085 ('', 'pop', None, _('pop to before first guarded applied patch')),
3085 ('', 'pop', None, _('pop to before first guarded applied patch')),
3086 ('', 'reapply', None, _('pop, then reapply patches'))],
3086 ('', 'reapply', None, _('pop, then reapply patches'))],
3087 _('hg qselect [OPTION]... [GUARD]...'))
3087 _('hg qselect [OPTION]... [GUARD]...'))
3088 def select(ui, repo, *args, **opts):
3088 def select(ui, repo, *args, **opts):
3089 '''set or print guarded patches to push
3089 '''set or print guarded patches to push
3090
3090
3091 Use the :hg:`qguard` command to set or print guards on patch, then use
3091 Use the :hg:`qguard` command to set or print guards on patch, then use
3092 qselect to tell mq which guards to use. A patch will be pushed if
3092 qselect to tell mq which guards to use. A patch will be pushed if
3093 it has no guards or any positive guards match the currently
3093 it has no guards or any positive guards match the currently
3094 selected guard, but will not be pushed if any negative guards
3094 selected guard, but will not be pushed if any negative guards
3095 match the current guard. For example::
3095 match the current guard. For example::
3096
3096
3097 qguard foo.patch -- -stable (negative guard)
3097 qguard foo.patch -- -stable (negative guard)
3098 qguard bar.patch +stable (positive guard)
3098 qguard bar.patch +stable (positive guard)
3099 qselect stable
3099 qselect stable
3100
3100
3101 This activates the "stable" guard. mq will skip foo.patch (because
3101 This activates the "stable" guard. mq will skip foo.patch (because
3102 it has a negative match) but push bar.patch (because it has a
3102 it has a negative match) but push bar.patch (because it has a
3103 positive match).
3103 positive match).
3104
3104
3105 With no arguments, prints the currently active guards.
3105 With no arguments, prints the currently active guards.
3106 With one argument, sets the active guard.
3106 With one argument, sets the active guard.
3107
3107
3108 Use -n/--none to deactivate guards (no other arguments needed).
3108 Use -n/--none to deactivate guards (no other arguments needed).
3109 When no guards are active, patches with positive guards are
3109 When no guards are active, patches with positive guards are
3110 skipped and patches with negative guards are pushed.
3110 skipped and patches with negative guards are pushed.
3111
3111
3112 qselect can change the guards on applied patches. It does not pop
3112 qselect can change the guards on applied patches. It does not pop
3113 guarded patches by default. Use --pop to pop back to the last
3113 guarded patches by default. Use --pop to pop back to the last
3114 applied patch that is not guarded. Use --reapply (which implies
3114 applied patch that is not guarded. Use --reapply (which implies
3115 --pop) to push back to the current patch afterwards, but skip
3115 --pop) to push back to the current patch afterwards, but skip
3116 guarded patches.
3116 guarded patches.
3117
3117
3118 Use -s/--series to print a list of all guards in the series file
3118 Use -s/--series to print a list of all guards in the series file
3119 (no other arguments needed). Use -v for more information.
3119 (no other arguments needed). Use -v for more information.
3120
3120
3121 Returns 0 on success.'''
3121 Returns 0 on success.'''
3122
3122
3123 q = repo.mq
3123 q = repo.mq
3124 guards = q.active()
3124 guards = q.active()
3125 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3125 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3126 if args or opts.get('none'):
3126 if args or opts.get('none'):
3127 old_unapplied = q.unapplied(repo)
3127 old_unapplied = q.unapplied(repo)
3128 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3128 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3129 q.setactive(args)
3129 q.setactive(args)
3130 q.savedirty()
3130 q.savedirty()
3131 if not args:
3131 if not args:
3132 ui.status(_('guards deactivated\n'))
3132 ui.status(_('guards deactivated\n'))
3133 if not opts.get('pop') and not opts.get('reapply'):
3133 if not opts.get('pop') and not opts.get('reapply'):
3134 unapplied = q.unapplied(repo)
3134 unapplied = q.unapplied(repo)
3135 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3135 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3136 if len(unapplied) != len(old_unapplied):
3136 if len(unapplied) != len(old_unapplied):
3137 ui.status(_('number of unguarded, unapplied patches has '
3137 ui.status(_('number of unguarded, unapplied patches has '
3138 'changed from %d to %d\n') %
3138 'changed from %d to %d\n') %
3139 (len(old_unapplied), len(unapplied)))
3139 (len(old_unapplied), len(unapplied)))
3140 if len(guarded) != len(old_guarded):
3140 if len(guarded) != len(old_guarded):
3141 ui.status(_('number of guarded, applied patches has changed '
3141 ui.status(_('number of guarded, applied patches has changed '
3142 'from %d to %d\n') %
3142 'from %d to %d\n') %
3143 (len(old_guarded), len(guarded)))
3143 (len(old_guarded), len(guarded)))
3144 elif opts.get('series'):
3144 elif opts.get('series'):
3145 guards = {}
3145 guards = {}
3146 noguards = 0
3146 noguards = 0
3147 for gs in q.seriesguards:
3147 for gs in q.seriesguards:
3148 if not gs:
3148 if not gs:
3149 noguards += 1
3149 noguards += 1
3150 for g in gs:
3150 for g in gs:
3151 guards.setdefault(g, 0)
3151 guards.setdefault(g, 0)
3152 guards[g] += 1
3152 guards[g] += 1
3153 if ui.verbose:
3153 if ui.verbose:
3154 guards['NONE'] = noguards
3154 guards['NONE'] = noguards
3155 guards = guards.items()
3155 guards = guards.items()
3156 guards.sort(key=lambda x: x[0][1:])
3156 guards.sort(key=lambda x: x[0][1:])
3157 if guards:
3157 if guards:
3158 ui.note(_('guards in series file:\n'))
3158 ui.note(_('guards in series file:\n'))
3159 for guard, count in guards:
3159 for guard, count in guards:
3160 ui.note('%2d ' % count)
3160 ui.note('%2d ' % count)
3161 ui.write(guard, '\n')
3161 ui.write(guard, '\n')
3162 else:
3162 else:
3163 ui.note(_('no guards in series file\n'))
3163 ui.note(_('no guards in series file\n'))
3164 else:
3164 else:
3165 if guards:
3165 if guards:
3166 ui.note(_('active guards:\n'))
3166 ui.note(_('active guards:\n'))
3167 for g in guards:
3167 for g in guards:
3168 ui.write(g, '\n')
3168 ui.write(g, '\n')
3169 else:
3169 else:
3170 ui.write(_('no active guards\n'))
3170 ui.write(_('no active guards\n'))
3171 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3171 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3172 popped = False
3172 popped = False
3173 if opts.get('pop') or opts.get('reapply'):
3173 if opts.get('pop') or opts.get('reapply'):
3174 for i in xrange(len(q.applied)):
3174 for i in xrange(len(q.applied)):
3175 if not pushable(i):
3175 if not pushable(i):
3176 ui.status(_('popping guarded patches\n'))
3176 ui.status(_('popping guarded patches\n'))
3177 popped = True
3177 popped = True
3178 if i == 0:
3178 if i == 0:
3179 q.pop(repo, all=True)
3179 q.pop(repo, all=True)
3180 else:
3180 else:
3181 q.pop(repo, q.applied[i - 1].name)
3181 q.pop(repo, q.applied[i - 1].name)
3182 break
3182 break
3183 if popped:
3183 if popped:
3184 try:
3184 try:
3185 if reapply:
3185 if reapply:
3186 ui.status(_('reapplying unguarded patches\n'))
3186 ui.status(_('reapplying unguarded patches\n'))
3187 q.push(repo, reapply)
3187 q.push(repo, reapply)
3188 finally:
3188 finally:
3189 q.savedirty()
3189 q.savedirty()
3190
3190
3191 @command("qfinish",
3191 @command("qfinish",
3192 [('a', 'applied', None, _('finish all applied changesets'))],
3192 [('a', 'applied', None, _('finish all applied changesets'))],
3193 _('hg qfinish [-a] [REV]...'))
3193 _('hg qfinish [-a] [REV]...'))
3194 def finish(ui, repo, *revrange, **opts):
3194 def finish(ui, repo, *revrange, **opts):
3195 """move applied patches into repository history
3195 """move applied patches into repository history
3196
3196
3197 Finishes the specified revisions (corresponding to applied
3197 Finishes the specified revisions (corresponding to applied
3198 patches) by moving them out of mq control into regular repository
3198 patches) by moving them out of mq control into regular repository
3199 history.
3199 history.
3200
3200
3201 Accepts a revision range or the -a/--applied option. If --applied
3201 Accepts a revision range or the -a/--applied option. If --applied
3202 is specified, all applied mq revisions are removed from mq
3202 is specified, all applied mq revisions are removed from mq
3203 control. Otherwise, the given revisions must be at the base of the
3203 control. Otherwise, the given revisions must be at the base of the
3204 stack of applied patches.
3204 stack of applied patches.
3205
3205
3206 This can be especially useful if your changes have been applied to
3206 This can be especially useful if your changes have been applied to
3207 an upstream repository, or if you are about to push your changes
3207 an upstream repository, or if you are about to push your changes
3208 to upstream.
3208 to upstream.
3209
3209
3210 Returns 0 on success.
3210 Returns 0 on success.
3211 """
3211 """
3212 if not opts.get('applied') and not revrange:
3212 if not opts.get('applied') and not revrange:
3213 raise error.Abort(_('no revisions specified'))
3213 raise error.Abort(_('no revisions specified'))
3214 elif opts.get('applied'):
3214 elif opts.get('applied'):
3215 revrange = ('qbase::qtip',) + revrange
3215 revrange = ('qbase::qtip',) + revrange
3216
3216
3217 q = repo.mq
3217 q = repo.mq
3218 if not q.applied:
3218 if not q.applied:
3219 ui.status(_('no patches applied\n'))
3219 ui.status(_('no patches applied\n'))
3220 return 0
3220 return 0
3221
3221
3222 revs = scmutil.revrange(repo, revrange)
3222 revs = scmutil.revrange(repo, revrange)
3223 if repo['.'].rev() in revs and repo[None].files():
3223 if repo['.'].rev() in revs and repo[None].files():
3224 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3224 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3225 # queue.finish may changes phases but leave the responsibility to lock the
3225 # queue.finish may changes phases but leave the responsibility to lock the
3226 # repo to the caller to avoid deadlock with wlock. This command code is
3226 # repo to the caller to avoid deadlock with wlock. This command code is
3227 # responsibility for this locking.
3227 # responsibility for this locking.
3228 with repo.lock():
3228 with repo.lock():
3229 q.finish(repo, revs)
3229 q.finish(repo, revs)
3230 q.savedirty()
3230 q.savedirty()
3231 return 0
3231 return 0
3232
3232
3233 @command("qqueue",
3233 @command("qqueue",
3234 [('l', 'list', False, _('list all available queues')),
3234 [('l', 'list', False, _('list all available queues')),
3235 ('', 'active', False, _('print name of active queue')),
3235 ('', 'active', False, _('print name of active queue')),
3236 ('c', 'create', False, _('create new queue')),
3236 ('c', 'create', False, _('create new queue')),
3237 ('', 'rename', False, _('rename active queue')),
3237 ('', 'rename', False, _('rename active queue')),
3238 ('', 'delete', False, _('delete reference to queue')),
3238 ('', 'delete', False, _('delete reference to queue')),
3239 ('', 'purge', False, _('delete queue, and remove patch dir')),
3239 ('', 'purge', False, _('delete queue, and remove patch dir')),
3240 ],
3240 ],
3241 _('[OPTION] [QUEUE]'))
3241 _('[OPTION] [QUEUE]'))
3242 def qqueue(ui, repo, name=None, **opts):
3242 def qqueue(ui, repo, name=None, **opts):
3243 '''manage multiple patch queues
3243 '''manage multiple patch queues
3244
3244
3245 Supports switching between different patch queues, as well as creating
3245 Supports switching between different patch queues, as well as creating
3246 new patch queues and deleting existing ones.
3246 new patch queues and deleting existing ones.
3247
3247
3248 Omitting a queue name or specifying -l/--list will show you the registered
3248 Omitting a queue name or specifying -l/--list will show you the registered
3249 queues - by default the "normal" patches queue is registered. The currently
3249 queues - by default the "normal" patches queue is registered. The currently
3250 active queue will be marked with "(active)". Specifying --active will print
3250 active queue will be marked with "(active)". Specifying --active will print
3251 only the name of the active queue.
3251 only the name of the active queue.
3252
3252
3253 To create a new queue, use -c/--create. The queue is automatically made
3253 To create a new queue, use -c/--create. The queue is automatically made
3254 active, except in the case where there are applied patches from the
3254 active, except in the case where there are applied patches from the
3255 currently active queue in the repository. Then the queue will only be
3255 currently active queue in the repository. Then the queue will only be
3256 created and switching will fail.
3256 created and switching will fail.
3257
3257
3258 To delete an existing queue, use --delete. You cannot delete the currently
3258 To delete an existing queue, use --delete. You cannot delete the currently
3259 active queue.
3259 active queue.
3260
3260
3261 Returns 0 on success.
3261 Returns 0 on success.
3262 '''
3262 '''
3263 q = repo.mq
3263 q = repo.mq
3264 _defaultqueue = 'patches'
3264 _defaultqueue = 'patches'
3265 _allqueues = 'patches.queues'
3265 _allqueues = 'patches.queues'
3266 _activequeue = 'patches.queue'
3266 _activequeue = 'patches.queue'
3267
3267
3268 def _getcurrent():
3268 def _getcurrent():
3269 cur = os.path.basename(q.path)
3269 cur = os.path.basename(q.path)
3270 if cur.startswith('patches-'):
3270 if cur.startswith('patches-'):
3271 cur = cur[8:]
3271 cur = cur[8:]
3272 return cur
3272 return cur
3273
3273
3274 def _noqueues():
3274 def _noqueues():
3275 try:
3275 try:
3276 fh = repo.vfs(_allqueues, 'r')
3276 fh = repo.vfs(_allqueues, 'r')
3277 fh.close()
3277 fh.close()
3278 except IOError:
3278 except IOError:
3279 return True
3279 return True
3280
3280
3281 return False
3281 return False
3282
3282
3283 def _getqueues():
3283 def _getqueues():
3284 current = _getcurrent()
3284 current = _getcurrent()
3285
3285
3286 try:
3286 try:
3287 fh = repo.vfs(_allqueues, 'r')
3287 fh = repo.vfs(_allqueues, 'r')
3288 queues = [queue.strip() for queue in fh if queue.strip()]
3288 queues = [queue.strip() for queue in fh if queue.strip()]
3289 fh.close()
3289 fh.close()
3290 if current not in queues:
3290 if current not in queues:
3291 queues.append(current)
3291 queues.append(current)
3292 except IOError:
3292 except IOError:
3293 queues = [_defaultqueue]
3293 queues = [_defaultqueue]
3294
3294
3295 return sorted(queues)
3295 return sorted(queues)
3296
3296
3297 def _setactive(name):
3297 def _setactive(name):
3298 if q.applied:
3298 if q.applied:
3299 raise error.Abort(_('new queue created, but cannot make active '
3299 raise error.Abort(_('new queue created, but cannot make active '
3300 'as patches are applied'))
3300 'as patches are applied'))
3301 _setactivenocheck(name)
3301 _setactivenocheck(name)
3302
3302
3303 def _setactivenocheck(name):
3303 def _setactivenocheck(name):
3304 fh = repo.vfs(_activequeue, 'w')
3304 fh = repo.vfs(_activequeue, 'w')
3305 if name != 'patches':
3305 if name != 'patches':
3306 fh.write(name)
3306 fh.write(name)
3307 fh.close()
3307 fh.close()
3308
3308
3309 def _addqueue(name):
3309 def _addqueue(name):
3310 fh = repo.vfs(_allqueues, 'a')
3310 fh = repo.vfs(_allqueues, 'a')
3311 fh.write('%s\n' % (name,))
3311 fh.write('%s\n' % (name,))
3312 fh.close()
3312 fh.close()
3313
3313
3314 def _queuedir(name):
3314 def _queuedir(name):
3315 if name == 'patches':
3315 if name == 'patches':
3316 return repo.join('patches')
3316 return repo.join('patches')
3317 else:
3317 else:
3318 return repo.join('patches-' + name)
3318 return repo.join('patches-' + name)
3319
3319
3320 def _validname(name):
3320 def _validname(name):
3321 for n in name:
3321 for n in name:
3322 if n in ':\\/.':
3322 if n in ':\\/.':
3323 return False
3323 return False
3324 return True
3324 return True
3325
3325
3326 def _delete(name):
3326 def _delete(name):
3327 if name not in existing:
3327 if name not in existing:
3328 raise error.Abort(_('cannot delete queue that does not exist'))
3328 raise error.Abort(_('cannot delete queue that does not exist'))
3329
3329
3330 current = _getcurrent()
3330 current = _getcurrent()
3331
3331
3332 if name == current:
3332 if name == current:
3333 raise error.Abort(_('cannot delete currently active queue'))
3333 raise error.Abort(_('cannot delete currently active queue'))
3334
3334
3335 fh = repo.vfs('patches.queues.new', 'w')
3335 fh = repo.vfs('patches.queues.new', 'w')
3336 for queue in existing:
3336 for queue in existing:
3337 if queue == name:
3337 if queue == name:
3338 continue
3338 continue
3339 fh.write('%s\n' % (queue,))
3339 fh.write('%s\n' % (queue,))
3340 fh.close()
3340 fh.close()
3341 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3341 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3342
3342
3343 if not name or opts.get('list') or opts.get('active'):
3343 if not name or opts.get('list') or opts.get('active'):
3344 current = _getcurrent()
3344 current = _getcurrent()
3345 if opts.get('active'):
3345 if opts.get('active'):
3346 ui.write('%s\n' % (current,))
3346 ui.write('%s\n' % (current,))
3347 return
3347 return
3348 for queue in _getqueues():
3348 for queue in _getqueues():
3349 ui.write('%s' % (queue,))
3349 ui.write('%s' % (queue,))
3350 if queue == current and not ui.quiet:
3350 if queue == current and not ui.quiet:
3351 ui.write(_(' (active)\n'))
3351 ui.write(_(' (active)\n'))
3352 else:
3352 else:
3353 ui.write('\n')
3353 ui.write('\n')
3354 return
3354 return
3355
3355
3356 if not _validname(name):
3356 if not _validname(name):
3357 raise error.Abort(
3357 raise error.Abort(
3358 _('invalid queue name, may not contain the characters ":\\/."'))
3358 _('invalid queue name, may not contain the characters ":\\/."'))
3359
3359
3360 with repo.wlock():
3360 with repo.wlock():
3361 existing = _getqueues()
3361 existing = _getqueues()
3362
3362
3363 if opts.get('create'):
3363 if opts.get('create'):
3364 if name in existing:
3364 if name in existing:
3365 raise error.Abort(_('queue "%s" already exists') % name)
3365 raise error.Abort(_('queue "%s" already exists') % name)
3366 if _noqueues():
3366 if _noqueues():
3367 _addqueue(_defaultqueue)
3367 _addqueue(_defaultqueue)
3368 _addqueue(name)
3368 _addqueue(name)
3369 _setactive(name)
3369 _setactive(name)
3370 elif opts.get('rename'):
3370 elif opts.get('rename'):
3371 current = _getcurrent()
3371 current = _getcurrent()
3372 if name == current:
3372 if name == current:
3373 raise error.Abort(_('can\'t rename "%s" to its current name')
3373 raise error.Abort(_('can\'t rename "%s" to its current name')
3374 % name)
3374 % name)
3375 if name in existing:
3375 if name in existing:
3376 raise error.Abort(_('queue "%s" already exists') % name)
3376 raise error.Abort(_('queue "%s" already exists') % name)
3377
3377
3378 olddir = _queuedir(current)
3378 olddir = _queuedir(current)
3379 newdir = _queuedir(name)
3379 newdir = _queuedir(name)
3380
3380
3381 if os.path.exists(newdir):
3381 if os.path.exists(newdir):
3382 raise error.Abort(_('non-queue directory "%s" already exists') %
3382 raise error.Abort(_('non-queue directory "%s" already exists') %
3383 newdir)
3383 newdir)
3384
3384
3385 fh = repo.vfs('patches.queues.new', 'w')
3385 fh = repo.vfs('patches.queues.new', 'w')
3386 for queue in existing:
3386 for queue in existing:
3387 if queue == current:
3387 if queue == current:
3388 fh.write('%s\n' % (name,))
3388 fh.write('%s\n' % (name,))
3389 if os.path.exists(olddir):
3389 if os.path.exists(olddir):
3390 util.rename(olddir, newdir)
3390 util.rename(olddir, newdir)
3391 else:
3391 else:
3392 fh.write('%s\n' % (queue,))
3392 fh.write('%s\n' % (queue,))
3393 fh.close()
3393 fh.close()
3394 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3394 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3395 _setactivenocheck(name)
3395 _setactivenocheck(name)
3396 elif opts.get('delete'):
3396 elif opts.get('delete'):
3397 _delete(name)
3397 _delete(name)
3398 elif opts.get('purge'):
3398 elif opts.get('purge'):
3399 if name in existing:
3399 if name in existing:
3400 _delete(name)
3400 _delete(name)
3401 qdir = _queuedir(name)
3401 qdir = _queuedir(name)
3402 if os.path.exists(qdir):
3402 if os.path.exists(qdir):
3403 shutil.rmtree(qdir)
3403 shutil.rmtree(qdir)
3404 else:
3404 else:
3405 if name not in existing:
3405 if name not in existing:
3406 raise error.Abort(_('use --create to create a new queue'))
3406 raise error.Abort(_('use --create to create a new queue'))
3407 _setactive(name)
3407 _setactive(name)
3408
3408
3409 def mqphasedefaults(repo, roots):
3409 def mqphasedefaults(repo, roots):
3410 """callback used to set mq changeset as secret when no phase data exists"""
3410 """callback used to set mq changeset as secret when no phase data exists"""
3411 if repo.mq.applied:
3411 if repo.mq.applied:
3412 if repo.ui.configbool('mq', 'secret', False):
3412 if repo.ui.configbool('mq', 'secret', False):
3413 mqphase = phases.secret
3413 mqphase = phases.secret
3414 else:
3414 else:
3415 mqphase = phases.draft
3415 mqphase = phases.draft
3416 qbase = repo[repo.mq.applied[0].node]
3416 qbase = repo[repo.mq.applied[0].node]
3417 roots[mqphase].add(qbase.node())
3417 roots[mqphase].add(qbase.node())
3418 return roots
3418 return roots
3419
3419
3420 def reposetup(ui, repo):
3420 def reposetup(ui, repo):
3421 class mqrepo(repo.__class__):
3421 class mqrepo(repo.__class__):
3422 @localrepo.unfilteredpropertycache
3422 @localrepo.unfilteredpropertycache
3423 def mq(self):
3423 def mq(self):
3424 return queue(self.ui, self.baseui, self.path)
3424 return queue(self.ui, self.baseui, self.path)
3425
3425
3426 def invalidateall(self):
3426 def invalidateall(self):
3427 super(mqrepo, self).invalidateall()
3427 super(mqrepo, self).invalidateall()
3428 if localrepo.hasunfilteredcache(self, 'mq'):
3428 if localrepo.hasunfilteredcache(self, 'mq'):
3429 # recreate mq in case queue path was changed
3429 # recreate mq in case queue path was changed
3430 delattr(self.unfiltered(), 'mq')
3430 delattr(self.unfiltered(), 'mq')
3431
3431
3432 def abortifwdirpatched(self, errmsg, force=False):
3432 def abortifwdirpatched(self, errmsg, force=False):
3433 if self.mq.applied and self.mq.checkapplied and not force:
3433 if self.mq.applied and self.mq.checkapplied and not force:
3434 parents = self.dirstate.parents()
3434 parents = self.dirstate.parents()
3435 patches = [s.node for s in self.mq.applied]
3435 patches = [s.node for s in self.mq.applied]
3436 if parents[0] in patches or parents[1] in patches:
3436 if parents[0] in patches or parents[1] in patches:
3437 raise error.Abort(errmsg)
3437 raise error.Abort(errmsg)
3438
3438
3439 def commit(self, text="", user=None, date=None, match=None,
3439 def commit(self, text="", user=None, date=None, match=None,
3440 force=False, editor=False, extra={}):
3440 force=False, editor=False, extra={}):
3441 self.abortifwdirpatched(
3441 self.abortifwdirpatched(
3442 _('cannot commit over an applied mq patch'),
3442 _('cannot commit over an applied mq patch'),
3443 force)
3443 force)
3444
3444
3445 return super(mqrepo, self).commit(text, user, date, match, force,
3445 return super(mqrepo, self).commit(text, user, date, match, force,
3446 editor, extra)
3446 editor, extra)
3447
3447
3448 def checkpush(self, pushop):
3448 def checkpush(self, pushop):
3449 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3449 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3450 outapplied = [e.node for e in self.mq.applied]
3450 outapplied = [e.node for e in self.mq.applied]
3451 if pushop.revs:
3451 if pushop.revs:
3452 # Assume applied patches have no non-patch descendants and
3452 # Assume applied patches have no non-patch descendants and
3453 # are not on remote already. Filtering any changeset not
3453 # are not on remote already. Filtering any changeset not
3454 # pushed.
3454 # pushed.
3455 heads = set(pushop.revs)
3455 heads = set(pushop.revs)
3456 for node in reversed(outapplied):
3456 for node in reversed(outapplied):
3457 if node in heads:
3457 if node in heads:
3458 break
3458 break
3459 else:
3459 else:
3460 outapplied.pop()
3460 outapplied.pop()
3461 # looking for pushed and shared changeset
3461 # looking for pushed and shared changeset
3462 for node in outapplied:
3462 for node in outapplied:
3463 if self[node].phase() < phases.secret:
3463 if self[node].phase() < phases.secret:
3464 raise error.Abort(_('source has mq patches applied'))
3464 raise error.Abort(_('source has mq patches applied'))
3465 # no non-secret patches pushed
3465 # no non-secret patches pushed
3466 super(mqrepo, self).checkpush(pushop)
3466 super(mqrepo, self).checkpush(pushop)
3467
3467
3468 def _findtags(self):
3468 def _findtags(self):
3469 '''augment tags from base class with patch tags'''
3469 '''augment tags from base class with patch tags'''
3470 result = super(mqrepo, self)._findtags()
3470 result = super(mqrepo, self)._findtags()
3471
3471
3472 q = self.mq
3472 q = self.mq
3473 if not q.applied:
3473 if not q.applied:
3474 return result
3474 return result
3475
3475
3476 mqtags = [(patch.node, patch.name) for patch in q.applied]
3476 mqtags = [(patch.node, patch.name) for patch in q.applied]
3477
3477
3478 try:
3478 try:
3479 # for now ignore filtering business
3479 # for now ignore filtering business
3480 self.unfiltered().changelog.rev(mqtags[-1][0])
3480 self.unfiltered().changelog.rev(mqtags[-1][0])
3481 except error.LookupError:
3481 except error.LookupError:
3482 self.ui.warn(_('mq status file refers to unknown node %s\n')
3482 self.ui.warn(_('mq status file refers to unknown node %s\n')
3483 % short(mqtags[-1][0]))
3483 % short(mqtags[-1][0]))
3484 return result
3484 return result
3485
3485
3486 # do not add fake tags for filtered revisions
3486 # do not add fake tags for filtered revisions
3487 included = self.changelog.hasnode
3487 included = self.changelog.hasnode
3488 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3488 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3489 if not mqtags:
3489 if not mqtags:
3490 return result
3490 return result
3491
3491
3492 mqtags.append((mqtags[-1][0], 'qtip'))
3492 mqtags.append((mqtags[-1][0], 'qtip'))
3493 mqtags.append((mqtags[0][0], 'qbase'))
3493 mqtags.append((mqtags[0][0], 'qbase'))
3494 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3494 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3495 tags = result[0]
3495 tags = result[0]
3496 for patch in mqtags:
3496 for patch in mqtags:
3497 if patch[1] in tags:
3497 if patch[1] in tags:
3498 self.ui.warn(_('tag %s overrides mq patch of the same '
3498 self.ui.warn(_('tag %s overrides mq patch of the same '
3499 'name\n') % patch[1])
3499 'name\n') % patch[1])
3500 else:
3500 else:
3501 tags[patch[1]] = patch[0]
3501 tags[patch[1]] = patch[0]
3502
3502
3503 return result
3503 return result
3504
3504
3505 if repo.local():
3505 if repo.local():
3506 repo.__class__ = mqrepo
3506 repo.__class__ = mqrepo
3507
3507
3508 repo._phasedefaults.append(mqphasedefaults)
3508 repo._phasedefaults.append(mqphasedefaults)
3509
3509
3510 def mqimport(orig, ui, repo, *args, **kwargs):
3510 def mqimport(orig, ui, repo, *args, **kwargs):
3511 if (util.safehasattr(repo, 'abortifwdirpatched')
3511 if (util.safehasattr(repo, 'abortifwdirpatched')
3512 and not kwargs.get('no_commit', False)):
3512 and not kwargs.get('no_commit', False)):
3513 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3513 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3514 kwargs.get('force'))
3514 kwargs.get('force'))
3515 return orig(ui, repo, *args, **kwargs)
3515 return orig(ui, repo, *args, **kwargs)
3516
3516
3517 def mqinit(orig, ui, *args, **kwargs):
3517 def mqinit(orig, ui, *args, **kwargs):
3518 mq = kwargs.pop('mq', None)
3518 mq = kwargs.pop('mq', None)
3519
3519
3520 if not mq:
3520 if not mq:
3521 return orig(ui, *args, **kwargs)
3521 return orig(ui, *args, **kwargs)
3522
3522
3523 if args:
3523 if args:
3524 repopath = args[0]
3524 repopath = args[0]
3525 if not hg.islocal(repopath):
3525 if not hg.islocal(repopath):
3526 raise error.Abort(_('only a local queue repository '
3526 raise error.Abort(_('only a local queue repository '
3527 'may be initialized'))
3527 'may be initialized'))
3528 else:
3528 else:
3529 repopath = cmdutil.findrepo(pycompat.getcwd())
3529 repopath = cmdutil.findrepo(pycompat.getcwd())
3530 if not repopath:
3530 if not repopath:
3531 raise error.Abort(_('there is no Mercurial repository here '
3531 raise error.Abort(_('there is no Mercurial repository here '
3532 '(.hg not found)'))
3532 '(.hg not found)'))
3533 repo = hg.repository(ui, repopath)
3533 repo = hg.repository(ui, repopath)
3534 return qinit(ui, repo, True)
3534 return qinit(ui, repo, True)
3535
3535
3536 def mqcommand(orig, ui, repo, *args, **kwargs):
3536 def mqcommand(orig, ui, repo, *args, **kwargs):
3537 """Add --mq option to operate on patch repository instead of main"""
3537 """Add --mq option to operate on patch repository instead of main"""
3538
3538
3539 # some commands do not like getting unknown options
3539 # some commands do not like getting unknown options
3540 mq = kwargs.pop('mq', None)
3540 mq = kwargs.pop('mq', None)
3541
3541
3542 if not mq:
3542 if not mq:
3543 return orig(ui, repo, *args, **kwargs)
3543 return orig(ui, repo, *args, **kwargs)
3544
3544
3545 q = repo.mq
3545 q = repo.mq
3546 r = q.qrepo()
3546 r = q.qrepo()
3547 if not r:
3547 if not r:
3548 raise error.Abort(_('no queue repository'))
3548 raise error.Abort(_('no queue repository'))
3549 return orig(r.ui, r, *args, **kwargs)
3549 return orig(r.ui, r, *args, **kwargs)
3550
3550
3551 def summaryhook(ui, repo):
3551 def summaryhook(ui, repo):
3552 q = repo.mq
3552 q = repo.mq
3553 m = []
3553 m = []
3554 a, u = len(q.applied), len(q.unapplied(repo))
3554 a, u = len(q.applied), len(q.unapplied(repo))
3555 if a:
3555 if a:
3556 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3556 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3557 if u:
3557 if u:
3558 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3558 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3559 if m:
3559 if m:
3560 # i18n: column positioning for "hg summary"
3560 # i18n: column positioning for "hg summary"
3561 ui.write(_("mq: %s\n") % ', '.join(m))
3561 ui.write(_("mq: %s\n") % ', '.join(m))
3562 else:
3562 else:
3563 # i18n: column positioning for "hg summary"
3563 # i18n: column positioning for "hg summary"
3564 ui.note(_("mq: (empty queue)\n"))
3564 ui.note(_("mq: (empty queue)\n"))
3565
3565
3566 revsetpredicate = registrar.revsetpredicate()
3566 revsetpredicate = registrar.revsetpredicate()
3567
3567
3568 @revsetpredicate('mq()')
3568 @revsetpredicate('mq()')
3569 def revsetmq(repo, subset, x):
3569 def revsetmq(repo, subset, x):
3570 """Changesets managed by MQ.
3570 """Changesets managed by MQ.
3571 """
3571 """
3572 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3572 revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
3573 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3573 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3574 return smartset.baseset([r for r in subset if r in applied])
3574 return smartset.baseset([r for r in subset if r in applied])
3575
3575
3576 # tell hggettext to extract docstrings from these functions:
3576 # tell hggettext to extract docstrings from these functions:
3577 i18nfunctions = [revsetmq]
3577 i18nfunctions = [revsetmq]
3578
3578
3579 def extsetup(ui):
3579 def extsetup(ui):
3580 # Ensure mq wrappers are called first, regardless of extension load order by
3580 # Ensure mq wrappers are called first, regardless of extension load order by
3581 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3581 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3582 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3582 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3583
3583
3584 extensions.wrapcommand(commands.table, 'import', mqimport)
3584 extensions.wrapcommand(commands.table, 'import', mqimport)
3585 cmdutil.summaryhooks.add('mq', summaryhook)
3585 cmdutil.summaryhooks.add('mq', summaryhook)
3586
3586
3587 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3587 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3588 entry[1].extend(mqopt)
3588 entry[1].extend(mqopt)
3589
3589
3590 def dotable(cmdtable):
3590 def dotable(cmdtable):
3591 for cmd, entry in cmdtable.iteritems():
3591 for cmd, entry in cmdtable.iteritems():
3592 cmd = cmdutil.parsealiases(cmd)[0]
3592 cmd = cmdutil.parsealiases(cmd)[0]
3593 func = entry[0]
3593 func = entry[0]
3594 if func.norepo:
3594 if func.norepo:
3595 continue
3595 continue
3596 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3596 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3597 entry[1].extend(mqopt)
3597 entry[1].extend(mqopt)
3598
3598
3599 dotable(commands.table)
3599 dotable(commands.table)
3600
3600
3601 for extname, extmodule in extensions.extensions():
3601 for extname, extmodule in extensions.extensions():
3602 if extmodule.__file__ != __file__:
3602 if extmodule.__file__ != __file__:
3603 dotable(getattr(extmodule, 'cmdtable', {}))
3603 dotable(getattr(extmodule, 'cmdtable', {}))
3604
3604
3605 colortable = {'qguard.negative': 'red',
3605 colortable = {'qguard.negative': 'red',
3606 'qguard.positive': 'yellow',
3606 'qguard.positive': 'yellow',
3607 'qguard.unguarded': 'green',
3607 'qguard.unguarded': 'green',
3608 'qseries.applied': 'blue bold underline',
3608 'qseries.applied': 'blue bold underline',
3609 'qseries.guarded': 'black bold',
3609 'qseries.guarded': 'black bold',
3610 'qseries.missing': 'red bold',
3610 'qseries.missing': 'red bold',
3611 'qseries.unapplied': 'black bold'}
3611 'qseries.unapplied': 'black bold'}
@@ -1,746 +1,746 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to transplant changesets from another branch
8 '''command to transplant changesets from another branch
9
9
10 This extension allows you to transplant changes to another parent revision,
10 This extension allows you to transplant changes to another parent revision,
11 possibly in another repository. The transplant is done using 'diff' patches.
11 possibly in another repository. The transplant is done using 'diff' patches.
12
12
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
14 map from a changeset hash to its hash in the source repository.
14 map from a changeset hash to its hash in the source repository.
15 '''
15 '''
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import os
18 import os
19 import tempfile
19 import tempfile
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial import (
21 from mercurial import (
22 bundlerepo,
22 bundlerepo,
23 cmdutil,
23 cmdutil,
24 error,
24 error,
25 exchange,
25 exchange,
26 hg,
26 hg,
27 match,
27 match,
28 merge,
28 merge,
29 node as nodemod,
29 node as nodemod,
30 patch,
30 patch,
31 pycompat,
31 pycompat,
32 registrar,
32 registrar,
33 revlog,
33 revlog,
34 revset,
34 revset,
35 scmutil,
35 scmutil,
36 smartset,
36 smartset,
37 util,
37 util,
38 )
38 )
39
39
40 class TransplantError(error.Abort):
40 class TransplantError(error.Abort):
41 pass
41 pass
42
42
43 cmdtable = {}
43 cmdtable = {}
44 command = cmdutil.command(cmdtable)
44 command = cmdutil.command(cmdtable)
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
47 # be specifying the version(s) of Mercurial they are tested with, or
47 # be specifying the version(s) of Mercurial they are tested with, or
48 # leave the attribute unspecified.
48 # leave the attribute unspecified.
49 testedwith = 'ships-with-hg-core'
49 testedwith = 'ships-with-hg-core'
50
50
51 class transplantentry(object):
51 class transplantentry(object):
52 def __init__(self, lnode, rnode):
52 def __init__(self, lnode, rnode):
53 self.lnode = lnode
53 self.lnode = lnode
54 self.rnode = rnode
54 self.rnode = rnode
55
55
56 class transplants(object):
56 class transplants(object):
57 def __init__(self, path=None, transplantfile=None, opener=None):
57 def __init__(self, path=None, transplantfile=None, opener=None):
58 self.path = path
58 self.path = path
59 self.transplantfile = transplantfile
59 self.transplantfile = transplantfile
60 self.opener = opener
60 self.opener = opener
61
61
62 if not opener:
62 if not opener:
63 self.opener = scmutil.opener(self.path)
63 self.opener = scmutil.vfs(self.path)
64 self.transplants = {}
64 self.transplants = {}
65 self.dirty = False
65 self.dirty = False
66 self.read()
66 self.read()
67
67
68 def read(self):
68 def read(self):
69 abspath = os.path.join(self.path, self.transplantfile)
69 abspath = os.path.join(self.path, self.transplantfile)
70 if self.transplantfile and os.path.exists(abspath):
70 if self.transplantfile and os.path.exists(abspath):
71 for line in self.opener.read(self.transplantfile).splitlines():
71 for line in self.opener.read(self.transplantfile).splitlines():
72 lnode, rnode = map(revlog.bin, line.split(':'))
72 lnode, rnode = map(revlog.bin, line.split(':'))
73 list = self.transplants.setdefault(rnode, [])
73 list = self.transplants.setdefault(rnode, [])
74 list.append(transplantentry(lnode, rnode))
74 list.append(transplantentry(lnode, rnode))
75
75
76 def write(self):
76 def write(self):
77 if self.dirty and self.transplantfile:
77 if self.dirty and self.transplantfile:
78 if not os.path.isdir(self.path):
78 if not os.path.isdir(self.path):
79 os.mkdir(self.path)
79 os.mkdir(self.path)
80 fp = self.opener(self.transplantfile, 'w')
80 fp = self.opener(self.transplantfile, 'w')
81 for list in self.transplants.itervalues():
81 for list in self.transplants.itervalues():
82 for t in list:
82 for t in list:
83 l, r = map(nodemod.hex, (t.lnode, t.rnode))
83 l, r = map(nodemod.hex, (t.lnode, t.rnode))
84 fp.write(l + ':' + r + '\n')
84 fp.write(l + ':' + r + '\n')
85 fp.close()
85 fp.close()
86 self.dirty = False
86 self.dirty = False
87
87
88 def get(self, rnode):
88 def get(self, rnode):
89 return self.transplants.get(rnode) or []
89 return self.transplants.get(rnode) or []
90
90
91 def set(self, lnode, rnode):
91 def set(self, lnode, rnode):
92 list = self.transplants.setdefault(rnode, [])
92 list = self.transplants.setdefault(rnode, [])
93 list.append(transplantentry(lnode, rnode))
93 list.append(transplantentry(lnode, rnode))
94 self.dirty = True
94 self.dirty = True
95
95
96 def remove(self, transplant):
96 def remove(self, transplant):
97 list = self.transplants.get(transplant.rnode)
97 list = self.transplants.get(transplant.rnode)
98 if list:
98 if list:
99 del list[list.index(transplant)]
99 del list[list.index(transplant)]
100 self.dirty = True
100 self.dirty = True
101
101
102 class transplanter(object):
102 class transplanter(object):
103 def __init__(self, ui, repo, opts):
103 def __init__(self, ui, repo, opts):
104 self.ui = ui
104 self.ui = ui
105 self.path = repo.join('transplant')
105 self.path = repo.join('transplant')
106 self.opener = scmutil.opener(self.path)
106 self.opener = scmutil.vfs(self.path)
107 self.transplants = transplants(self.path, 'transplants',
107 self.transplants = transplants(self.path, 'transplants',
108 opener=self.opener)
108 opener=self.opener)
109 def getcommiteditor():
109 def getcommiteditor():
110 editform = cmdutil.mergeeditform(repo[None], 'transplant')
110 editform = cmdutil.mergeeditform(repo[None], 'transplant')
111 return cmdutil.getcommiteditor(editform=editform, **opts)
111 return cmdutil.getcommiteditor(editform=editform, **opts)
112 self.getcommiteditor = getcommiteditor
112 self.getcommiteditor = getcommiteditor
113
113
114 def applied(self, repo, node, parent):
114 def applied(self, repo, node, parent):
115 '''returns True if a node is already an ancestor of parent
115 '''returns True if a node is already an ancestor of parent
116 or is parent or has already been transplanted'''
116 or is parent or has already been transplanted'''
117 if hasnode(repo, parent):
117 if hasnode(repo, parent):
118 parentrev = repo.changelog.rev(parent)
118 parentrev = repo.changelog.rev(parent)
119 if hasnode(repo, node):
119 if hasnode(repo, node):
120 rev = repo.changelog.rev(node)
120 rev = repo.changelog.rev(node)
121 reachable = repo.changelog.ancestors([parentrev], rev,
121 reachable = repo.changelog.ancestors([parentrev], rev,
122 inclusive=True)
122 inclusive=True)
123 if rev in reachable:
123 if rev in reachable:
124 return True
124 return True
125 for t in self.transplants.get(node):
125 for t in self.transplants.get(node):
126 # it might have been stripped
126 # it might have been stripped
127 if not hasnode(repo, t.lnode):
127 if not hasnode(repo, t.lnode):
128 self.transplants.remove(t)
128 self.transplants.remove(t)
129 return False
129 return False
130 lnoderev = repo.changelog.rev(t.lnode)
130 lnoderev = repo.changelog.rev(t.lnode)
131 if lnoderev in repo.changelog.ancestors([parentrev], lnoderev,
131 if lnoderev in repo.changelog.ancestors([parentrev], lnoderev,
132 inclusive=True):
132 inclusive=True):
133 return True
133 return True
134 return False
134 return False
135
135
136 def apply(self, repo, source, revmap, merges, opts=None):
136 def apply(self, repo, source, revmap, merges, opts=None):
137 '''apply the revisions in revmap one by one in revision order'''
137 '''apply the revisions in revmap one by one in revision order'''
138 if opts is None:
138 if opts is None:
139 opts = {}
139 opts = {}
140 revs = sorted(revmap)
140 revs = sorted(revmap)
141 p1, p2 = repo.dirstate.parents()
141 p1, p2 = repo.dirstate.parents()
142 pulls = []
142 pulls = []
143 diffopts = patch.difffeatureopts(self.ui, opts)
143 diffopts = patch.difffeatureopts(self.ui, opts)
144 diffopts.git = True
144 diffopts.git = True
145
145
146 lock = tr = None
146 lock = tr = None
147 try:
147 try:
148 lock = repo.lock()
148 lock = repo.lock()
149 tr = repo.transaction('transplant')
149 tr = repo.transaction('transplant')
150 for rev in revs:
150 for rev in revs:
151 node = revmap[rev]
151 node = revmap[rev]
152 revstr = '%s:%s' % (rev, nodemod.short(node))
152 revstr = '%s:%s' % (rev, nodemod.short(node))
153
153
154 if self.applied(repo, node, p1):
154 if self.applied(repo, node, p1):
155 self.ui.warn(_('skipping already applied revision %s\n') %
155 self.ui.warn(_('skipping already applied revision %s\n') %
156 revstr)
156 revstr)
157 continue
157 continue
158
158
159 parents = source.changelog.parents(node)
159 parents = source.changelog.parents(node)
160 if not (opts.get('filter') or opts.get('log')):
160 if not (opts.get('filter') or opts.get('log')):
161 # If the changeset parent is the same as the
161 # If the changeset parent is the same as the
162 # wdir's parent, just pull it.
162 # wdir's parent, just pull it.
163 if parents[0] == p1:
163 if parents[0] == p1:
164 pulls.append(node)
164 pulls.append(node)
165 p1 = node
165 p1 = node
166 continue
166 continue
167 if pulls:
167 if pulls:
168 if source != repo:
168 if source != repo:
169 exchange.pull(repo, source.peer(), heads=pulls)
169 exchange.pull(repo, source.peer(), heads=pulls)
170 merge.update(repo, pulls[-1], False, False)
170 merge.update(repo, pulls[-1], False, False)
171 p1, p2 = repo.dirstate.parents()
171 p1, p2 = repo.dirstate.parents()
172 pulls = []
172 pulls = []
173
173
174 domerge = False
174 domerge = False
175 if node in merges:
175 if node in merges:
176 # pulling all the merge revs at once would mean we
176 # pulling all the merge revs at once would mean we
177 # couldn't transplant after the latest even if
177 # couldn't transplant after the latest even if
178 # transplants before them fail.
178 # transplants before them fail.
179 domerge = True
179 domerge = True
180 if not hasnode(repo, node):
180 if not hasnode(repo, node):
181 exchange.pull(repo, source.peer(), heads=[node])
181 exchange.pull(repo, source.peer(), heads=[node])
182
182
183 skipmerge = False
183 skipmerge = False
184 if parents[1] != revlog.nullid:
184 if parents[1] != revlog.nullid:
185 if not opts.get('parent'):
185 if not opts.get('parent'):
186 self.ui.note(_('skipping merge changeset %s:%s\n')
186 self.ui.note(_('skipping merge changeset %s:%s\n')
187 % (rev, nodemod.short(node)))
187 % (rev, nodemod.short(node)))
188 skipmerge = True
188 skipmerge = True
189 else:
189 else:
190 parent = source.lookup(opts['parent'])
190 parent = source.lookup(opts['parent'])
191 if parent not in parents:
191 if parent not in parents:
192 raise error.Abort(_('%s is not a parent of %s') %
192 raise error.Abort(_('%s is not a parent of %s') %
193 (nodemod.short(parent),
193 (nodemod.short(parent),
194 nodemod.short(node)))
194 nodemod.short(node)))
195 else:
195 else:
196 parent = parents[0]
196 parent = parents[0]
197
197
198 if skipmerge:
198 if skipmerge:
199 patchfile = None
199 patchfile = None
200 else:
200 else:
201 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
201 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
202 fp = os.fdopen(fd, pycompat.sysstr('w'))
202 fp = os.fdopen(fd, pycompat.sysstr('w'))
203 gen = patch.diff(source, parent, node, opts=diffopts)
203 gen = patch.diff(source, parent, node, opts=diffopts)
204 for chunk in gen:
204 for chunk in gen:
205 fp.write(chunk)
205 fp.write(chunk)
206 fp.close()
206 fp.close()
207
207
208 del revmap[rev]
208 del revmap[rev]
209 if patchfile or domerge:
209 if patchfile or domerge:
210 try:
210 try:
211 try:
211 try:
212 n = self.applyone(repo, node,
212 n = self.applyone(repo, node,
213 source.changelog.read(node),
213 source.changelog.read(node),
214 patchfile, merge=domerge,
214 patchfile, merge=domerge,
215 log=opts.get('log'),
215 log=opts.get('log'),
216 filter=opts.get('filter'))
216 filter=opts.get('filter'))
217 except TransplantError:
217 except TransplantError:
218 # Do not rollback, it is up to the user to
218 # Do not rollback, it is up to the user to
219 # fix the merge or cancel everything
219 # fix the merge or cancel everything
220 tr.close()
220 tr.close()
221 raise
221 raise
222 if n and domerge:
222 if n and domerge:
223 self.ui.status(_('%s merged at %s\n') % (revstr,
223 self.ui.status(_('%s merged at %s\n') % (revstr,
224 nodemod.short(n)))
224 nodemod.short(n)))
225 elif n:
225 elif n:
226 self.ui.status(_('%s transplanted to %s\n')
226 self.ui.status(_('%s transplanted to %s\n')
227 % (nodemod.short(node),
227 % (nodemod.short(node),
228 nodemod.short(n)))
228 nodemod.short(n)))
229 finally:
229 finally:
230 if patchfile:
230 if patchfile:
231 os.unlink(patchfile)
231 os.unlink(patchfile)
232 tr.close()
232 tr.close()
233 if pulls:
233 if pulls:
234 exchange.pull(repo, source.peer(), heads=pulls)
234 exchange.pull(repo, source.peer(), heads=pulls)
235 merge.update(repo, pulls[-1], False, False)
235 merge.update(repo, pulls[-1], False, False)
236 finally:
236 finally:
237 self.saveseries(revmap, merges)
237 self.saveseries(revmap, merges)
238 self.transplants.write()
238 self.transplants.write()
239 if tr:
239 if tr:
240 tr.release()
240 tr.release()
241 if lock:
241 if lock:
242 lock.release()
242 lock.release()
243
243
244 def filter(self, filter, node, changelog, patchfile):
244 def filter(self, filter, node, changelog, patchfile):
245 '''arbitrarily rewrite changeset before applying it'''
245 '''arbitrarily rewrite changeset before applying it'''
246
246
247 self.ui.status(_('filtering %s\n') % patchfile)
247 self.ui.status(_('filtering %s\n') % patchfile)
248 user, date, msg = (changelog[1], changelog[2], changelog[4])
248 user, date, msg = (changelog[1], changelog[2], changelog[4])
249 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
249 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
250 fp = os.fdopen(fd, pycompat.sysstr('w'))
250 fp = os.fdopen(fd, pycompat.sysstr('w'))
251 fp.write("# HG changeset patch\n")
251 fp.write("# HG changeset patch\n")
252 fp.write("# User %s\n" % user)
252 fp.write("# User %s\n" % user)
253 fp.write("# Date %d %d\n" % date)
253 fp.write("# Date %d %d\n" % date)
254 fp.write(msg + '\n')
254 fp.write(msg + '\n')
255 fp.close()
255 fp.close()
256
256
257 try:
257 try:
258 self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile),
258 self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile),
259 util.shellquote(patchfile)),
259 util.shellquote(patchfile)),
260 environ={'HGUSER': changelog[1],
260 environ={'HGUSER': changelog[1],
261 'HGREVISION': nodemod.hex(node),
261 'HGREVISION': nodemod.hex(node),
262 },
262 },
263 onerr=error.Abort, errprefix=_('filter failed'),
263 onerr=error.Abort, errprefix=_('filter failed'),
264 blockedtag='transplant_filter')
264 blockedtag='transplant_filter')
265 user, date, msg = self.parselog(file(headerfile))[1:4]
265 user, date, msg = self.parselog(file(headerfile))[1:4]
266 finally:
266 finally:
267 os.unlink(headerfile)
267 os.unlink(headerfile)
268
268
269 return (user, date, msg)
269 return (user, date, msg)
270
270
271 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
271 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
272 filter=None):
272 filter=None):
273 '''apply the patch in patchfile to the repository as a transplant'''
273 '''apply the patch in patchfile to the repository as a transplant'''
274 (manifest, user, (time, timezone), files, message) = cl[:5]
274 (manifest, user, (time, timezone), files, message) = cl[:5]
275 date = "%d %d" % (time, timezone)
275 date = "%d %d" % (time, timezone)
276 extra = {'transplant_source': node}
276 extra = {'transplant_source': node}
277 if filter:
277 if filter:
278 (user, date, message) = self.filter(filter, node, cl, patchfile)
278 (user, date, message) = self.filter(filter, node, cl, patchfile)
279
279
280 if log:
280 if log:
281 # we don't translate messages inserted into commits
281 # we don't translate messages inserted into commits
282 message += '\n(transplanted from %s)' % nodemod.hex(node)
282 message += '\n(transplanted from %s)' % nodemod.hex(node)
283
283
284 self.ui.status(_('applying %s\n') % nodemod.short(node))
284 self.ui.status(_('applying %s\n') % nodemod.short(node))
285 self.ui.note('%s %s\n%s\n' % (user, date, message))
285 self.ui.note('%s %s\n%s\n' % (user, date, message))
286
286
287 if not patchfile and not merge:
287 if not patchfile and not merge:
288 raise error.Abort(_('can only omit patchfile if merging'))
288 raise error.Abort(_('can only omit patchfile if merging'))
289 if patchfile:
289 if patchfile:
290 try:
290 try:
291 files = set()
291 files = set()
292 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
292 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
293 files = list(files)
293 files = list(files)
294 except Exception as inst:
294 except Exception as inst:
295 seriespath = os.path.join(self.path, 'series')
295 seriespath = os.path.join(self.path, 'series')
296 if os.path.exists(seriespath):
296 if os.path.exists(seriespath):
297 os.unlink(seriespath)
297 os.unlink(seriespath)
298 p1 = repo.dirstate.p1()
298 p1 = repo.dirstate.p1()
299 p2 = node
299 p2 = node
300 self.log(user, date, message, p1, p2, merge=merge)
300 self.log(user, date, message, p1, p2, merge=merge)
301 self.ui.write(str(inst) + '\n')
301 self.ui.write(str(inst) + '\n')
302 raise TransplantError(_('fix up the working directory and run '
302 raise TransplantError(_('fix up the working directory and run '
303 'hg transplant --continue'))
303 'hg transplant --continue'))
304 else:
304 else:
305 files = None
305 files = None
306 if merge:
306 if merge:
307 p1, p2 = repo.dirstate.parents()
307 p1, p2 = repo.dirstate.parents()
308 repo.setparents(p1, node)
308 repo.setparents(p1, node)
309 m = match.always(repo.root, '')
309 m = match.always(repo.root, '')
310 else:
310 else:
311 m = match.exact(repo.root, '', files)
311 m = match.exact(repo.root, '', files)
312
312
313 n = repo.commit(message, user, date, extra=extra, match=m,
313 n = repo.commit(message, user, date, extra=extra, match=m,
314 editor=self.getcommiteditor())
314 editor=self.getcommiteditor())
315 if not n:
315 if not n:
316 self.ui.warn(_('skipping emptied changeset %s\n') %
316 self.ui.warn(_('skipping emptied changeset %s\n') %
317 nodemod.short(node))
317 nodemod.short(node))
318 return None
318 return None
319 if not merge:
319 if not merge:
320 self.transplants.set(n, node)
320 self.transplants.set(n, node)
321
321
322 return n
322 return n
323
323
324 def canresume(self):
324 def canresume(self):
325 return os.path.exists(os.path.join(self.path, 'journal'))
325 return os.path.exists(os.path.join(self.path, 'journal'))
326
326
327 def resume(self, repo, source, opts):
327 def resume(self, repo, source, opts):
328 '''recover last transaction and apply remaining changesets'''
328 '''recover last transaction and apply remaining changesets'''
329 if os.path.exists(os.path.join(self.path, 'journal')):
329 if os.path.exists(os.path.join(self.path, 'journal')):
330 n, node = self.recover(repo, source, opts)
330 n, node = self.recover(repo, source, opts)
331 if n:
331 if n:
332 self.ui.status(_('%s transplanted as %s\n') %
332 self.ui.status(_('%s transplanted as %s\n') %
333 (nodemod.short(node),
333 (nodemod.short(node),
334 nodemod.short(n)))
334 nodemod.short(n)))
335 else:
335 else:
336 self.ui.status(_('%s skipped due to empty diff\n')
336 self.ui.status(_('%s skipped due to empty diff\n')
337 % (nodemod.short(node),))
337 % (nodemod.short(node),))
338 seriespath = os.path.join(self.path, 'series')
338 seriespath = os.path.join(self.path, 'series')
339 if not os.path.exists(seriespath):
339 if not os.path.exists(seriespath):
340 self.transplants.write()
340 self.transplants.write()
341 return
341 return
342 nodes, merges = self.readseries()
342 nodes, merges = self.readseries()
343 revmap = {}
343 revmap = {}
344 for n in nodes:
344 for n in nodes:
345 revmap[source.changelog.rev(n)] = n
345 revmap[source.changelog.rev(n)] = n
346 os.unlink(seriespath)
346 os.unlink(seriespath)
347
347
348 self.apply(repo, source, revmap, merges, opts)
348 self.apply(repo, source, revmap, merges, opts)
349
349
350 def recover(self, repo, source, opts):
350 def recover(self, repo, source, opts):
351 '''commit working directory using journal metadata'''
351 '''commit working directory using journal metadata'''
352 node, user, date, message, parents = self.readlog()
352 node, user, date, message, parents = self.readlog()
353 merge = False
353 merge = False
354
354
355 if not user or not date or not message or not parents[0]:
355 if not user or not date or not message or not parents[0]:
356 raise error.Abort(_('transplant log file is corrupt'))
356 raise error.Abort(_('transplant log file is corrupt'))
357
357
358 parent = parents[0]
358 parent = parents[0]
359 if len(parents) > 1:
359 if len(parents) > 1:
360 if opts.get('parent'):
360 if opts.get('parent'):
361 parent = source.lookup(opts['parent'])
361 parent = source.lookup(opts['parent'])
362 if parent not in parents:
362 if parent not in parents:
363 raise error.Abort(_('%s is not a parent of %s') %
363 raise error.Abort(_('%s is not a parent of %s') %
364 (nodemod.short(parent),
364 (nodemod.short(parent),
365 nodemod.short(node)))
365 nodemod.short(node)))
366 else:
366 else:
367 merge = True
367 merge = True
368
368
369 extra = {'transplant_source': node}
369 extra = {'transplant_source': node}
370 try:
370 try:
371 p1, p2 = repo.dirstate.parents()
371 p1, p2 = repo.dirstate.parents()
372 if p1 != parent:
372 if p1 != parent:
373 raise error.Abort(_('working directory not at transplant '
373 raise error.Abort(_('working directory not at transplant '
374 'parent %s') % nodemod.hex(parent))
374 'parent %s') % nodemod.hex(parent))
375 if merge:
375 if merge:
376 repo.setparents(p1, parents[1])
376 repo.setparents(p1, parents[1])
377 modified, added, removed, deleted = repo.status()[:4]
377 modified, added, removed, deleted = repo.status()[:4]
378 if merge or modified or added or removed or deleted:
378 if merge or modified or added or removed or deleted:
379 n = repo.commit(message, user, date, extra=extra,
379 n = repo.commit(message, user, date, extra=extra,
380 editor=self.getcommiteditor())
380 editor=self.getcommiteditor())
381 if not n:
381 if not n:
382 raise error.Abort(_('commit failed'))
382 raise error.Abort(_('commit failed'))
383 if not merge:
383 if not merge:
384 self.transplants.set(n, node)
384 self.transplants.set(n, node)
385 else:
385 else:
386 n = None
386 n = None
387 self.unlog()
387 self.unlog()
388
388
389 return n, node
389 return n, node
390 finally:
390 finally:
391 # TODO: get rid of this meaningless try/finally enclosing.
391 # TODO: get rid of this meaningless try/finally enclosing.
392 # this is kept only to reduce changes in a patch.
392 # this is kept only to reduce changes in a patch.
393 pass
393 pass
394
394
395 def readseries(self):
395 def readseries(self):
396 nodes = []
396 nodes = []
397 merges = []
397 merges = []
398 cur = nodes
398 cur = nodes
399 for line in self.opener.read('series').splitlines():
399 for line in self.opener.read('series').splitlines():
400 if line.startswith('# Merges'):
400 if line.startswith('# Merges'):
401 cur = merges
401 cur = merges
402 continue
402 continue
403 cur.append(revlog.bin(line))
403 cur.append(revlog.bin(line))
404
404
405 return (nodes, merges)
405 return (nodes, merges)
406
406
407 def saveseries(self, revmap, merges):
407 def saveseries(self, revmap, merges):
408 if not revmap:
408 if not revmap:
409 return
409 return
410
410
411 if not os.path.isdir(self.path):
411 if not os.path.isdir(self.path):
412 os.mkdir(self.path)
412 os.mkdir(self.path)
413 series = self.opener('series', 'w')
413 series = self.opener('series', 'w')
414 for rev in sorted(revmap):
414 for rev in sorted(revmap):
415 series.write(nodemod.hex(revmap[rev]) + '\n')
415 series.write(nodemod.hex(revmap[rev]) + '\n')
416 if merges:
416 if merges:
417 series.write('# Merges\n')
417 series.write('# Merges\n')
418 for m in merges:
418 for m in merges:
419 series.write(nodemod.hex(m) + '\n')
419 series.write(nodemod.hex(m) + '\n')
420 series.close()
420 series.close()
421
421
422 def parselog(self, fp):
422 def parselog(self, fp):
423 parents = []
423 parents = []
424 message = []
424 message = []
425 node = revlog.nullid
425 node = revlog.nullid
426 inmsg = False
426 inmsg = False
427 user = None
427 user = None
428 date = None
428 date = None
429 for line in fp.read().splitlines():
429 for line in fp.read().splitlines():
430 if inmsg:
430 if inmsg:
431 message.append(line)
431 message.append(line)
432 elif line.startswith('# User '):
432 elif line.startswith('# User '):
433 user = line[7:]
433 user = line[7:]
434 elif line.startswith('# Date '):
434 elif line.startswith('# Date '):
435 date = line[7:]
435 date = line[7:]
436 elif line.startswith('# Node ID '):
436 elif line.startswith('# Node ID '):
437 node = revlog.bin(line[10:])
437 node = revlog.bin(line[10:])
438 elif line.startswith('# Parent '):
438 elif line.startswith('# Parent '):
439 parents.append(revlog.bin(line[9:]))
439 parents.append(revlog.bin(line[9:]))
440 elif not line.startswith('# '):
440 elif not line.startswith('# '):
441 inmsg = True
441 inmsg = True
442 message.append(line)
442 message.append(line)
443 if None in (user, date):
443 if None in (user, date):
444 raise error.Abort(_("filter corrupted changeset (no user or date)"))
444 raise error.Abort(_("filter corrupted changeset (no user or date)"))
445 return (node, user, date, '\n'.join(message), parents)
445 return (node, user, date, '\n'.join(message), parents)
446
446
447 def log(self, user, date, message, p1, p2, merge=False):
447 def log(self, user, date, message, p1, p2, merge=False):
448 '''journal changelog metadata for later recover'''
448 '''journal changelog metadata for later recover'''
449
449
450 if not os.path.isdir(self.path):
450 if not os.path.isdir(self.path):
451 os.mkdir(self.path)
451 os.mkdir(self.path)
452 fp = self.opener('journal', 'w')
452 fp = self.opener('journal', 'w')
453 fp.write('# User %s\n' % user)
453 fp.write('# User %s\n' % user)
454 fp.write('# Date %s\n' % date)
454 fp.write('# Date %s\n' % date)
455 fp.write('# Node ID %s\n' % nodemod.hex(p2))
455 fp.write('# Node ID %s\n' % nodemod.hex(p2))
456 fp.write('# Parent ' + nodemod.hex(p1) + '\n')
456 fp.write('# Parent ' + nodemod.hex(p1) + '\n')
457 if merge:
457 if merge:
458 fp.write('# Parent ' + nodemod.hex(p2) + '\n')
458 fp.write('# Parent ' + nodemod.hex(p2) + '\n')
459 fp.write(message.rstrip() + '\n')
459 fp.write(message.rstrip() + '\n')
460 fp.close()
460 fp.close()
461
461
462 def readlog(self):
462 def readlog(self):
463 return self.parselog(self.opener('journal'))
463 return self.parselog(self.opener('journal'))
464
464
465 def unlog(self):
465 def unlog(self):
466 '''remove changelog journal'''
466 '''remove changelog journal'''
467 absdst = os.path.join(self.path, 'journal')
467 absdst = os.path.join(self.path, 'journal')
468 if os.path.exists(absdst):
468 if os.path.exists(absdst):
469 os.unlink(absdst)
469 os.unlink(absdst)
470
470
471 def transplantfilter(self, repo, source, root):
471 def transplantfilter(self, repo, source, root):
472 def matchfn(node):
472 def matchfn(node):
473 if self.applied(repo, node, root):
473 if self.applied(repo, node, root):
474 return False
474 return False
475 if source.changelog.parents(node)[1] != revlog.nullid:
475 if source.changelog.parents(node)[1] != revlog.nullid:
476 return False
476 return False
477 extra = source.changelog.read(node)[5]
477 extra = source.changelog.read(node)[5]
478 cnode = extra.get('transplant_source')
478 cnode = extra.get('transplant_source')
479 if cnode and self.applied(repo, cnode, root):
479 if cnode and self.applied(repo, cnode, root):
480 return False
480 return False
481 return True
481 return True
482
482
483 return matchfn
483 return matchfn
484
484
485 def hasnode(repo, node):
485 def hasnode(repo, node):
486 try:
486 try:
487 return repo.changelog.rev(node) is not None
487 return repo.changelog.rev(node) is not None
488 except error.RevlogError:
488 except error.RevlogError:
489 return False
489 return False
490
490
491 def browserevs(ui, repo, nodes, opts):
491 def browserevs(ui, repo, nodes, opts):
492 '''interactively transplant changesets'''
492 '''interactively transplant changesets'''
493 displayer = cmdutil.show_changeset(ui, repo, opts)
493 displayer = cmdutil.show_changeset(ui, repo, opts)
494 transplants = []
494 transplants = []
495 merges = []
495 merges = []
496 prompt = _('apply changeset? [ynmpcq?]:'
496 prompt = _('apply changeset? [ynmpcq?]:'
497 '$$ &yes, transplant this changeset'
497 '$$ &yes, transplant this changeset'
498 '$$ &no, skip this changeset'
498 '$$ &no, skip this changeset'
499 '$$ &merge at this changeset'
499 '$$ &merge at this changeset'
500 '$$ show &patch'
500 '$$ show &patch'
501 '$$ &commit selected changesets'
501 '$$ &commit selected changesets'
502 '$$ &quit and cancel transplant'
502 '$$ &quit and cancel transplant'
503 '$$ &? (show this help)')
503 '$$ &? (show this help)')
504 for node in nodes:
504 for node in nodes:
505 displayer.show(repo[node])
505 displayer.show(repo[node])
506 action = None
506 action = None
507 while not action:
507 while not action:
508 action = 'ynmpcq?'[ui.promptchoice(prompt)]
508 action = 'ynmpcq?'[ui.promptchoice(prompt)]
509 if action == '?':
509 if action == '?':
510 for c, t in ui.extractchoices(prompt)[1]:
510 for c, t in ui.extractchoices(prompt)[1]:
511 ui.write('%s: %s\n' % (c, t))
511 ui.write('%s: %s\n' % (c, t))
512 action = None
512 action = None
513 elif action == 'p':
513 elif action == 'p':
514 parent = repo.changelog.parents(node)[0]
514 parent = repo.changelog.parents(node)[0]
515 for chunk in patch.diff(repo, parent, node):
515 for chunk in patch.diff(repo, parent, node):
516 ui.write(chunk)
516 ui.write(chunk)
517 action = None
517 action = None
518 if action == 'y':
518 if action == 'y':
519 transplants.append(node)
519 transplants.append(node)
520 elif action == 'm':
520 elif action == 'm':
521 merges.append(node)
521 merges.append(node)
522 elif action == 'c':
522 elif action == 'c':
523 break
523 break
524 elif action == 'q':
524 elif action == 'q':
525 transplants = ()
525 transplants = ()
526 merges = ()
526 merges = ()
527 break
527 break
528 displayer.close()
528 displayer.close()
529 return (transplants, merges)
529 return (transplants, merges)
530
530
531 @command('transplant',
531 @command('transplant',
532 [('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
532 [('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
533 ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
533 ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
534 ('a', 'all', None, _('pull all changesets up to the --branch revisions')),
534 ('a', 'all', None, _('pull all changesets up to the --branch revisions')),
535 ('p', 'prune', [], _('skip over REV'), _('REV')),
535 ('p', 'prune', [], _('skip over REV'), _('REV')),
536 ('m', 'merge', [], _('merge at REV'), _('REV')),
536 ('m', 'merge', [], _('merge at REV'), _('REV')),
537 ('', 'parent', '',
537 ('', 'parent', '',
538 _('parent to choose when transplanting merge'), _('REV')),
538 _('parent to choose when transplanting merge'), _('REV')),
539 ('e', 'edit', False, _('invoke editor on commit messages')),
539 ('e', 'edit', False, _('invoke editor on commit messages')),
540 ('', 'log', None, _('append transplant info to log message')),
540 ('', 'log', None, _('append transplant info to log message')),
541 ('c', 'continue', None, _('continue last transplant session '
541 ('c', 'continue', None, _('continue last transplant session '
542 'after fixing conflicts')),
542 'after fixing conflicts')),
543 ('', 'filter', '',
543 ('', 'filter', '',
544 _('filter changesets through command'), _('CMD'))],
544 _('filter changesets through command'), _('CMD'))],
545 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
545 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
546 '[-m REV] [REV]...'))
546 '[-m REV] [REV]...'))
547 def transplant(ui, repo, *revs, **opts):
547 def transplant(ui, repo, *revs, **opts):
548 '''transplant changesets from another branch
548 '''transplant changesets from another branch
549
549
550 Selected changesets will be applied on top of the current working
550 Selected changesets will be applied on top of the current working
551 directory with the log of the original changeset. The changesets
551 directory with the log of the original changeset. The changesets
552 are copied and will thus appear twice in the history with different
552 are copied and will thus appear twice in the history with different
553 identities.
553 identities.
554
554
555 Consider using the graft command if everything is inside the same
555 Consider using the graft command if everything is inside the same
556 repository - it will use merges and will usually give a better result.
556 repository - it will use merges and will usually give a better result.
557 Use the rebase extension if the changesets are unpublished and you want
557 Use the rebase extension if the changesets are unpublished and you want
558 to move them instead of copying them.
558 to move them instead of copying them.
559
559
560 If --log is specified, log messages will have a comment appended
560 If --log is specified, log messages will have a comment appended
561 of the form::
561 of the form::
562
562
563 (transplanted from CHANGESETHASH)
563 (transplanted from CHANGESETHASH)
564
564
565 You can rewrite the changelog message with the --filter option.
565 You can rewrite the changelog message with the --filter option.
566 Its argument will be invoked with the current changelog message as
566 Its argument will be invoked with the current changelog message as
567 $1 and the patch as $2.
567 $1 and the patch as $2.
568
568
569 --source/-s specifies another repository to use for selecting changesets,
569 --source/-s specifies another repository to use for selecting changesets,
570 just as if it temporarily had been pulled.
570 just as if it temporarily had been pulled.
571 If --branch/-b is specified, these revisions will be used as
571 If --branch/-b is specified, these revisions will be used as
572 heads when deciding which changesets to transplant, just as if only
572 heads when deciding which changesets to transplant, just as if only
573 these revisions had been pulled.
573 these revisions had been pulled.
574 If --all/-a is specified, all the revisions up to the heads specified
574 If --all/-a is specified, all the revisions up to the heads specified
575 with --branch will be transplanted.
575 with --branch will be transplanted.
576
576
577 Example:
577 Example:
578
578
579 - transplant all changes up to REV on top of your current revision::
579 - transplant all changes up to REV on top of your current revision::
580
580
581 hg transplant --branch REV --all
581 hg transplant --branch REV --all
582
582
583 You can optionally mark selected transplanted changesets as merge
583 You can optionally mark selected transplanted changesets as merge
584 changesets. You will not be prompted to transplant any ancestors
584 changesets. You will not be prompted to transplant any ancestors
585 of a merged transplant, and you can merge descendants of them
585 of a merged transplant, and you can merge descendants of them
586 normally instead of transplanting them.
586 normally instead of transplanting them.
587
587
588 Merge changesets may be transplanted directly by specifying the
588 Merge changesets may be transplanted directly by specifying the
589 proper parent changeset by calling :hg:`transplant --parent`.
589 proper parent changeset by calling :hg:`transplant --parent`.
590
590
591 If no merges or revisions are provided, :hg:`transplant` will
591 If no merges or revisions are provided, :hg:`transplant` will
592 start an interactive changeset browser.
592 start an interactive changeset browser.
593
593
594 If a changeset application fails, you can fix the merge by hand
594 If a changeset application fails, you can fix the merge by hand
595 and then resume where you left off by calling :hg:`transplant
595 and then resume where you left off by calling :hg:`transplant
596 --continue/-c`.
596 --continue/-c`.
597 '''
597 '''
598 with repo.wlock():
598 with repo.wlock():
599 return _dotransplant(ui, repo, *revs, **opts)
599 return _dotransplant(ui, repo, *revs, **opts)
600
600
601 def _dotransplant(ui, repo, *revs, **opts):
601 def _dotransplant(ui, repo, *revs, **opts):
602 def incwalk(repo, csets, match=util.always):
602 def incwalk(repo, csets, match=util.always):
603 for node in csets:
603 for node in csets:
604 if match(node):
604 if match(node):
605 yield node
605 yield node
606
606
607 def transplantwalk(repo, dest, heads, match=util.always):
607 def transplantwalk(repo, dest, heads, match=util.always):
608 '''Yield all nodes that are ancestors of a head but not ancestors
608 '''Yield all nodes that are ancestors of a head but not ancestors
609 of dest.
609 of dest.
610 If no heads are specified, the heads of repo will be used.'''
610 If no heads are specified, the heads of repo will be used.'''
611 if not heads:
611 if not heads:
612 heads = repo.heads()
612 heads = repo.heads()
613 ancestors = []
613 ancestors = []
614 ctx = repo[dest]
614 ctx = repo[dest]
615 for head in heads:
615 for head in heads:
616 ancestors.append(ctx.ancestor(repo[head]).node())
616 ancestors.append(ctx.ancestor(repo[head]).node())
617 for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
617 for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
618 if match(node):
618 if match(node):
619 yield node
619 yield node
620
620
621 def checkopts(opts, revs):
621 def checkopts(opts, revs):
622 if opts.get('continue'):
622 if opts.get('continue'):
623 if opts.get('branch') or opts.get('all') or opts.get('merge'):
623 if opts.get('branch') or opts.get('all') or opts.get('merge'):
624 raise error.Abort(_('--continue is incompatible with '
624 raise error.Abort(_('--continue is incompatible with '
625 '--branch, --all and --merge'))
625 '--branch, --all and --merge'))
626 return
626 return
627 if not (opts.get('source') or revs or
627 if not (opts.get('source') or revs or
628 opts.get('merge') or opts.get('branch')):
628 opts.get('merge') or opts.get('branch')):
629 raise error.Abort(_('no source URL, branch revision, or revision '
629 raise error.Abort(_('no source URL, branch revision, or revision '
630 'list provided'))
630 'list provided'))
631 if opts.get('all'):
631 if opts.get('all'):
632 if not opts.get('branch'):
632 if not opts.get('branch'):
633 raise error.Abort(_('--all requires a branch revision'))
633 raise error.Abort(_('--all requires a branch revision'))
634 if revs:
634 if revs:
635 raise error.Abort(_('--all is incompatible with a '
635 raise error.Abort(_('--all is incompatible with a '
636 'revision list'))
636 'revision list'))
637
637
638 checkopts(opts, revs)
638 checkopts(opts, revs)
639
639
640 if not opts.get('log'):
640 if not opts.get('log'):
641 # deprecated config: transplant.log
641 # deprecated config: transplant.log
642 opts['log'] = ui.config('transplant', 'log')
642 opts['log'] = ui.config('transplant', 'log')
643 if not opts.get('filter'):
643 if not opts.get('filter'):
644 # deprecated config: transplant.filter
644 # deprecated config: transplant.filter
645 opts['filter'] = ui.config('transplant', 'filter')
645 opts['filter'] = ui.config('transplant', 'filter')
646
646
647 tp = transplanter(ui, repo, opts)
647 tp = transplanter(ui, repo, opts)
648
648
649 p1, p2 = repo.dirstate.parents()
649 p1, p2 = repo.dirstate.parents()
650 if len(repo) > 0 and p1 == revlog.nullid:
650 if len(repo) > 0 and p1 == revlog.nullid:
651 raise error.Abort(_('no revision checked out'))
651 raise error.Abort(_('no revision checked out'))
652 if opts.get('continue'):
652 if opts.get('continue'):
653 if not tp.canresume():
653 if not tp.canresume():
654 raise error.Abort(_('no transplant to continue'))
654 raise error.Abort(_('no transplant to continue'))
655 else:
655 else:
656 cmdutil.checkunfinished(repo)
656 cmdutil.checkunfinished(repo)
657 if p2 != revlog.nullid:
657 if p2 != revlog.nullid:
658 raise error.Abort(_('outstanding uncommitted merges'))
658 raise error.Abort(_('outstanding uncommitted merges'))
659 m, a, r, d = repo.status()[:4]
659 m, a, r, d = repo.status()[:4]
660 if m or a or r or d:
660 if m or a or r or d:
661 raise error.Abort(_('outstanding local changes'))
661 raise error.Abort(_('outstanding local changes'))
662
662
663 sourcerepo = opts.get('source')
663 sourcerepo = opts.get('source')
664 if sourcerepo:
664 if sourcerepo:
665 peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
665 peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
666 heads = map(peer.lookup, opts.get('branch', ()))
666 heads = map(peer.lookup, opts.get('branch', ()))
667 target = set(heads)
667 target = set(heads)
668 for r in revs:
668 for r in revs:
669 try:
669 try:
670 target.add(peer.lookup(r))
670 target.add(peer.lookup(r))
671 except error.RepoError:
671 except error.RepoError:
672 pass
672 pass
673 source, csets, cleanupfn = bundlerepo.getremotechanges(ui, repo, peer,
673 source, csets, cleanupfn = bundlerepo.getremotechanges(ui, repo, peer,
674 onlyheads=sorted(target), force=True)
674 onlyheads=sorted(target), force=True)
675 else:
675 else:
676 source = repo
676 source = repo
677 heads = map(source.lookup, opts.get('branch', ()))
677 heads = map(source.lookup, opts.get('branch', ()))
678 cleanupfn = None
678 cleanupfn = None
679
679
680 try:
680 try:
681 if opts.get('continue'):
681 if opts.get('continue'):
682 tp.resume(repo, source, opts)
682 tp.resume(repo, source, opts)
683 return
683 return
684
684
685 tf = tp.transplantfilter(repo, source, p1)
685 tf = tp.transplantfilter(repo, source, p1)
686 if opts.get('prune'):
686 if opts.get('prune'):
687 prune = set(source.lookup(r)
687 prune = set(source.lookup(r)
688 for r in scmutil.revrange(source, opts.get('prune')))
688 for r in scmutil.revrange(source, opts.get('prune')))
689 matchfn = lambda x: tf(x) and x not in prune
689 matchfn = lambda x: tf(x) and x not in prune
690 else:
690 else:
691 matchfn = tf
691 matchfn = tf
692 merges = map(source.lookup, opts.get('merge', ()))
692 merges = map(source.lookup, opts.get('merge', ()))
693 revmap = {}
693 revmap = {}
694 if revs:
694 if revs:
695 for r in scmutil.revrange(source, revs):
695 for r in scmutil.revrange(source, revs):
696 revmap[int(r)] = source.lookup(r)
696 revmap[int(r)] = source.lookup(r)
697 elif opts.get('all') or not merges:
697 elif opts.get('all') or not merges:
698 if source != repo:
698 if source != repo:
699 alltransplants = incwalk(source, csets, match=matchfn)
699 alltransplants = incwalk(source, csets, match=matchfn)
700 else:
700 else:
701 alltransplants = transplantwalk(source, p1, heads,
701 alltransplants = transplantwalk(source, p1, heads,
702 match=matchfn)
702 match=matchfn)
703 if opts.get('all'):
703 if opts.get('all'):
704 revs = alltransplants
704 revs = alltransplants
705 else:
705 else:
706 revs, newmerges = browserevs(ui, source, alltransplants, opts)
706 revs, newmerges = browserevs(ui, source, alltransplants, opts)
707 merges.extend(newmerges)
707 merges.extend(newmerges)
708 for r in revs:
708 for r in revs:
709 revmap[source.changelog.rev(r)] = r
709 revmap[source.changelog.rev(r)] = r
710 for r in merges:
710 for r in merges:
711 revmap[source.changelog.rev(r)] = r
711 revmap[source.changelog.rev(r)] = r
712
712
713 tp.apply(repo, source, revmap, merges, opts)
713 tp.apply(repo, source, revmap, merges, opts)
714 finally:
714 finally:
715 if cleanupfn:
715 if cleanupfn:
716 cleanupfn()
716 cleanupfn()
717
717
718 revsetpredicate = registrar.revsetpredicate()
718 revsetpredicate = registrar.revsetpredicate()
719
719
720 @revsetpredicate('transplanted([set])')
720 @revsetpredicate('transplanted([set])')
721 def revsettransplanted(repo, subset, x):
721 def revsettransplanted(repo, subset, x):
722 """Transplanted changesets in set, or all transplanted changesets.
722 """Transplanted changesets in set, or all transplanted changesets.
723 """
723 """
724 if x:
724 if x:
725 s = revset.getset(repo, subset, x)
725 s = revset.getset(repo, subset, x)
726 else:
726 else:
727 s = subset
727 s = subset
728 return smartset.baseset([r for r in s if
728 return smartset.baseset([r for r in s if
729 repo[r].extra().get('transplant_source')])
729 repo[r].extra().get('transplant_source')])
730
730
731 templatekeyword = registrar.templatekeyword()
731 templatekeyword = registrar.templatekeyword()
732
732
733 @templatekeyword('transplanted')
733 @templatekeyword('transplanted')
734 def kwtransplanted(repo, ctx, **args):
734 def kwtransplanted(repo, ctx, **args):
735 """String. The node identifier of the transplanted
735 """String. The node identifier of the transplanted
736 changeset if any."""
736 changeset if any."""
737 n = ctx.extra().get('transplant_source')
737 n = ctx.extra().get('transplant_source')
738 return n and nodemod.hex(n) or ''
738 return n and nodemod.hex(n) or ''
739
739
740 def extsetup(ui):
740 def extsetup(ui):
741 cmdutil.unfinishedstates.append(
741 cmdutil.unfinishedstates.append(
742 ['transplant/journal', True, False, _('transplant in progress'),
742 ['transplant/journal', True, False, _('transplant in progress'),
743 _("use 'hg transplant --continue' or 'hg update' to abort")])
743 _("use 'hg transplant --continue' or 'hg update' to abort")])
744
744
745 # tell hggettext to extract docstrings from these functions:
745 # tell hggettext to extract docstrings from these functions:
746 i18nfunctions = [revsettransplanted, kwtransplanted]
746 i18nfunctions = [revsettransplanted, kwtransplanted]
@@ -1,340 +1,340 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import gzip
10 import gzip
11 import os
11 import os
12 import struct
12 import struct
13 import tarfile
13 import tarfile
14 import time
14 import time
15 import zipfile
15 import zipfile
16 import zlib
16 import zlib
17
17
18 from .i18n import _
18 from .i18n import _
19
19
20 from . import (
20 from . import (
21 cmdutil,
21 cmdutil,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 scmutil,
25 scmutil,
26 util,
26 util,
27 )
27 )
28 stringio = util.stringio
28 stringio = util.stringio
29
29
30 # from unzip source code:
30 # from unzip source code:
31 _UNX_IFREG = 0x8000
31 _UNX_IFREG = 0x8000
32 _UNX_IFLNK = 0xa000
32 _UNX_IFLNK = 0xa000
33
33
def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if not prefix:
        # No explicit prefix: derive one from the destination file name,
        # minus any archive-type suffix (.tar.gz, .zip, ...).
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lowered = prefix.lower()
        for suffix in exts.get(kind, []):
            if lowered.endswith(suffix):
                prefix = prefix[:-len(suffix)]
                break
    else:
        prefix = util.normpath(prefix)
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # Drop the leading '.' path component if present, so Windows can read
    # the zip files (issue4634)
    if prefix.startswith('./'):
        prefix = prefix[2:]
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise error.Abort(_('archive prefix contains illegal components'))
    return prefix
60
60
# Map of archive kind -> recognized file-name suffixes for that kind.
exts = {
    'tar': ['.tar'],
    'tbz2': ['.tbz2', '.tar.bz2'],
    'tgz': ['.tgz', '.tar.gz'],
    'zip': ['.zip'],
    }

def guesskind(dest):
    """Guess the archive kind from *dest*'s file extension.

    Returns one of the keys of ``exts``, or None when no suffix matches.
    """
    # .items() instead of the Python-2-only .iteritems(): identical
    # iteration behavior on Python 2, and also valid on Python 3.
    for kind, extensions in exts.items():
        if any(dest.endswith(ext) for ext in extensions):
            return kind
    return None
73
73
74 def _rootctx(repo):
74 def _rootctx(repo):
75 # repo[0] may be hidden
75 # repo[0] may be hidden
76 for rev in repo:
76 for rev in repo:
77 return repo[rev]
77 return repo[rev]
78 return repo['null']
78 return repo['null']
79
79
def buildmetadata(ctx):
    '''build content of .hg_archival.txt

    Returns a string of "key: value" lines describing *ctx*: the root
    node of the repository, the archived node, its branch, and either
    its global tags or latest-tag information.
    '''
    repo = ctx.repo()
    hex = ctx.hex()
    if ctx.rev() is None:
        # Working-directory context: record the first parent instead,
        # with a '+' suffix when there are uncommitted changes.
        hex = ctx.p1().hex()
        if ctx.dirty():
            hex += '+'

    base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
        _rootctx(repo).hex(), hex, encoding.fromlocal(ctx.branch()))

    # Only tags marked 'global' (i.e. from .hgtags) identify the archive.
    tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                   if repo.tagtype(t) == 'global')
    if not tags:
        # No global tag on this revision: describe the latest reachable
        # tag(s) instead, rendered through the template engine.
        repo.ui.pushbuffer()
        opts = {'template': '{latesttag}\n{latesttagdistance}\n'
                '{changessincelatesttag}',
                'style': '', 'patch': None, 'git': None}
        cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
        ltags, dist, changessince = repo.ui.popbuffer().split('\n')
        # {latesttag} may render several tags joined by ':'.
        ltags = ltags.split(':')
        tags = ''.join('latesttag: %s\n' % t for t in ltags)
        tags += 'latesttagdistance: %s\n' % dist
        tags += 'changessincelatesttag: %s\n' % changessince

    return base + tags
107
107
class tarit(object):
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):
        # gzip.GzipFile stamps its header with the current time; this
        # subclass embeds a caller-supplied timestamp instead so that
        # archive generation is reproducible.

        def __init__(self, *args, **kw):
            timestamp = None
            if 'timestamp' in kw:
                timestamp = kw.pop('timestamp')
            if timestamp is None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            # Re-implementation of GzipFile._write_gzip_header() (RFC 1952
            # member header) that writes self.timestamp as MTIME.
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            fname = self.name
            if fname and fname.endswith('.gz'):
                fname = fname[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')
            self.fileobj.write('\377')
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, mtime, kind=''):
        # dest: file name (str) or writable file object.
        # mtime: timestamp recorded for every archive member.
        # kind: '' (plain tar), 'gz' or 'bz2' compression.
        self.mtime = mtime
        self.fileobj = None

        def taropen(mode, name='', fileobj=None):
            if kind == 'gz':
                # Route through GzipFileWithTime so the gzip header
                # carries the archive mtime rather than "now".
                mode = mode[0]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(name, mode, gzfileobj)
            else:
                return tarfile.open(name, mode + kind, fileobj)

        if isinstance(dest, str):
            self.z = taropen('w:', name=dest)
        else:
            # Stream destination: 'w|' writes sequentially, no seeking.
            self.z = taropen('w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        """Add one member; for symlinks *data* is the link target."""
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = data
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = stringio(data)
        self.z.addfile(i, data)

    def done(self):
        # Close the tar stream first, then the gzip wrapper (if any) so
        # the trailing gzip CRC/size block gets flushed.
        self.z.close()
        if self.fileobj:
            self.fileobj.close()
181
181
class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        # Delegate everything we do not implement to the wrapped file.
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        # Count the bytes only after the underlying write succeeded.
        self.offset += len(s)

    def tell(self):
        return self.offset
199
199
class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, str):
            # Stream destinations (e.g. HTTP responses) may not support
            # tell(); wrap them so zipfile can track its write offset.
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        compression = compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
        self.z = zipfile.ZipFile(dest, 'w', compression)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        self.mtime = max(mtime, epoch)
        self.date_time = time.gmtime(self.mtime)[:6]

    def addfile(self, name, mode, islink, data):
        """Add one member; for symlinks *data* is the link target."""
        info = zipfile.ZipInfo(name, self.date_time)
        info.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        info.create_system = 3
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        else:
            ftype = _UNX_IFREG
        info.external_attr = (mode | ftype) << 16
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        info.extra += struct.pack('<hhBl',
                                  0x5455, # block type: "extended-timestamp"
                                  1 + 4,  # size of this block
                                  1,      # "modification time is present"
                                  int(self.mtime)) # last modification (UTC)
        self.z.writestr(info, data)

    def done(self):
        self.z.close()
246
246
class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        # mtime is accepted for interface parity with the other archiver
        # classes but is unused: extracted files keep natural timestamps.
        self.basedir = name
        # 'scmutil.opener' is the deprecated alias; use the canonical
        # 'scmutil.vfs' name instead (same object, new API name).
        self.opener = scmutil.vfs(self.basedir)

    def addfile(self, name, mode, islink, data):
        """Write one member under basedir; *data* is the symlink target
        when islink is true, otherwise the file contents."""
        if islink:
            self.opener.symlink(data, name)
            return
        f = self.opener(name, "w", atomictemp=True)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)

    def done(self):
        # Nothing to finalize: files were written directly to disk.
        pass
266
266
# Archive kind -> factory taking (dest, mtime) and returning an archiver
# object with addfile()/done() methods.
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),
    'zip': zipit,
    }

def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    Returns the number of members written; raises error.Abort when the
    archive type is unknown or no file matched.'''

    if kind == 'files':
        # Directory archives keep their natural paths; a prefix would
        # only make sense inside a container file.
        if prefix:
            raise error.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise error.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    # Default member timestamps to the changeset's commit date.
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            # 'x' flag -> executable mode, 'l' flag -> symlink member.
            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch, decode)

    if total == 0:
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
@@ -1,3470 +1,3470 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 bin,
17 bin,
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 graphmod,
32 graphmod,
33 lock as lockmod,
33 lock as lockmod,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 repair,
40 repair,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 smartset,
44 smartset,
45 templatekw,
45 templatekw,
46 templater,
46 templater,
47 util,
47 util,
48 )
48 )
49 stringio = util.stringio
49 stringio = util.stringio
50
50
51 # special string such that everything below this line will be ingored in the
51 # special string such that everything below this line will be ingored in the
52 # editor text
52 # editor text
53 _linebelow = "^HG: ------------------------ >8 ------------------------$"
53 _linebelow = "^HG: ------------------------ >8 ------------------------$"
54
54
def ishunk(x):
    """True when *x* is a patch hunk (curses UI or plain record)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
58
58
def newandmodified(chunks, originalchunks):
    """Return the set of file names whose hunks introduce a new file and
    were modified by the user (i.e. are absent from *originalchunks*)."""
    return set(chunk.header.filename()
               for chunk in chunks
               if ishunk(chunk) and chunk.header.isnewfile()
               and chunk not in originalchunks)
66
66
def parsealiases(cmd):
    """Split a command spec such as "^commit|ci" into its list of
    aliases, discarding any leading '^' priority markers."""
    while cmd.startswith("^"):
        cmd = cmd[1:]
    return cmd.split("|")
69
69
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original write method so the caller can restore it.
    """
    oldwrite = ui.write

    def labeledwrite(*args, **kw):
        baselabel = kw.pop('label', '')
        for chunk, label in patch.difflabel(lambda: args):
            oldwrite(chunk, label=baselabel + label)

    setattr(ui, 'write', labeledwrite)
    return oldwrite
82
82
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter *originalhunks*: through the curses UI when
    *usecurses* is set, otherwise through the plain-text prompt."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        # Test mode: drive the curses selector from a scripted input file.
        chunkselector = crecordmod.testdecorator(testfile,
                                                 crecordmod.testchunkselector)
    else:
        chunkselector = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, chunkselector, operation)
95
95
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter *originalhunks*; return the selected
    hunks together with any option overrides.

    *operation* is used to build ui messages indicating to the user what
    kind of filtering they are doing: reverting, committing, shelving,
    etc. (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    # Temporarily wrap ui.write for colorized diff output, and restore
    # it even when the interactive session raises.
    oldwrite = setupwrapcolorwrite(ui)
    try:
        selection = filterchunks(ui, originalhunks, usecurses,
                                 testfile, operation)
    finally:
        ui.write = oldwrite
    return selection
112
112
113 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
113 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
114 filterfn, *pats, **opts):
114 filterfn, *pats, **opts):
115 from . import merge as mergemod
115 from . import merge as mergemod
116 if not ui.interactive():
116 if not ui.interactive():
117 if cmdsuggest:
117 if cmdsuggest:
118 msg = _('running non-interactively, use %s instead') % cmdsuggest
118 msg = _('running non-interactively, use %s instead') % cmdsuggest
119 else:
119 else:
120 msg = _('running non-interactively')
120 msg = _('running non-interactively')
121 raise error.Abort(msg)
121 raise error.Abort(msg)
122
122
123 # make sure username is set before going interactive
123 # make sure username is set before going interactive
124 if not opts.get('user'):
124 if not opts.get('user'):
125 ui.username() # raise exception, username not provided
125 ui.username() # raise exception, username not provided
126
126
127 def recordfunc(ui, repo, message, match, opts):
127 def recordfunc(ui, repo, message, match, opts):
128 """This is generic record driver.
128 """This is generic record driver.
129
129
130 Its job is to interactively filter local changes, and
130 Its job is to interactively filter local changes, and
131 accordingly prepare working directory into a state in which the
131 accordingly prepare working directory into a state in which the
132 job can be delegated to a non-interactive commit command such as
132 job can be delegated to a non-interactive commit command such as
133 'commit' or 'qrefresh'.
133 'commit' or 'qrefresh'.
134
134
135 After the actual job is done by non-interactive command, the
135 After the actual job is done by non-interactive command, the
136 working directory is restored to its original state.
136 working directory is restored to its original state.
137
137
138 In the end we'll record interesting changes, and everything else
138 In the end we'll record interesting changes, and everything else
139 will be left in place, so the user can continue working.
139 will be left in place, so the user can continue working.
140 """
140 """
141
141
142 checkunfinished(repo, commit=True)
142 checkunfinished(repo, commit=True)
143 wctx = repo[None]
143 wctx = repo[None]
144 merge = len(wctx.parents()) > 1
144 merge = len(wctx.parents()) > 1
145 if merge:
145 if merge:
146 raise error.Abort(_('cannot partially commit a merge '
146 raise error.Abort(_('cannot partially commit a merge '
147 '(use "hg commit" instead)'))
147 '(use "hg commit" instead)'))
148
148
149 def fail(f, msg):
149 def fail(f, msg):
150 raise error.Abort('%s: %s' % (f, msg))
150 raise error.Abort('%s: %s' % (f, msg))
151
151
152 force = opts.get('force')
152 force = opts.get('force')
153 if not force:
153 if not force:
154 vdirs = []
154 vdirs = []
155 match.explicitdir = vdirs.append
155 match.explicitdir = vdirs.append
156 match.bad = fail
156 match.bad = fail
157
157
158 status = repo.status(match=match)
158 status = repo.status(match=match)
159 if not force:
159 if not force:
160 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
160 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
161 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
161 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
162 diffopts.nodates = True
162 diffopts.nodates = True
163 diffopts.git = True
163 diffopts.git = True
164 diffopts.showfunc = True
164 diffopts.showfunc = True
165 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
165 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
166 originalchunks = patch.parsepatch(originaldiff)
166 originalchunks = patch.parsepatch(originaldiff)
167
167
168 # 1. filter patch, since we are intending to apply subset of it
168 # 1. filter patch, since we are intending to apply subset of it
169 try:
169 try:
170 chunks, newopts = filterfn(ui, originalchunks)
170 chunks, newopts = filterfn(ui, originalchunks)
171 except patch.PatchError as err:
171 except patch.PatchError as err:
172 raise error.Abort(_('error parsing patch: %s') % err)
172 raise error.Abort(_('error parsing patch: %s') % err)
173 opts.update(newopts)
173 opts.update(newopts)
174
174
175 # We need to keep a backup of files that have been newly added and
175 # We need to keep a backup of files that have been newly added and
176 # modified during the recording process because there is a previous
176 # modified during the recording process because there is a previous
177 # version without the edit in the workdir
177 # version without the edit in the workdir
178 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
178 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
179 contenders = set()
179 contenders = set()
180 for h in chunks:
180 for h in chunks:
181 try:
181 try:
182 contenders.update(set(h.files()))
182 contenders.update(set(h.files()))
183 except AttributeError:
183 except AttributeError:
184 pass
184 pass
185
185
186 changed = status.modified + status.added + status.removed
186 changed = status.modified + status.added + status.removed
187 newfiles = [f for f in changed if f in contenders]
187 newfiles = [f for f in changed if f in contenders]
188 if not newfiles:
188 if not newfiles:
189 ui.status(_('no changes to record\n'))
189 ui.status(_('no changes to record\n'))
190 return 0
190 return 0
191
191
192 modified = set(status.modified)
192 modified = set(status.modified)
193
193
194 # 2. backup changed files, so we can restore them in the end
194 # 2. backup changed files, so we can restore them in the end
195
195
196 if backupall:
196 if backupall:
197 tobackup = changed
197 tobackup = changed
198 else:
198 else:
199 tobackup = [f for f in newfiles if f in modified or f in \
199 tobackup = [f for f in newfiles if f in modified or f in \
200 newlyaddedandmodifiedfiles]
200 newlyaddedandmodifiedfiles]
201 backups = {}
201 backups = {}
202 if tobackup:
202 if tobackup:
203 backupdir = repo.join('record-backups')
203 backupdir = repo.join('record-backups')
204 try:
204 try:
205 os.mkdir(backupdir)
205 os.mkdir(backupdir)
206 except OSError as err:
206 except OSError as err:
207 if err.errno != errno.EEXIST:
207 if err.errno != errno.EEXIST:
208 raise
208 raise
209 try:
209 try:
210 # backup continues
210 # backup continues
211 for f in tobackup:
211 for f in tobackup:
212 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
212 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
213 dir=backupdir)
213 dir=backupdir)
214 os.close(fd)
214 os.close(fd)
215 ui.debug('backup %r as %r\n' % (f, tmpname))
215 ui.debug('backup %r as %r\n' % (f, tmpname))
216 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
216 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
217 backups[f] = tmpname
217 backups[f] = tmpname
218
218
219 fp = stringio()
219 fp = stringio()
220 for c in chunks:
220 for c in chunks:
221 fname = c.filename()
221 fname = c.filename()
222 if fname in backups:
222 if fname in backups:
223 c.write(fp)
223 c.write(fp)
224 dopatch = fp.tell()
224 dopatch = fp.tell()
225 fp.seek(0)
225 fp.seek(0)
226
226
227 # 2.5 optionally review / modify patch in text editor
227 # 2.5 optionally review / modify patch in text editor
228 if opts.get('review', False):
228 if opts.get('review', False):
229 patchtext = (crecordmod.diffhelptext
229 patchtext = (crecordmod.diffhelptext
230 + crecordmod.patchhelptext
230 + crecordmod.patchhelptext
231 + fp.read())
231 + fp.read())
232 reviewedpatch = ui.edit(patchtext, "",
232 reviewedpatch = ui.edit(patchtext, "",
233 extra={"suffix": ".diff"},
233 extra={"suffix": ".diff"},
234 repopath=repo.path)
234 repopath=repo.path)
235 fp.truncate(0)
235 fp.truncate(0)
236 fp.write(reviewedpatch)
236 fp.write(reviewedpatch)
237 fp.seek(0)
237 fp.seek(0)
238
238
239 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
239 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
240 # 3a. apply filtered patch to clean repo (clean)
240 # 3a. apply filtered patch to clean repo (clean)
241 if backups:
241 if backups:
242 # Equivalent to hg.revert
242 # Equivalent to hg.revert
243 m = scmutil.matchfiles(repo, backups.keys())
243 m = scmutil.matchfiles(repo, backups.keys())
244 mergemod.update(repo, repo.dirstate.p1(),
244 mergemod.update(repo, repo.dirstate.p1(),
245 False, True, matcher=m)
245 False, True, matcher=m)
246
246
247 # 3b. (apply)
247 # 3b. (apply)
248 if dopatch:
248 if dopatch:
249 try:
249 try:
250 ui.debug('applying patch\n')
250 ui.debug('applying patch\n')
251 ui.debug(fp.getvalue())
251 ui.debug(fp.getvalue())
252 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
252 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
253 except patch.PatchError as err:
253 except patch.PatchError as err:
254 raise error.Abort(str(err))
254 raise error.Abort(str(err))
255 del fp
255 del fp
256
256
257 # 4. We prepared working directory according to filtered
257 # 4. We prepared working directory according to filtered
258 # patch. Now is the time to delegate the job to
258 # patch. Now is the time to delegate the job to
259 # commit/qrefresh or the like!
259 # commit/qrefresh or the like!
260
260
261 # Make all of the pathnames absolute.
261 # Make all of the pathnames absolute.
262 newfiles = [repo.wjoin(nf) for nf in newfiles]
262 newfiles = [repo.wjoin(nf) for nf in newfiles]
263 return commitfunc(ui, repo, *newfiles, **opts)
263 return commitfunc(ui, repo, *newfiles, **opts)
264 finally:
264 finally:
265 # 5. finally restore backed-up files
265 # 5. finally restore backed-up files
266 try:
266 try:
267 dirstate = repo.dirstate
267 dirstate = repo.dirstate
268 for realname, tmpname in backups.iteritems():
268 for realname, tmpname in backups.iteritems():
269 ui.debug('restoring %r to %r\n' % (tmpname, realname))
269 ui.debug('restoring %r to %r\n' % (tmpname, realname))
270
270
271 if dirstate[realname] == 'n':
271 if dirstate[realname] == 'n':
272 # without normallookup, restoring timestamp
272 # without normallookup, restoring timestamp
273 # may cause partially committed files
273 # may cause partially committed files
274 # to be treated as unmodified
274 # to be treated as unmodified
275 dirstate.normallookup(realname)
275 dirstate.normallookup(realname)
276
276
277 # copystat=True here and above are a hack to trick any
277 # copystat=True here and above are a hack to trick any
278 # editors that have f open that we haven't modified them.
278 # editors that have f open that we haven't modified them.
279 #
279 #
280 # Also note that this racy as an editor could notice the
280 # Also note that this racy as an editor could notice the
281 # file's mtime before we've finished writing it.
281 # file's mtime before we've finished writing it.
282 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
282 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
283 os.unlink(tmpname)
283 os.unlink(tmpname)
284 if tobackup:
284 if tobackup:
285 os.rmdir(backupdir)
285 os.rmdir(backupdir)
286 except OSError:
286 except OSError:
287 pass
287 pass
288
288
289 def recordinwlock(ui, repo, message, match, opts):
289 def recordinwlock(ui, repo, message, match, opts):
290 with repo.wlock():
290 with repo.wlock():
291 return recordfunc(ui, repo, message, match, opts)
291 return recordfunc(ui, repo, message, match, opts)
292
292
293 return commit(ui, repo, recordinwlock, pats, opts)
293 return commit(ui, repo, recordinwlock, pats, opts)
294
294
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        candidates = [cmd]
    else:
        candidates = table.keys()

    allcmds = []
    for entry in candidates:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        match = None
        if cmd in aliases:
            match = cmd
        elif not strict:
            # accept any alias for which cmd is an unambiguous prefix
            for alias in aliases:
                if alias.startswith(cmd):
                    match = alias
                    break
        if match is None:
            continue
        if aliases[0].startswith("debug") or match.startswith("debug"):
            debug[match] = (aliases, table[entry])
        else:
            normal[match] = (aliases, table[entry])

    # only fall back to debug commands when nothing else matched
    if not normal and debug:
        return debug, allcmds
    return normal, allcmds
332
332
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    matches, allcmds = findpossible(cmd, table, strict)

    # an exact match always wins over prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches))

    for entry in matches.values():
        return entry

    raise error.UnknownCommand(cmd, allcmds)
349
349
def findrepo(p):
    """Walk upward from directory *p* looking for a repository root.

    Return the first ancestor (including *p* itself) containing a ``.hg``
    directory, or None if the filesystem root is reached without one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point at the root: no repo found
            return None
        p = parent
    return p
357
357
def bailifchanged(repo, merge=True, hint=None):
    """enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # any modified/added/removed/deleted file makes the wdir dirty
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    # recurse into subrepositories so a dirty subrepo bails too
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
375
375
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if logfile and not message:
        try:
            if logfile == '-':
                # read the commit message from stdin
                message = ui.fin.read()
            else:
                # normalize line endings while reading the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message
394
394
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx with two parents is a merge
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
411
411
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # force the editor: MQ-style callers always want it invoked
        def editor(r, c, s):
            return commitforceeditor(r, c, s, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return editor
    if editform:
        def editor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return editor
    return commiteditor
442
442
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    if not raw:
        # no (or empty) limit given: unlimited
        return None
    try:
        limit = int(raw)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
456
456
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """expand the format string *pat* into an output filename

    Escapes: %% literal percent, %b repo basename, and - when the
    corresponding argument is supplied - %H/%h/%R/%r/%m (node/desc),
    %N/%n (total/sequence number), %s/%d/%p (pathname parts).
    An unknown escape aborts.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # register only the escapes whose data is actually available
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        pieces = []
        i, end = 0, len(pat)
        while i < end:
            ch = pat[i]
            if ch == '%':
                i += 1
                ch = expander[pat[i]]()
            pieces.append(ch)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
502
502
503 class _unclosablefile(object):
503 class _unclosablefile(object):
504 def __init__(self, fp):
504 def __init__(self, fp):
505 self._fp = fp
505 self._fp = fp
506
506
507 def close(self):
507 def close(self):
508 pass
508 pass
509
509
510 def __iter__(self):
510 def __iter__(self):
511 return iter(self._fp)
511 return iter(self._fp)
512
512
513 def __getattr__(self, attr):
513 def __getattr__(self, attr):
514 return getattr(self._fp, attr)
514 return getattr(self._fp, attr)
515
515
516 def __enter__(self):
516 def __enter__(self):
517 return self
517 return self
518
518
519 def __exit__(self, exc_type, exc_value, exc_tb):
519 def __exit__(self, exc_type, exc_value, exc_tb):
520 pass
520 pass
521
521
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """return a file object for the output pattern *pat*

    An empty pattern or '-' maps to the ui's stdio streams, wrapped so
    callers cannot close them.  A pattern that is already a file-like
    object compatible with *mode* is returned as-is.  Otherwise the
    pattern is expanded via makefilename() and opened; 'modemap' tracks
    files already written so later calls append instead of truncating.
    """
    writable = mode not in ('r', 'rb')

    # no pattern (or '-') means use stdio
    if not pat or pat == '-':
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)
    # already an open file-like object matching the requested direction?
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first write truncates; subsequent writes must append
            modemap[fn] = 'ab'
    return open(fn, mode)
544
544
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    wantcl = opts['changelog']
    wantmf = opts['manifest']
    wantdir = opts['dir']

    # validate the option combination before touching any storage
    msg = None
    if wantcl and wantmf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif wantcl and wantdir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif wantcl or wantmf or wantdir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if wantcl:
            r = repo.unfiltered().changelog
        elif wantdir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(wantdir)
            if len(dirlog):
                r = dirlog
        elif wantmf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    # 'not r' also covers revlogs that exist but are empty (len() == 0)
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to a plain revlog addressed by filename: strip the
        # two-character extension and open the corresponding index file
        r = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
589
589
590 def copy(ui, repo, pats, opts, rename=False):
590 def copy(ui, repo, pats, opts, rename=False):
591 # called with the repo lock held
591 # called with the repo lock held
592 #
592 #
593 # hgsep => pathname that uses "/" to separate directories
593 # hgsep => pathname that uses "/" to separate directories
594 # ossep => pathname that uses os.sep to separate directories
594 # ossep => pathname that uses os.sep to separate directories
595 cwd = repo.getcwd()
595 cwd = repo.getcwd()
596 targets = {}
596 targets = {}
597 after = opts.get("after")
597 after = opts.get("after")
598 dryrun = opts.get("dry_run")
598 dryrun = opts.get("dry_run")
599 wctx = repo[None]
599 wctx = repo[None]
600
600
601 def walkpat(pat):
601 def walkpat(pat):
602 srcs = []
602 srcs = []
603 if after:
603 if after:
604 badstates = '?'
604 badstates = '?'
605 else:
605 else:
606 badstates = '?r'
606 badstates = '?r'
607 m = scmutil.match(repo[None], [pat], opts, globbed=True)
607 m = scmutil.match(repo[None], [pat], opts, globbed=True)
608 for abs in repo.walk(m):
608 for abs in repo.walk(m):
609 state = repo.dirstate[abs]
609 state = repo.dirstate[abs]
610 rel = m.rel(abs)
610 rel = m.rel(abs)
611 exact = m.exact(abs)
611 exact = m.exact(abs)
612 if state in badstates:
612 if state in badstates:
613 if exact and state == '?':
613 if exact and state == '?':
614 ui.warn(_('%s: not copying - file is not managed\n') % rel)
614 ui.warn(_('%s: not copying - file is not managed\n') % rel)
615 if exact and state == 'r':
615 if exact and state == 'r':
616 ui.warn(_('%s: not copying - file has been marked for'
616 ui.warn(_('%s: not copying - file has been marked for'
617 ' remove\n') % rel)
617 ' remove\n') % rel)
618 continue
618 continue
619 # abs: hgsep
619 # abs: hgsep
620 # rel: ossep
620 # rel: ossep
621 srcs.append((abs, rel, exact))
621 srcs.append((abs, rel, exact))
622 return srcs
622 return srcs
623
623
624 # abssrc: hgsep
624 # abssrc: hgsep
625 # relsrc: ossep
625 # relsrc: ossep
626 # otarget: ossep
626 # otarget: ossep
627 def copyfile(abssrc, relsrc, otarget, exact):
627 def copyfile(abssrc, relsrc, otarget, exact):
628 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
628 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
629 if '/' in abstarget:
629 if '/' in abstarget:
630 # We cannot normalize abstarget itself, this would prevent
630 # We cannot normalize abstarget itself, this would prevent
631 # case only renames, like a => A.
631 # case only renames, like a => A.
632 abspath, absname = abstarget.rsplit('/', 1)
632 abspath, absname = abstarget.rsplit('/', 1)
633 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
633 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
634 reltarget = repo.pathto(abstarget, cwd)
634 reltarget = repo.pathto(abstarget, cwd)
635 target = repo.wjoin(abstarget)
635 target = repo.wjoin(abstarget)
636 src = repo.wjoin(abssrc)
636 src = repo.wjoin(abssrc)
637 state = repo.dirstate[abstarget]
637 state = repo.dirstate[abstarget]
638
638
639 scmutil.checkportable(ui, abstarget)
639 scmutil.checkportable(ui, abstarget)
640
640
641 # check for collisions
641 # check for collisions
642 prevsrc = targets.get(abstarget)
642 prevsrc = targets.get(abstarget)
643 if prevsrc is not None:
643 if prevsrc is not None:
644 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
644 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
645 (reltarget, repo.pathto(abssrc, cwd),
645 (reltarget, repo.pathto(abssrc, cwd),
646 repo.pathto(prevsrc, cwd)))
646 repo.pathto(prevsrc, cwd)))
647 return
647 return
648
648
649 # check for overwrites
649 # check for overwrites
650 exists = os.path.lexists(target)
650 exists = os.path.lexists(target)
651 samefile = False
651 samefile = False
652 if exists and abssrc != abstarget:
652 if exists and abssrc != abstarget:
653 if (repo.dirstate.normalize(abssrc) ==
653 if (repo.dirstate.normalize(abssrc) ==
654 repo.dirstate.normalize(abstarget)):
654 repo.dirstate.normalize(abstarget)):
655 if not rename:
655 if not rename:
656 ui.warn(_("%s: can't copy - same file\n") % reltarget)
656 ui.warn(_("%s: can't copy - same file\n") % reltarget)
657 return
657 return
658 exists = False
658 exists = False
659 samefile = True
659 samefile = True
660
660
661 if not after and exists or after and state in 'mn':
661 if not after and exists or after and state in 'mn':
662 if not opts['force']:
662 if not opts['force']:
663 if state in 'mn':
663 if state in 'mn':
664 msg = _('%s: not overwriting - file already committed\n')
664 msg = _('%s: not overwriting - file already committed\n')
665 if after:
665 if after:
666 flags = '--after --force'
666 flags = '--after --force'
667 else:
667 else:
668 flags = '--force'
668 flags = '--force'
669 if rename:
669 if rename:
670 hint = _('(hg rename %s to replace the file by '
670 hint = _('(hg rename %s to replace the file by '
671 'recording a rename)\n') % flags
671 'recording a rename)\n') % flags
672 else:
672 else:
673 hint = _('(hg copy %s to replace the file by '
673 hint = _('(hg copy %s to replace the file by '
674 'recording a copy)\n') % flags
674 'recording a copy)\n') % flags
675 else:
675 else:
676 msg = _('%s: not overwriting - file exists\n')
676 msg = _('%s: not overwriting - file exists\n')
677 if rename:
677 if rename:
678 hint = _('(hg rename --after to record the rename)\n')
678 hint = _('(hg rename --after to record the rename)\n')
679 else:
679 else:
680 hint = _('(hg copy --after to record the copy)\n')
680 hint = _('(hg copy --after to record the copy)\n')
681 ui.warn(msg % reltarget)
681 ui.warn(msg % reltarget)
682 ui.warn(hint)
682 ui.warn(hint)
683 return
683 return
684
684
685 if after:
685 if after:
686 if not exists:
686 if not exists:
687 if rename:
687 if rename:
688 ui.warn(_('%s: not recording move - %s does not exist\n') %
688 ui.warn(_('%s: not recording move - %s does not exist\n') %
689 (relsrc, reltarget))
689 (relsrc, reltarget))
690 else:
690 else:
691 ui.warn(_('%s: not recording copy - %s does not exist\n') %
691 ui.warn(_('%s: not recording copy - %s does not exist\n') %
692 (relsrc, reltarget))
692 (relsrc, reltarget))
693 return
693 return
694 elif not dryrun:
694 elif not dryrun:
695 try:
695 try:
696 if exists:
696 if exists:
697 os.unlink(target)
697 os.unlink(target)
698 targetdir = os.path.dirname(target) or '.'
698 targetdir = os.path.dirname(target) or '.'
699 if not os.path.isdir(targetdir):
699 if not os.path.isdir(targetdir):
700 os.makedirs(targetdir)
700 os.makedirs(targetdir)
701 if samefile:
701 if samefile:
702 tmp = target + "~hgrename"
702 tmp = target + "~hgrename"
703 os.rename(src, tmp)
703 os.rename(src, tmp)
704 os.rename(tmp, target)
704 os.rename(tmp, target)
705 else:
705 else:
706 util.copyfile(src, target)
706 util.copyfile(src, target)
707 srcexists = True
707 srcexists = True
708 except IOError as inst:
708 except IOError as inst:
709 if inst.errno == errno.ENOENT:
709 if inst.errno == errno.ENOENT:
710 ui.warn(_('%s: deleted in working directory\n') % relsrc)
710 ui.warn(_('%s: deleted in working directory\n') % relsrc)
711 srcexists = False
711 srcexists = False
712 else:
712 else:
713 ui.warn(_('%s: cannot copy - %s\n') %
713 ui.warn(_('%s: cannot copy - %s\n') %
714 (relsrc, inst.strerror))
714 (relsrc, inst.strerror))
715 return True # report a failure
715 return True # report a failure
716
716
717 if ui.verbose or not exact:
717 if ui.verbose or not exact:
718 if rename:
718 if rename:
719 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
719 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
720 else:
720 else:
721 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
721 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
722
722
723 targets[abstarget] = abssrc
723 targets[abstarget] = abssrc
724
724
725 # fix up dirstate
725 # fix up dirstate
726 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
726 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
727 dryrun=dryrun, cwd=cwd)
727 dryrun=dryrun, cwd=cwd)
728 if rename and not dryrun:
728 if rename and not dryrun:
729 if not after and srcexists and not samefile:
729 if not after and srcexists and not samefile:
730 util.unlinkpath(repo.wjoin(abssrc))
730 util.unlinkpath(repo.wjoin(abssrc))
731 wctx.forget([abssrc])
731 wctx.forget([abssrc])
732
732
733 # pat: ossep
733 # pat: ossep
734 # dest ossep
734 # dest ossep
735 # srcs: list of (hgsep, hgsep, ossep, bool)
735 # srcs: list of (hgsep, hgsep, ossep, bool)
736 # return: function that takes hgsep and returns ossep
736 # return: function that takes hgsep and returns ossep
737 def targetpathfn(pat, dest, srcs):
737 def targetpathfn(pat, dest, srcs):
738 if os.path.isdir(pat):
738 if os.path.isdir(pat):
739 abspfx = pathutil.canonpath(repo.root, cwd, pat)
739 abspfx = pathutil.canonpath(repo.root, cwd, pat)
740 abspfx = util.localpath(abspfx)
740 abspfx = util.localpath(abspfx)
741 if destdirexists:
741 if destdirexists:
742 striplen = len(os.path.split(abspfx)[0])
742 striplen = len(os.path.split(abspfx)[0])
743 else:
743 else:
744 striplen = len(abspfx)
744 striplen = len(abspfx)
745 if striplen:
745 if striplen:
746 striplen += len(pycompat.ossep)
746 striplen += len(pycompat.ossep)
747 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
747 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
748 elif destdirexists:
748 elif destdirexists:
749 res = lambda p: os.path.join(dest,
749 res = lambda p: os.path.join(dest,
750 os.path.basename(util.localpath(p)))
750 os.path.basename(util.localpath(p)))
751 else:
751 else:
752 res = lambda p: dest
752 res = lambda p: dest
753 return res
753 return res
754
754
755 # pat: ossep
755 # pat: ossep
756 # dest ossep
756 # dest ossep
757 # srcs: list of (hgsep, hgsep, ossep, bool)
757 # srcs: list of (hgsep, hgsep, ossep, bool)
758 # return: function that takes hgsep and returns ossep
758 # return: function that takes hgsep and returns ossep
759 def targetpathafterfn(pat, dest, srcs):
759 def targetpathafterfn(pat, dest, srcs):
760 if matchmod.patkind(pat):
760 if matchmod.patkind(pat):
761 # a mercurial pattern
761 # a mercurial pattern
762 res = lambda p: os.path.join(dest,
762 res = lambda p: os.path.join(dest,
763 os.path.basename(util.localpath(p)))
763 os.path.basename(util.localpath(p)))
764 else:
764 else:
765 abspfx = pathutil.canonpath(repo.root, cwd, pat)
765 abspfx = pathutil.canonpath(repo.root, cwd, pat)
766 if len(abspfx) < len(srcs[0][0]):
766 if len(abspfx) < len(srcs[0][0]):
767 # A directory. Either the target path contains the last
767 # A directory. Either the target path contains the last
768 # component of the source path or it does not.
768 # component of the source path or it does not.
769 def evalpath(striplen):
769 def evalpath(striplen):
770 score = 0
770 score = 0
771 for s in srcs:
771 for s in srcs:
772 t = os.path.join(dest, util.localpath(s[0])[striplen:])
772 t = os.path.join(dest, util.localpath(s[0])[striplen:])
773 if os.path.lexists(t):
773 if os.path.lexists(t):
774 score += 1
774 score += 1
775 return score
775 return score
776
776
777 abspfx = util.localpath(abspfx)
777 abspfx = util.localpath(abspfx)
778 striplen = len(abspfx)
778 striplen = len(abspfx)
779 if striplen:
779 if striplen:
780 striplen += len(pycompat.ossep)
780 striplen += len(pycompat.ossep)
781 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
781 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
782 score = evalpath(striplen)
782 score = evalpath(striplen)
783 striplen1 = len(os.path.split(abspfx)[0])
783 striplen1 = len(os.path.split(abspfx)[0])
784 if striplen1:
784 if striplen1:
785 striplen1 += len(pycompat.ossep)
785 striplen1 += len(pycompat.ossep)
786 if evalpath(striplen1) > score:
786 if evalpath(striplen1) > score:
787 striplen = striplen1
787 striplen = striplen1
788 res = lambda p: os.path.join(dest,
788 res = lambda p: os.path.join(dest,
789 util.localpath(p)[striplen:])
789 util.localpath(p)[striplen:])
790 else:
790 else:
791 # a file
791 # a file
792 if destdirexists:
792 if destdirexists:
793 res = lambda p: os.path.join(dest,
793 res = lambda p: os.path.join(dest,
794 os.path.basename(util.localpath(p)))
794 os.path.basename(util.localpath(p)))
795 else:
795 else:
796 res = lambda p: dest
796 res = lambda p: dest
797 return res
797 return res
798
798
799 pats = scmutil.expandpats(pats)
799 pats = scmutil.expandpats(pats)
800 if not pats:
800 if not pats:
801 raise error.Abort(_('no source or destination specified'))
801 raise error.Abort(_('no source or destination specified'))
802 if len(pats) == 1:
802 if len(pats) == 1:
803 raise error.Abort(_('no destination specified'))
803 raise error.Abort(_('no destination specified'))
804 dest = pats.pop()
804 dest = pats.pop()
805 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
805 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
806 if not destdirexists:
806 if not destdirexists:
807 if len(pats) > 1 or matchmod.patkind(pats[0]):
807 if len(pats) > 1 or matchmod.patkind(pats[0]):
808 raise error.Abort(_('with multiple sources, destination must be an '
808 raise error.Abort(_('with multiple sources, destination must be an '
809 'existing directory'))
809 'existing directory'))
810 if util.endswithsep(dest):
810 if util.endswithsep(dest):
811 raise error.Abort(_('destination %s is not a directory') % dest)
811 raise error.Abort(_('destination %s is not a directory') % dest)
812
812
813 tfn = targetpathfn
813 tfn = targetpathfn
814 if after:
814 if after:
815 tfn = targetpathafterfn
815 tfn = targetpathafterfn
816 copylist = []
816 copylist = []
817 for pat in pats:
817 for pat in pats:
818 srcs = walkpat(pat)
818 srcs = walkpat(pat)
819 if not srcs:
819 if not srcs:
820 continue
820 continue
821 copylist.append((tfn(pat, dest, srcs), srcs))
821 copylist.append((tfn(pat, dest, srcs), srcs))
822 if not copylist:
822 if not copylist:
823 raise error.Abort(_('no files to copy'))
823 raise error.Abort(_('no files to copy'))
824
824
825 errors = 0
825 errors = 0
826 for targetpath, srcs in copylist:
826 for targetpath, srcs in copylist:
827 for abssrc, relsrc, exact in srcs:
827 for abssrc, relsrc, exact in srcs:
828 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
828 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
829 errors += 1
829 errors += 1
830
830
831 if errors:
831 if errors:
832 ui.warn(_('(consider using --after)\n'))
832 ui.warn(_('(consider using --after)\n'))
833
833
834 return errors != 0
834 return errors != 0
835
835
836 ## facility to let extension process additional data into an import patch
836 ## facility to let extension process additional data into an import patch
837 # list of identifier to be executed in order
837 # list of identifier to be executed in order
838 extrapreimport = [] # run before commit
838 extrapreimport = [] # run before commit
839 extrapostimport = [] # run after commit
839 extrapostimport = [] # run after commit
840 # mapping from identifier to actual import function
840 # mapping from identifier to actual import function
841 #
841 #
842 # 'preimport' are run before the commit is made and are provided the following
842 # 'preimport' are run before the commit is made and are provided the following
843 # arguments:
843 # arguments:
844 # - repo: the localrepository instance,
844 # - repo: the localrepository instance,
845 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
845 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
846 # - extra: the future extra dictionary of the changeset, please mutate it,
846 # - extra: the future extra dictionary of the changeset, please mutate it,
847 # - opts: the import options.
847 # - opts: the import options.
848 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
848 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
849 # mutation of in memory commit and more. Feel free to rework the code to get
849 # mutation of in memory commit and more. Feel free to rework the code to get
850 # there.
850 # there.
851 extrapreimportmap = {}
851 extrapreimportmap = {}
852 # 'postimport' are run after the commit is made and are provided the following
852 # 'postimport' are run after the commit is made and are provided the following
853 # argument:
853 # argument:
854 # - ctx: the changectx created by import.
854 # - ctx: the changectx created by import.
855 extrapostimportmap = {}
855 extrapostimportmap = {}
856
856
857 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
857 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
858 """Utility function used by commands.import to import a single patch
858 """Utility function used by commands.import to import a single patch
859
859
860 This function is explicitly defined here to help the evolve extension to
860 This function is explicitly defined here to help the evolve extension to
861 wrap this part of the import logic.
861 wrap this part of the import logic.
862
862
863 The API is currently a bit ugly because it a simple code translation from
863 The API is currently a bit ugly because it a simple code translation from
864 the import command. Feel free to make it better.
864 the import command. Feel free to make it better.
865
865
866 :hunk: a patch (as a binary string)
866 :hunk: a patch (as a binary string)
867 :parents: nodes that will be parent of the created commit
867 :parents: nodes that will be parent of the created commit
868 :opts: the full dict of option passed to the import command
868 :opts: the full dict of option passed to the import command
869 :msgs: list to save commit message to.
869 :msgs: list to save commit message to.
870 (used in case we need to save it when failing)
870 (used in case we need to save it when failing)
871 :updatefunc: a function that update a repo to a given node
871 :updatefunc: a function that update a repo to a given node
872 updatefunc(<repo>, <node>)
872 updatefunc(<repo>, <node>)
873 """
873 """
874 # avoid cycle context -> subrepo -> cmdutil
874 # avoid cycle context -> subrepo -> cmdutil
875 from . import context
875 from . import context
876 extractdata = patch.extract(ui, hunk)
876 extractdata = patch.extract(ui, hunk)
877 tmpname = extractdata.get('filename')
877 tmpname = extractdata.get('filename')
878 message = extractdata.get('message')
878 message = extractdata.get('message')
879 user = opts.get('user') or extractdata.get('user')
879 user = opts.get('user') or extractdata.get('user')
880 date = opts.get('date') or extractdata.get('date')
880 date = opts.get('date') or extractdata.get('date')
881 branch = extractdata.get('branch')
881 branch = extractdata.get('branch')
882 nodeid = extractdata.get('nodeid')
882 nodeid = extractdata.get('nodeid')
883 p1 = extractdata.get('p1')
883 p1 = extractdata.get('p1')
884 p2 = extractdata.get('p2')
884 p2 = extractdata.get('p2')
885
885
886 nocommit = opts.get('no_commit')
886 nocommit = opts.get('no_commit')
887 importbranch = opts.get('import_branch')
887 importbranch = opts.get('import_branch')
888 update = not opts.get('bypass')
888 update = not opts.get('bypass')
889 strip = opts["strip"]
889 strip = opts["strip"]
890 prefix = opts["prefix"]
890 prefix = opts["prefix"]
891 sim = float(opts.get('similarity') or 0)
891 sim = float(opts.get('similarity') or 0)
892 if not tmpname:
892 if not tmpname:
893 return (None, None, False)
893 return (None, None, False)
894
894
895 rejects = False
895 rejects = False
896
896
897 try:
897 try:
898 cmdline_message = logmessage(ui, opts)
898 cmdline_message = logmessage(ui, opts)
899 if cmdline_message:
899 if cmdline_message:
900 # pickup the cmdline msg
900 # pickup the cmdline msg
901 message = cmdline_message
901 message = cmdline_message
902 elif message:
902 elif message:
903 # pickup the patch msg
903 # pickup the patch msg
904 message = message.strip()
904 message = message.strip()
905 else:
905 else:
906 # launch the editor
906 # launch the editor
907 message = None
907 message = None
908 ui.debug('message:\n%s\n' % message)
908 ui.debug('message:\n%s\n' % message)
909
909
910 if len(parents) == 1:
910 if len(parents) == 1:
911 parents.append(repo[nullid])
911 parents.append(repo[nullid])
912 if opts.get('exact'):
912 if opts.get('exact'):
913 if not nodeid or not p1:
913 if not nodeid or not p1:
914 raise error.Abort(_('not a Mercurial patch'))
914 raise error.Abort(_('not a Mercurial patch'))
915 p1 = repo[p1]
915 p1 = repo[p1]
916 p2 = repo[p2 or nullid]
916 p2 = repo[p2 or nullid]
917 elif p2:
917 elif p2:
918 try:
918 try:
919 p1 = repo[p1]
919 p1 = repo[p1]
920 p2 = repo[p2]
920 p2 = repo[p2]
921 # Without any options, consider p2 only if the
921 # Without any options, consider p2 only if the
922 # patch is being applied on top of the recorded
922 # patch is being applied on top of the recorded
923 # first parent.
923 # first parent.
924 if p1 != parents[0]:
924 if p1 != parents[0]:
925 p1 = parents[0]
925 p1 = parents[0]
926 p2 = repo[nullid]
926 p2 = repo[nullid]
927 except error.RepoError:
927 except error.RepoError:
928 p1, p2 = parents
928 p1, p2 = parents
929 if p2.node() == nullid:
929 if p2.node() == nullid:
930 ui.warn(_("warning: import the patch as a normal revision\n"
930 ui.warn(_("warning: import the patch as a normal revision\n"
931 "(use --exact to import the patch as a merge)\n"))
931 "(use --exact to import the patch as a merge)\n"))
932 else:
932 else:
933 p1, p2 = parents
933 p1, p2 = parents
934
934
935 n = None
935 n = None
936 if update:
936 if update:
937 if p1 != parents[0]:
937 if p1 != parents[0]:
938 updatefunc(repo, p1.node())
938 updatefunc(repo, p1.node())
939 if p2 != parents[1]:
939 if p2 != parents[1]:
940 repo.setparents(p1.node(), p2.node())
940 repo.setparents(p1.node(), p2.node())
941
941
942 if opts.get('exact') or importbranch:
942 if opts.get('exact') or importbranch:
943 repo.dirstate.setbranch(branch or 'default')
943 repo.dirstate.setbranch(branch or 'default')
944
944
945 partial = opts.get('partial', False)
945 partial = opts.get('partial', False)
946 files = set()
946 files = set()
947 try:
947 try:
948 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
948 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
949 files=files, eolmode=None, similarity=sim / 100.0)
949 files=files, eolmode=None, similarity=sim / 100.0)
950 except patch.PatchError as e:
950 except patch.PatchError as e:
951 if not partial:
951 if not partial:
952 raise error.Abort(str(e))
952 raise error.Abort(str(e))
953 if partial:
953 if partial:
954 rejects = True
954 rejects = True
955
955
956 files = list(files)
956 files = list(files)
957 if nocommit:
957 if nocommit:
958 if message:
958 if message:
959 msgs.append(message)
959 msgs.append(message)
960 else:
960 else:
961 if opts.get('exact') or p2:
961 if opts.get('exact') or p2:
962 # If you got here, you either use --force and know what
962 # If you got here, you either use --force and know what
963 # you are doing or used --exact or a merge patch while
963 # you are doing or used --exact or a merge patch while
964 # being updated to its first parent.
964 # being updated to its first parent.
965 m = None
965 m = None
966 else:
966 else:
967 m = scmutil.matchfiles(repo, files or [])
967 m = scmutil.matchfiles(repo, files or [])
968 editform = mergeeditform(repo[None], 'import.normal')
968 editform = mergeeditform(repo[None], 'import.normal')
969 if opts.get('exact'):
969 if opts.get('exact'):
970 editor = None
970 editor = None
971 else:
971 else:
972 editor = getcommiteditor(editform=editform, **opts)
972 editor = getcommiteditor(editform=editform, **opts)
973 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
973 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
974 extra = {}
974 extra = {}
975 for idfunc in extrapreimport:
975 for idfunc in extrapreimport:
976 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
976 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
977 try:
977 try:
978 if partial:
978 if partial:
979 repo.ui.setconfig('ui', 'allowemptycommit', True)
979 repo.ui.setconfig('ui', 'allowemptycommit', True)
980 n = repo.commit(message, user,
980 n = repo.commit(message, user,
981 date, match=m,
981 date, match=m,
982 editor=editor, extra=extra)
982 editor=editor, extra=extra)
983 for idfunc in extrapostimport:
983 for idfunc in extrapostimport:
984 extrapostimportmap[idfunc](repo[n])
984 extrapostimportmap[idfunc](repo[n])
985 finally:
985 finally:
986 repo.ui.restoreconfig(allowemptyback)
986 repo.ui.restoreconfig(allowemptyback)
987 else:
987 else:
988 if opts.get('exact') or importbranch:
988 if opts.get('exact') or importbranch:
989 branch = branch or 'default'
989 branch = branch or 'default'
990 else:
990 else:
991 branch = p1.branch()
991 branch = p1.branch()
992 store = patch.filestore()
992 store = patch.filestore()
993 try:
993 try:
994 files = set()
994 files = set()
995 try:
995 try:
996 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
996 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
997 files, eolmode=None)
997 files, eolmode=None)
998 except patch.PatchError as e:
998 except patch.PatchError as e:
999 raise error.Abort(str(e))
999 raise error.Abort(str(e))
1000 if opts.get('exact'):
1000 if opts.get('exact'):
1001 editor = None
1001 editor = None
1002 else:
1002 else:
1003 editor = getcommiteditor(editform='import.bypass')
1003 editor = getcommiteditor(editform='import.bypass')
1004 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1004 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1005 message,
1005 message,
1006 user,
1006 user,
1007 date,
1007 date,
1008 branch, files, store,
1008 branch, files, store,
1009 editor=editor)
1009 editor=editor)
1010 n = memctx.commit()
1010 n = memctx.commit()
1011 finally:
1011 finally:
1012 store.close()
1012 store.close()
1013 if opts.get('exact') and nocommit:
1013 if opts.get('exact') and nocommit:
1014 # --exact with --no-commit is still useful in that it does merge
1014 # --exact with --no-commit is still useful in that it does merge
1015 # and branch bits
1015 # and branch bits
1016 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1016 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1017 elif opts.get('exact') and hex(n) != nodeid:
1017 elif opts.get('exact') and hex(n) != nodeid:
1018 raise error.Abort(_('patch is damaged or loses information'))
1018 raise error.Abort(_('patch is damaged or loses information'))
1019 msg = _('applied to working directory')
1019 msg = _('applied to working directory')
1020 if n:
1020 if n:
1021 # i18n: refers to a short changeset id
1021 # i18n: refers to a short changeset id
1022 msg = _('created %s') % short(n)
1022 msg = _('created %s') % short(n)
1023 return (msg, n, rejects)
1023 return (msg, n, rejects)
1024 finally:
1024 finally:
1025 os.unlink(tmpname)
1025 os.unlink(tmpname)
1026
1026
1027 # facility to let extensions include additional data in an exported patch
1027 # facility to let extensions include additional data in an exported patch
1028 # list of identifiers to be executed in order
1028 # list of identifiers to be executed in order
1029 extraexport = []
1029 extraexport = []
1030 # mapping from identifier to actual export function
1030 # mapping from identifier to actual export function
1031 # function as to return a string to be added to the header or None
1031 # function as to return a string to be added to the header or None
1032 # it is given two arguments (sequencenumber, changectx)
1032 # it is given two arguments (sequencenumber, changectx)
1033 extraexportmap = {}
1033 extraexportmap = {}
1034
1034
1035 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1035 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1036 opts=None, match=None):
1036 opts=None, match=None):
1037 '''export changesets as hg patches.'''
1037 '''export changesets as hg patches.'''
1038
1038
1039 total = len(revs)
1039 total = len(revs)
1040 revwidth = max([len(str(rev)) for rev in revs])
1040 revwidth = max([len(str(rev)) for rev in revs])
1041 filemode = {}
1041 filemode = {}
1042
1042
1043 def single(rev, seqno, fp):
1043 def single(rev, seqno, fp):
1044 ctx = repo[rev]
1044 ctx = repo[rev]
1045 node = ctx.node()
1045 node = ctx.node()
1046 parents = [p.node() for p in ctx.parents() if p]
1046 parents = [p.node() for p in ctx.parents() if p]
1047 branch = ctx.branch()
1047 branch = ctx.branch()
1048 if switch_parent:
1048 if switch_parent:
1049 parents.reverse()
1049 parents.reverse()
1050
1050
1051 if parents:
1051 if parents:
1052 prev = parents[0]
1052 prev = parents[0]
1053 else:
1053 else:
1054 prev = nullid
1054 prev = nullid
1055
1055
1056 shouldclose = False
1056 shouldclose = False
1057 if not fp and len(template) > 0:
1057 if not fp and len(template) > 0:
1058 desc_lines = ctx.description().rstrip().split('\n')
1058 desc_lines = ctx.description().rstrip().split('\n')
1059 desc = desc_lines[0] #Commit always has a first line.
1059 desc = desc_lines[0] #Commit always has a first line.
1060 fp = makefileobj(repo, template, node, desc=desc, total=total,
1060 fp = makefileobj(repo, template, node, desc=desc, total=total,
1061 seqno=seqno, revwidth=revwidth, mode='wb',
1061 seqno=seqno, revwidth=revwidth, mode='wb',
1062 modemap=filemode)
1062 modemap=filemode)
1063 shouldclose = True
1063 shouldclose = True
1064 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1064 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1065 repo.ui.note("%s\n" % fp.name)
1065 repo.ui.note("%s\n" % fp.name)
1066
1066
1067 if not fp:
1067 if not fp:
1068 write = repo.ui.write
1068 write = repo.ui.write
1069 else:
1069 else:
1070 def write(s, **kw):
1070 def write(s, **kw):
1071 fp.write(s)
1071 fp.write(s)
1072
1072
1073 write("# HG changeset patch\n")
1073 write("# HG changeset patch\n")
1074 write("# User %s\n" % ctx.user())
1074 write("# User %s\n" % ctx.user())
1075 write("# Date %d %d\n" % ctx.date())
1075 write("# Date %d %d\n" % ctx.date())
1076 write("# %s\n" % util.datestr(ctx.date()))
1076 write("# %s\n" % util.datestr(ctx.date()))
1077 if branch and branch != 'default':
1077 if branch and branch != 'default':
1078 write("# Branch %s\n" % branch)
1078 write("# Branch %s\n" % branch)
1079 write("# Node ID %s\n" % hex(node))
1079 write("# Node ID %s\n" % hex(node))
1080 write("# Parent %s\n" % hex(prev))
1080 write("# Parent %s\n" % hex(prev))
1081 if len(parents) > 1:
1081 if len(parents) > 1:
1082 write("# Parent %s\n" % hex(parents[1]))
1082 write("# Parent %s\n" % hex(parents[1]))
1083
1083
1084 for headerid in extraexport:
1084 for headerid in extraexport:
1085 header = extraexportmap[headerid](seqno, ctx)
1085 header = extraexportmap[headerid](seqno, ctx)
1086 if header is not None:
1086 if header is not None:
1087 write('# %s\n' % header)
1087 write('# %s\n' % header)
1088 write(ctx.description().rstrip())
1088 write(ctx.description().rstrip())
1089 write("\n\n")
1089 write("\n\n")
1090
1090
1091 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1091 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1092 write(chunk, label=label)
1092 write(chunk, label=label)
1093
1093
1094 if shouldclose:
1094 if shouldclose:
1095 fp.close()
1095 fp.close()
1096
1096
1097 for seqno, rev in enumerate(revs):
1097 for seqno, rev in enumerate(revs):
1098 single(rev, seqno + 1, fp)
1098 single(rev, seqno + 1, fp)
1099
1099
1100 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1100 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1101 changes=None, stat=False, fp=None, prefix='',
1101 changes=None, stat=False, fp=None, prefix='',
1102 root='', listsubrepos=False):
1102 root='', listsubrepos=False):
1103 '''show diff or diffstat.'''
1103 '''show diff or diffstat.'''
1104 if fp is None:
1104 if fp is None:
1105 write = ui.write
1105 write = ui.write
1106 else:
1106 else:
1107 def write(s, **kw):
1107 def write(s, **kw):
1108 fp.write(s)
1108 fp.write(s)
1109
1109
1110 if root:
1110 if root:
1111 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1111 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1112 else:
1112 else:
1113 relroot = ''
1113 relroot = ''
1114 if relroot != '':
1114 if relroot != '':
1115 # XXX relative roots currently don't work if the root is within a
1115 # XXX relative roots currently don't work if the root is within a
1116 # subrepo
1116 # subrepo
1117 uirelroot = match.uipath(relroot)
1117 uirelroot = match.uipath(relroot)
1118 relroot += '/'
1118 relroot += '/'
1119 for matchroot in match.files():
1119 for matchroot in match.files():
1120 if not matchroot.startswith(relroot):
1120 if not matchroot.startswith(relroot):
1121 ui.warn(_('warning: %s not inside relative root %s\n') % (
1121 ui.warn(_('warning: %s not inside relative root %s\n') % (
1122 match.uipath(matchroot), uirelroot))
1122 match.uipath(matchroot), uirelroot))
1123
1123
1124 if stat:
1124 if stat:
1125 diffopts = diffopts.copy(context=0)
1125 diffopts = diffopts.copy(context=0)
1126 width = 80
1126 width = 80
1127 if not ui.plain():
1127 if not ui.plain():
1128 width = ui.termwidth()
1128 width = ui.termwidth()
1129 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1129 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1130 prefix=prefix, relroot=relroot)
1130 prefix=prefix, relroot=relroot)
1131 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1131 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1132 width=width):
1132 width=width):
1133 write(chunk, label=label)
1133 write(chunk, label=label)
1134 else:
1134 else:
1135 for chunk, label in patch.diffui(repo, node1, node2, match,
1135 for chunk, label in patch.diffui(repo, node1, node2, match,
1136 changes, diffopts, prefix=prefix,
1136 changes, diffopts, prefix=prefix,
1137 relroot=relroot):
1137 relroot=relroot):
1138 write(chunk, label=label)
1138 write(chunk, label=label)
1139
1139
1140 if listsubrepos:
1140 if listsubrepos:
1141 ctx1 = repo[node1]
1141 ctx1 = repo[node1]
1142 ctx2 = repo[node2]
1142 ctx2 = repo[node2]
1143 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1143 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1144 tempnode2 = node2
1144 tempnode2 = node2
1145 try:
1145 try:
1146 if node2 is not None:
1146 if node2 is not None:
1147 tempnode2 = ctx2.substate[subpath][1]
1147 tempnode2 = ctx2.substate[subpath][1]
1148 except KeyError:
1148 except KeyError:
1149 # A subrepo that existed in node1 was deleted between node1 and
1149 # A subrepo that existed in node1 was deleted between node1 and
1150 # node2 (inclusive). Thus, ctx2's substate won't contain that
1150 # node2 (inclusive). Thus, ctx2's substate won't contain that
1151 # subpath. The best we can do is to ignore it.
1151 # subpath. The best we can do is to ignore it.
1152 tempnode2 = None
1152 tempnode2 = None
1153 submatch = matchmod.subdirmatcher(subpath, match)
1153 submatch = matchmod.subdirmatcher(subpath, match)
1154 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1154 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1155 stat=stat, fp=fp, prefix=prefix)
1155 stat=stat, fp=fp, prefix=prefix)
1156
1156
1157 def _changesetlabels(ctx):
1157 def _changesetlabels(ctx):
1158 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1158 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1159 if ctx.troubled():
1159 if ctx.troubled():
1160 labels.append('changeset.troubled')
1160 labels.append('changeset.troubled')
1161 for trouble in ctx.troubles():
1161 for trouble in ctx.troubles():
1162 labels.append('trouble.%s' % trouble)
1162 labels.append('trouble.%s' % trouble)
1163 return ' '.join(labels)
1163 return ' '.join(labels)
1164
1164
1165 class changeset_printer(object):
1165 class changeset_printer(object):
1166 '''show changeset information when templating not requested.'''
1166 '''show changeset information when templating not requested.'''
1167
1167
1168 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1168 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1169 self.ui = ui
1169 self.ui = ui
1170 self.repo = repo
1170 self.repo = repo
1171 self.buffered = buffered
1171 self.buffered = buffered
1172 self.matchfn = matchfn
1172 self.matchfn = matchfn
1173 self.diffopts = diffopts
1173 self.diffopts = diffopts
1174 self.header = {}
1174 self.header = {}
1175 self.hunk = {}
1175 self.hunk = {}
1176 self.lastheader = None
1176 self.lastheader = None
1177 self.footer = None
1177 self.footer = None
1178
1178
1179 def flush(self, ctx):
1179 def flush(self, ctx):
1180 rev = ctx.rev()
1180 rev = ctx.rev()
1181 if rev in self.header:
1181 if rev in self.header:
1182 h = self.header[rev]
1182 h = self.header[rev]
1183 if h != self.lastheader:
1183 if h != self.lastheader:
1184 self.lastheader = h
1184 self.lastheader = h
1185 self.ui.write(h)
1185 self.ui.write(h)
1186 del self.header[rev]
1186 del self.header[rev]
1187 if rev in self.hunk:
1187 if rev in self.hunk:
1188 self.ui.write(self.hunk[rev])
1188 self.ui.write(self.hunk[rev])
1189 del self.hunk[rev]
1189 del self.hunk[rev]
1190 return 1
1190 return 1
1191 return 0
1191 return 0
1192
1192
1193 def close(self):
1193 def close(self):
1194 if self.footer:
1194 if self.footer:
1195 self.ui.write(self.footer)
1195 self.ui.write(self.footer)
1196
1196
1197 def show(self, ctx, copies=None, matchfn=None, **props):
1197 def show(self, ctx, copies=None, matchfn=None, **props):
1198 if self.buffered:
1198 if self.buffered:
1199 self.ui.pushbuffer(labeled=True)
1199 self.ui.pushbuffer(labeled=True)
1200 self._show(ctx, copies, matchfn, props)
1200 self._show(ctx, copies, matchfn, props)
1201 self.hunk[ctx.rev()] = self.ui.popbuffer()
1201 self.hunk[ctx.rev()] = self.ui.popbuffer()
1202 else:
1202 else:
1203 self._show(ctx, copies, matchfn, props)
1203 self._show(ctx, copies, matchfn, props)
1204
1204
1205 def _show(self, ctx, copies, matchfn, props):
1205 def _show(self, ctx, copies, matchfn, props):
1206 '''show a single changeset or file revision'''
1206 '''show a single changeset or file revision'''
1207 changenode = ctx.node()
1207 changenode = ctx.node()
1208 rev = ctx.rev()
1208 rev = ctx.rev()
1209 if self.ui.debugflag:
1209 if self.ui.debugflag:
1210 hexfunc = hex
1210 hexfunc = hex
1211 else:
1211 else:
1212 hexfunc = short
1212 hexfunc = short
1213 # as of now, wctx.node() and wctx.rev() return None, but we want to
1213 # as of now, wctx.node() and wctx.rev() return None, but we want to
1214 # show the same values as {node} and {rev} templatekw
1214 # show the same values as {node} and {rev} templatekw
1215 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1215 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1216
1216
1217 if self.ui.quiet:
1217 if self.ui.quiet:
1218 self.ui.write("%d:%s\n" % revnode, label='log.node')
1218 self.ui.write("%d:%s\n" % revnode, label='log.node')
1219 return
1219 return
1220
1220
1221 date = util.datestr(ctx.date())
1221 date = util.datestr(ctx.date())
1222
1222
1223 # i18n: column positioning for "hg log"
1223 # i18n: column positioning for "hg log"
1224 self.ui.write(_("changeset: %d:%s\n") % revnode,
1224 self.ui.write(_("changeset: %d:%s\n") % revnode,
1225 label=_changesetlabels(ctx))
1225 label=_changesetlabels(ctx))
1226
1226
1227 # branches are shown first before any other names due to backwards
1227 # branches are shown first before any other names due to backwards
1228 # compatibility
1228 # compatibility
1229 branch = ctx.branch()
1229 branch = ctx.branch()
1230 # don't show the default branch name
1230 # don't show the default branch name
1231 if branch != 'default':
1231 if branch != 'default':
1232 # i18n: column positioning for "hg log"
1232 # i18n: column positioning for "hg log"
1233 self.ui.write(_("branch: %s\n") % branch,
1233 self.ui.write(_("branch: %s\n") % branch,
1234 label='log.branch')
1234 label='log.branch')
1235
1235
1236 for nsname, ns in self.repo.names.iteritems():
1236 for nsname, ns in self.repo.names.iteritems():
1237 # branches has special logic already handled above, so here we just
1237 # branches has special logic already handled above, so here we just
1238 # skip it
1238 # skip it
1239 if nsname == 'branches':
1239 if nsname == 'branches':
1240 continue
1240 continue
1241 # we will use the templatename as the color name since those two
1241 # we will use the templatename as the color name since those two
1242 # should be the same
1242 # should be the same
1243 for name in ns.names(self.repo, changenode):
1243 for name in ns.names(self.repo, changenode):
1244 self.ui.write(ns.logfmt % name,
1244 self.ui.write(ns.logfmt % name,
1245 label='log.%s' % ns.colorname)
1245 label='log.%s' % ns.colorname)
1246 if self.ui.debugflag:
1246 if self.ui.debugflag:
1247 # i18n: column positioning for "hg log"
1247 # i18n: column positioning for "hg log"
1248 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1248 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1249 label='log.phase')
1249 label='log.phase')
1250 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1250 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1251 label = 'log.parent changeset.%s' % pctx.phasestr()
1251 label = 'log.parent changeset.%s' % pctx.phasestr()
1252 # i18n: column positioning for "hg log"
1252 # i18n: column positioning for "hg log"
1253 self.ui.write(_("parent: %d:%s\n")
1253 self.ui.write(_("parent: %d:%s\n")
1254 % (pctx.rev(), hexfunc(pctx.node())),
1254 % (pctx.rev(), hexfunc(pctx.node())),
1255 label=label)
1255 label=label)
1256
1256
1257 if self.ui.debugflag and rev is not None:
1257 if self.ui.debugflag and rev is not None:
1258 mnode = ctx.manifestnode()
1258 mnode = ctx.manifestnode()
1259 # i18n: column positioning for "hg log"
1259 # i18n: column positioning for "hg log"
1260 self.ui.write(_("manifest: %d:%s\n") %
1260 self.ui.write(_("manifest: %d:%s\n") %
1261 (self.repo.manifestlog._revlog.rev(mnode),
1261 (self.repo.manifestlog._revlog.rev(mnode),
1262 hex(mnode)),
1262 hex(mnode)),
1263 label='ui.debug log.manifest')
1263 label='ui.debug log.manifest')
1264 # i18n: column positioning for "hg log"
1264 # i18n: column positioning for "hg log"
1265 self.ui.write(_("user: %s\n") % ctx.user(),
1265 self.ui.write(_("user: %s\n") % ctx.user(),
1266 label='log.user')
1266 label='log.user')
1267 # i18n: column positioning for "hg log"
1267 # i18n: column positioning for "hg log"
1268 self.ui.write(_("date: %s\n") % date,
1268 self.ui.write(_("date: %s\n") % date,
1269 label='log.date')
1269 label='log.date')
1270
1270
1271 if ctx.troubled():
1271 if ctx.troubled():
1272 # i18n: column positioning for "hg log"
1272 # i18n: column positioning for "hg log"
1273 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1273 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1274 label='log.trouble')
1274 label='log.trouble')
1275
1275
1276 if self.ui.debugflag:
1276 if self.ui.debugflag:
1277 files = ctx.p1().status(ctx)[:3]
1277 files = ctx.p1().status(ctx)[:3]
1278 for key, value in zip([# i18n: column positioning for "hg log"
1278 for key, value in zip([# i18n: column positioning for "hg log"
1279 _("files:"),
1279 _("files:"),
1280 # i18n: column positioning for "hg log"
1280 # i18n: column positioning for "hg log"
1281 _("files+:"),
1281 _("files+:"),
1282 # i18n: column positioning for "hg log"
1282 # i18n: column positioning for "hg log"
1283 _("files-:")], files):
1283 _("files-:")], files):
1284 if value:
1284 if value:
1285 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1285 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1286 label='ui.debug log.files')
1286 label='ui.debug log.files')
1287 elif ctx.files() and self.ui.verbose:
1287 elif ctx.files() and self.ui.verbose:
1288 # i18n: column positioning for "hg log"
1288 # i18n: column positioning for "hg log"
1289 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1289 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1290 label='ui.note log.files')
1290 label='ui.note log.files')
1291 if copies and self.ui.verbose:
1291 if copies and self.ui.verbose:
1292 copies = ['%s (%s)' % c for c in copies]
1292 copies = ['%s (%s)' % c for c in copies]
1293 # i18n: column positioning for "hg log"
1293 # i18n: column positioning for "hg log"
1294 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1294 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1295 label='ui.note log.copies')
1295 label='ui.note log.copies')
1296
1296
1297 extra = ctx.extra()
1297 extra = ctx.extra()
1298 if extra and self.ui.debugflag:
1298 if extra and self.ui.debugflag:
1299 for key, value in sorted(extra.items()):
1299 for key, value in sorted(extra.items()):
1300 # i18n: column positioning for "hg log"
1300 # i18n: column positioning for "hg log"
1301 self.ui.write(_("extra: %s=%s\n")
1301 self.ui.write(_("extra: %s=%s\n")
1302 % (key, value.encode('string_escape')),
1302 % (key, value.encode('string_escape')),
1303 label='ui.debug log.extra')
1303 label='ui.debug log.extra')
1304
1304
1305 description = ctx.description().strip()
1305 description = ctx.description().strip()
1306 if description:
1306 if description:
1307 if self.ui.verbose:
1307 if self.ui.verbose:
1308 self.ui.write(_("description:\n"),
1308 self.ui.write(_("description:\n"),
1309 label='ui.note log.description')
1309 label='ui.note log.description')
1310 self.ui.write(description,
1310 self.ui.write(description,
1311 label='ui.note log.description')
1311 label='ui.note log.description')
1312 self.ui.write("\n\n")
1312 self.ui.write("\n\n")
1313 else:
1313 else:
1314 # i18n: column positioning for "hg log"
1314 # i18n: column positioning for "hg log"
1315 self.ui.write(_("summary: %s\n") %
1315 self.ui.write(_("summary: %s\n") %
1316 description.splitlines()[0],
1316 description.splitlines()[0],
1317 label='log.summary')
1317 label='log.summary')
1318 self.ui.write("\n")
1318 self.ui.write("\n")
1319
1319
1320 self.showpatch(ctx, matchfn)
1320 self.showpatch(ctx, matchfn)
1321
1321
1322 def showpatch(self, ctx, matchfn):
1322 def showpatch(self, ctx, matchfn):
1323 if not matchfn:
1323 if not matchfn:
1324 matchfn = self.matchfn
1324 matchfn = self.matchfn
1325 if matchfn:
1325 if matchfn:
1326 stat = self.diffopts.get('stat')
1326 stat = self.diffopts.get('stat')
1327 diff = self.diffopts.get('patch')
1327 diff = self.diffopts.get('patch')
1328 diffopts = patch.diffallopts(self.ui, self.diffopts)
1328 diffopts = patch.diffallopts(self.ui, self.diffopts)
1329 node = ctx.node()
1329 node = ctx.node()
1330 prev = ctx.p1().node()
1330 prev = ctx.p1().node()
1331 if stat:
1331 if stat:
1332 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1332 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1333 match=matchfn, stat=True)
1333 match=matchfn, stat=True)
1334 if diff:
1334 if diff:
1335 if stat:
1335 if stat:
1336 self.ui.write("\n")
1336 self.ui.write("\n")
1337 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1337 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1338 match=matchfn, stat=False)
1338 match=matchfn, stat=False)
1339 self.ui.write("\n")
1339 self.ui.write("\n")
1340
1340
class jsonchangeset(changeset_printer):
    '''format changeset information.

    Emits one JSON object per changeset, building the surrounding JSON
    array incrementally: _show() writes "[\n {" for the first entry and
    ",\n {" for subsequent ones, and close() terminates the array.
    String values are escaped with encoding.jsonescape.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # True until the first changeset has been written; drives both the
        # opening token in _show() and the closing token in close()
        self._first = True

    def close(self):
        # terminate the array; if nothing was ever shown, emit an empty one
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working-directory context: no numeric rev / node hash
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        # quiet mode: only rev and node, then close this object early
        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            # debug output adds manifest node, extras and per-state file lists
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status vs first parent: (modified, added, removed, ...)
            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            # verbose (non-debug) output: a flat changed-file list
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture the diffstat output and embed it as a JSON string
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                # same trick for the full patch text
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1439
1439
class changeset_templater(changeset_printer):
    '''format changeset information.

    Renders each changeset through a templater built either from a style
    map file or from a literal template string (exactly one of the two
    must be given).
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # callers pass either a template string or a map file, never both
        assert not (tmpl and mapfile)
        defaulttempl = templatekw.defaulttempl
        if mapfile:
            self.t = templater.templater.frommapfile(mapfile,
                                                     cache=defaulttempl)
        else:
            self.t = formatter.maketemplater(ui, 'changeset', tmpl,
                                             cache=defaulttempl)

        self.cache = {}

        # find correct templates for current mode; later entries override
        # earlier ones, so debug > quiet > verbose > plain
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # map logical part name -> template name to use for it; empty
        # string means "no template defined for this part"
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    # e.g. 'changeset_debug' when in debug mode
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        # the document header is written once, up front
        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the document footer before the base class flushes output
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        # build the keyword mapping handed to the templater; copy so we
        # do not mutate the caller's props
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only emit the header when it changed since the last one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        # the footer is rendered once (from the first changeset's props)
        # and flushed by close()
        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1514
1514
def gettemplate(ui, tmpl, style):
    """Resolve a template spec or style name to a (tmpl, mapfile) pair.

    Explicit arguments win over ui configuration; a template always wins
    over a style. At most one element of the returned pair is non-None;
    (None, None) means nothing usable was found.
    """

    # fall back to ui configuration when neither argument was given
    if not tmpl and not style:    # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return templater.unquotestring(tmpl), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        # a bare name refers to a bundled "map-cmdline.<name>" style
        if not os.path.split(mapfile)[0]:
            found = (templater.templatepath('map-cmdline.' + mapfile)
                     or templater.templatepath(mapfile))
            if found:
                mapfile = found
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1541
1541
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when a patch or diffstat will be shown
    needsmatcher = opts.get('patch') or opts.get('stat')
    matchfn = scmutil.matchall(repo) if needsmatcher else None

    # JSON output bypasses template/style resolution entirely
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
    if tmpl or mapfile:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, matchfn, opts, buffered)
1567
1567
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    fm is a formatter; index, when given, is printed first. Fields are
    written in a fixed order: index, precursor node, successor nodes,
    flags, parent nodes (if recorded), date, then remaining metadata.
    """
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    succs = marker.succnodes()
    # condwrite: only emit successors when the marker has any
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is shown separately above, so strip it from the metadata
    # (copy first: metadata() may hand back shared state)
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1588
1588
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec.

    Returns the matching revision number as a string; raises
    error.Abort when no changeset matches.
    """

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    matches = {}

    def collect(ctx, fns):
        # record every changeset whose commit time satisfies the spec
        when = ctx.date()
        if datematch(when[0]):
            matches[ctx.rev()] = when

    # walkchangerevs calls collect() on each changeset in the window
    # before yielding it, so matches is populated by the time we test
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, collect):
        rev = ctx.rev()
        if rev not in matches:
            continue
        ui.status(_("found revision %s from %s\n")
                  % (rev, util.datestr(matches[rev])))
        return str(rev)

    raise error.Abort(_("revision matching date not found"))
1609
1609
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling from windowsize up to sizelimit.

    Once sizelimit is reached the same value is yielded indefinitely.
    """
    current = windowsize
    while True:
        yield current
        if current < sizelimit:
            current *= 2
1615
1615
class FileWalkError(Exception):
    # Raised by walkfilerevs() when the requested file history cannot be
    # walked using filelogs alone and the caller must use a slower path.
    pass
1618
1618
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Side effect: fncache (a dict) is filled with rev -> [filenames]
    entries for every wanted revision.
    '''
    wanted = set()
    # (filename, filenode) pairs discovered through renames while
    # following; processed after the explicitly matched files
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            # map filelog parent revisions to changelog revisions
            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            # renamed() is only consulted when following copies/renames
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for each matched file, then
        # for every rename source found along the way
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                # plain walk: just clamp to the requested range
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                # remember the rename source so iterfiles() walks it too
                copies.append(copied)

    return wanted
1715
1715
class _followfilter(object):
    """Incrementally decide whether revisions belong to a 'follow' walk.

    The first revision fed to match() anchors the walk; afterwards,
    revisions above the anchor are matched as descendants and revisions
    below it as ancestors, with self.roots tracking the frontier.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev   # anchor; set by the first match() call
        self.roots = set()        # current frontier of the walk
        self.onlyfirst = onlyfirst  # restrict ancestry to first parents

    def match(self, rev):
        def realparents(r):
            # parent revs of r, honoring onlyfirst and dropping nullrev
            parents = self.repo.changelog.parentrevs(r)
            if self.onlyfirst:
                return parents[0:1]
            return [p for p in parents if p != nullrev]

        if self.startrev == nullrev:
            # very first revision seen anchors the walk
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants; rev matches when some parent of
            # it is already part of the walk
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
            return False

        # backwards: all parents; rev matches when it sits on the
        # frontier, which then retreats to rev's own parents
        if not self.roots:
            self.roots.update(realparents(self.startrev))
        if rev in self.roots:
            self.roots.remove(rev)
            self.roots.update(realparents(rev))
            return True
        return False
1753
1753
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # slow path is needed for patterns, or when --removed requires scanning
    # every changeset for exact/prefix file matches
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): relies on 'wanted' supporting '-' with a
                # list here — confirm for the lazywantedset slow path
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            # following with no file patterns: filter revs through the
            # follow filter in addition to the 'wanted' membership test
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # forward pass over the window: call prepare() on each ctx
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # then yield the window's revisions in the original order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1891
1891
def _makefollowlogfilematcher(repo, files, followfirst):
    """Build a rev -> matcher factory for --patch/--stat with --follow FILE.

    When displaying a revision with --patch --follow FILE we must know
    which file of the revision to diff. With --follow we want the names
    of the ancestors of FILE in each revision, so we reproduce the graph
    traversal already done by the --follow revset and relate revs to
    file names (not strictly "correct", but good enough).
    """
    # rev -> set of file names that the followed files had at that rev
    revfiles = {}
    populated = [False]  # one-element list so the closure can flip it
    parentctx = repo['.']

    def _fill():
        for name in files:
            fctx = parentctx[name]
            revfiles.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                revfiles.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        if not populated[0]:
            # populate lazily, only on first use
            populated[0] = True
            _fill()
        return scmutil.matchfiles(repo, revfiles.get(rev, []))

    return filematcher
1919
1919
1920 def _makenofollowlogfilematcher(repo, pats, opts):
1920 def _makenofollowlogfilematcher(repo, pats, opts):
1921 '''hook for extensions to override the filematcher for non-follow cases'''
1921 '''hook for extensions to override the filematcher for non-follow cases'''
1922 return None
1922 return None
1923
1923
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # map of option name -> (revset template, join operator for list values)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)  # local copy: synthetic '_*' keys are added below
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Combine every recognized (and truthy) option into one revset string,
    # AND-ing the per-option expressions together.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2075
2075
def _logrevs(repo, opts):
    """Compute the default revision set for log-like commands."""
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # nothing checked out, so nothing to follow
            return smartset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2090
2090
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # keep only the first 'limit' revisions
        clipped = []
        for pos, rev in enumerate(revs):
            if pos >= limit:
                break
            clipped.append(rev)
        revs = smartset.baseset(clipped)

    return revs, expr, filematcher
2121
2121
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        revs = revset.match(repo.ui, expr,
                            order=revset.followorder)(repo, revs)
    if limit is not None:
        # truncate to the first 'limit' revisions
        kept = []
        for pos, rev in enumerate(revs):
            if pos >= limit:
                break
            kept.append(rev)
        revs = smartset.baseset(kept)

    return revs, expr, filematcher
2147
2147
def _graphnodeformatter(ui, displayer):
    """Return a callable (repo, ctx) -> str rendering the graph node glyph."""
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # no custom template configured: fast path for "{graphnode}"
        return templatekw.showgraphnode

    templ = formatter.gettemplater(ui, 'graphnode', spec)
    if isinstance(displayer, changeset_templater):
        # reuse cache of slow templates
        cache = displayer.cache
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache

    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))

    return formatnode
2167
2167
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    # Render 'dag' as an ASCII graph: one formatted changeset per node,
    # drawn via graphmod with per-edge styling from experimental config.
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            # collect (file, source) pairs for files renamed in this rev
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # pull the buffered output back out of the displayer and split
        # into lines for the graph renderer (drop a trailing empty line)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2215
2215
def graphlog(ui, repo, *pats, **opts):
    """Show revision history as an ASCII graph (-G/--graph log).

    Takes exactly the same parameters as the log command.
    """
    revs, expr, matchfn = getgraphlogrevs(repo, pats, opts)
    dag = graphmod.dagwalker(repo, revs)

    renamedfn = None
    if opts.get('copies'):
        # bound the copy/rename lookup by the highest requested rev
        stoprev = None
        if opts.get('rev'):
            stoprev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        renamedfn = templatekw.getrenamedfn(repo, endrev=stoprev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, dag, displayer, graphmod.asciiedges,
                 renamedfn, matchfn)
2232
2232
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option that --graph cannot honor was supplied."""
    unsupported = ["newest_first"]
    for name in unsupported:
        if opts.get(name):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % name.replace("_", "-"))
def graphrevs(repo, nodes, opts):
    """Return a graphmod DAG over *nodes*, newest first, honoring --limit.

    NOTE: reverses *nodes* in place before (optionally) truncating it.
    """
    nodes.reverse()
    cap = loglimit(opts)
    if cap is not None:
        nodes = nodes[:cap]
    return graphmod.nodes(repo, nodes)
def add(ui, repo, match, prefix, explicitonly, **opts):
    """add files matched by `match` to the dirstate, recursing into subrepos

    When `explicitonly` is true, only exact (explicitly listed) matches are
    added. Returns the list of files that were requested but could not be
    added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record files the matcher rejects, then delegate to the original handler
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for case collisions against tracked files
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get('subrepos'):
                # --subrepos: add everything the submatcher picks up
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # otherwise only add explicitly matched subrepo files
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
def forget(ui, repo, match, prefix, explicitonly):
    """unmark files matched by `match` as tracked, recursing into subrepos

    Returns a pair (bad, forgot): files that could not be forgotten, and
    files that were successfully forgotten.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record files the matcher rejects, then delegate to the original handler
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # status tuple indices 0, 1, 3, 6 = modified, added, deleted, clean
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            # requalify subrepo-relative paths with the subrepo prefix
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files of ctx matched by m through the formatter fm.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    dirstate = ctx.repo().dirstate
    ret = 1

    for path in ctx.matches(m):
        # in the working directory (rev is None), skip files marked removed
        if rev is None and dirstate[path] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fctx = ctx[path]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(abspath=path)
        fm.write('path', fmt, m.rel(path))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if not (subrepos or m.exact(subpath) or any(submatch.files())):
            continue
        sub = ctx.sub(subpath)
        try:
            recurse = m.exact(subpath) or subrepos
            if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                ret = 0
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % m.abs(subpath))

    return ret
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """remove files matched by m from tracking (and, unless --after, disk)

    after: only mark files already deleted from disk (--after)
    force: also remove modified/added files (--force)
    subrepos: recurse into subrepositories
    warnings: accumulator for warning messages; when None (the outermost
    call), a local list is used and its contents are printed before
    returning.

    Returns 0 on success, 1 if any warning was issued.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # only the outermost call prints the accumulated warnings (warn=True)
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        # closure reads f from the loop; it is only called within this
        # iteration, so the late binding is intentional and safe here
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # select which files actually get removed, warning about the rest
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """output the data of files in ctx matched by matcher (hg cat)

    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # destination stream for this file, honoring the --output option
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # fall through to the generic walk below
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
def samefile(f, ctx1, ctx2):
    """Report whether file f is identical (content and flags) in both contexts.

    A file absent from both manifests is considered the same.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if in1 and in2:
        a = ctx1.filectx(f)
        b = ctx2.filectx(f)
        return not a.cmp(b) and a.flags() == b.flags()
    # present on exactly one side -> different; absent from both -> same
    return not in1 and not in2
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """rewrite changeset `old` folding in current working-directory changes

    commitfunc: callable used to create the intermediate commit
    old: the changeset being amended
    extra: dict of extra commit metadata, updated in place from `old` and
    the intermediate commit; 'amend_source' is added to it

    Returns the node of the amended changeset (or `old`'s node when nothing
    changed and no new node was needed).
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                # deactivate the bookmark so the temporary commit does not
                # move it; restore it afterwards
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            # |          from working dir to go into amending commit
            # |          (or a workingctx if there were no changes)
            # |
            # old      o - changeset to amend
            # |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # BUGFIX: this used to be "if old.p2:", which tested the
                # bound method itself and was therefore always true, running
                # pathcopies against the null changeset for non-merges. Only
                # merge changesets actually have a second parent to consult.
                if len(old.parents()) > 1:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This is not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                # restore the original phase configuration
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
        #commit the whole amend process
        if createmarkers:
            # mark the new changeset as successor of the rewritten one
            new = repo[newid]
            obs = [(old, (new,))]
            if node:
                obs.append((ctx, ()))

            obsolete.createmarkers(repo, obs)
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
def commiteditor(repo, ctx, subs, editform=''):
    """Return the commit message for ctx, invoking an editor if needed.

    A non-empty existing description is returned verbatim. Otherwise the
    user is sent to an editor via commitforceeditor(), with unchanged-
    message detection enabled so an untouched template aborts the commit.
    """
    existing = ctx.description()
    if existing:
        return existing
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2745
2745
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Always launch an editor to obtain a commit message for ctx.

    The editor is pre-filled either from a ``[committemplate]`` template
    (most specific matching editform first) or from the plain text built
    by buildcommittext(). ``HG:``-prefixed lines are stripped from the
    result. Raises error.Abort on an empty message, or — when
    ``unchangedmessagedetection`` is set — when the user saved the
    template without modifying it. Returns the final message text.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # Try the most specific template name first, e.g. for editform
    # 'commit.normal' try 'changeset.commit.normal', then
    # 'changeset.commit', then 'changeset'; fall back to plain text.
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        # no template configured at any specificity level
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # remove the HG: helper lines, then restore the original cwd
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2795
2795
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit-message editor template for ctx and return it.

    ``tmpl`` is resolved via gettemplate() and expanded through a
    changeset_templater. Additional ``[committemplate]`` config items
    (other than 'changeset' itself) are exposed to the template engine
    as cached template fragments.
    """
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    templater = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    for key, value in repo.ui.configitems('committemplate'):
        if key == 'changeset':
            continue
        templater.t.cache[key] = value

    extramsg = extramsg or ''  # ensure that extramsg is a string

    ui.pushbuffer()
    templater.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2812
2812
def hgprefix(msg):
    """Prefix every non-empty line of msg with 'HG: ', dropping blanks."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2815
2815
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the plain-text (non-templated) commit editor contents."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("")  # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        lines.append(hgprefix(_("subrepo %s") % s))
    for f in added:
        lines.append(hgprefix(_("added %s") % f))
    for f in modified:
        lines.append(hgprefix(_("changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_("removed %s") % f))
    if not added and not modified and not removed:
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
2843
2843
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Report status messages after committing ``node``.

    Emits 'created new head' when the commit adds a branch head,
    'reopening closed branch head' when it reopens one, and (in
    debug/verbose mode) the committed changeset id. ``bheads`` are the
    branch heads as they were *before* the commit.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    # new head: node is not among the old branch heads and none of its
    # parents was a head of this branch (amend never creates a new head)
    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        # verbose mode prints the short hash (str(ctx)), debug the full one
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2891
2891
def postcommitstatus(repo, pats, opts):
    """Return working-directory status for the given patterns."""
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
2894
2894
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files in the working directory to their state in ``ctx``.

    ``parents`` are the working directory parents. Files matching
    ``pats`` (or everything, if none) are classified against both the
    target revision and the dirstate, then dispatched to revert /
    add / remove / forget / undelete actions, with backups taken as
    configured. Subrepos on the match list are reverted recursively.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # only warn about a bad path if neither it nor any file
                # under it is already known, and it is not a subrepo
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of files>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3176
3176
3177 def _revertprefetch(repo, ctx, *files):
3177 def _revertprefetch(repo, ctx, *files):
3178 """Let extension changing the storage layer prefetch content"""
3178 """Let extension changing the storage layer prefetch content"""
3179 pass
3179 pass
3180
3180
3181 def _performrevert(repo, parents, ctx, actions, interactive=False,
3181 def _performrevert(repo, parents, ctx, actions, interactive=False,
3182 tobackup=None):
3182 tobackup=None):
3183 """function that actually perform all the actions computed for revert
3183 """function that actually perform all the actions computed for revert
3184
3184
3185 This is an independent function to let extension to plug in and react to
3185 This is an independent function to let extension to plug in and react to
3186 the imminent revert.
3186 the imminent revert.
3187
3187
3188 Make sure you have the working directory locked when calling this function.
3188 Make sure you have the working directory locked when calling this function.
3189 """
3189 """
3190 parent, p2 = parents
3190 parent, p2 = parents
3191 node = ctx.node()
3191 node = ctx.node()
3192 excluded_files = []
3192 excluded_files = []
3193 matcher_opts = {"exclude": excluded_files}
3193 matcher_opts = {"exclude": excluded_files}
3194
3194
3195 def checkout(f):
3195 def checkout(f):
3196 fc = ctx[f]
3196 fc = ctx[f]
3197 repo.wwrite(f, fc.data(), fc.flags())
3197 repo.wwrite(f, fc.data(), fc.flags())
3198
3198
3199 def doremove(f):
3199 def doremove(f):
3200 try:
3200 try:
3201 util.unlinkpath(repo.wjoin(f))
3201 util.unlinkpath(repo.wjoin(f))
3202 except OSError:
3202 except OSError:
3203 pass
3203 pass
3204 repo.dirstate.remove(f)
3204 repo.dirstate.remove(f)
3205
3205
3206 audit_path = pathutil.pathauditor(repo.root)
3206 audit_path = pathutil.pathauditor(repo.root)
3207 for f in actions['forget'][0]:
3207 for f in actions['forget'][0]:
3208 if interactive:
3208 if interactive:
3209 choice = repo.ui.promptchoice(
3209 choice = repo.ui.promptchoice(
3210 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3210 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3211 if choice == 0:
3211 if choice == 0:
3212 repo.dirstate.drop(f)
3212 repo.dirstate.drop(f)
3213 else:
3213 else:
3214 excluded_files.append(repo.wjoin(f))
3214 excluded_files.append(repo.wjoin(f))
3215 else:
3215 else:
3216 repo.dirstate.drop(f)
3216 repo.dirstate.drop(f)
3217 for f in actions['remove'][0]:
3217 for f in actions['remove'][0]:
3218 audit_path(f)
3218 audit_path(f)
3219 if interactive:
3219 if interactive:
3220 choice = repo.ui.promptchoice(
3220 choice = repo.ui.promptchoice(
3221 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3221 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3222 if choice == 0:
3222 if choice == 0:
3223 doremove(f)
3223 doremove(f)
3224 else:
3224 else:
3225 excluded_files.append(repo.wjoin(f))
3225 excluded_files.append(repo.wjoin(f))
3226 else:
3226 else:
3227 doremove(f)
3227 doremove(f)
3228 for f in actions['drop'][0]:
3228 for f in actions['drop'][0]:
3229 audit_path(f)
3229 audit_path(f)
3230 repo.dirstate.remove(f)
3230 repo.dirstate.remove(f)
3231
3231
3232 normal = None
3232 normal = None
3233 if node == parent:
3233 if node == parent:
3234 # We're reverting to our parent. If possible, we'd like status
3234 # We're reverting to our parent. If possible, we'd like status
3235 # to report the file as clean. We have to use normallookup for
3235 # to report the file as clean. We have to use normallookup for
3236 # merges to avoid losing information about merged/dirty files.
3236 # merges to avoid losing information about merged/dirty files.
3237 if p2 != nullid:
3237 if p2 != nullid:
3238 normal = repo.dirstate.normallookup
3238 normal = repo.dirstate.normallookup
3239 else:
3239 else:
3240 normal = repo.dirstate.normal
3240 normal = repo.dirstate.normal
3241
3241
3242 newlyaddedandmodifiedfiles = set()
3242 newlyaddedandmodifiedfiles = set()
3243 if interactive:
3243 if interactive:
3244 # Prompt the user for changes to revert
3244 # Prompt the user for changes to revert
3245 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3245 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3246 m = scmutil.match(ctx, torevert, matcher_opts)
3246 m = scmutil.match(ctx, torevert, matcher_opts)
3247 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3247 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3248 diffopts.nodates = True
3248 diffopts.nodates = True
3249 diffopts.git = True
3249 diffopts.git = True
3250 operation = 'discard'
3250 operation = 'discard'
3251 reversehunks = True
3251 reversehunks = True
3252 if node != parent:
3252 if node != parent:
3253 operation = 'revert'
3253 operation = 'revert'
3254 reversehunks = repo.ui.configbool('experimental',
3254 reversehunks = repo.ui.configbool('experimental',
3255 'revertalternateinteractivemode',
3255 'revertalternateinteractivemode',
3256 True)
3256 True)
3257 if reversehunks:
3257 if reversehunks:
3258 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3258 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3259 else:
3259 else:
3260 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3260 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3261 originalchunks = patch.parsepatch(diff)
3261 originalchunks = patch.parsepatch(diff)
3262
3262
3263 try:
3263 try:
3264
3264
3265 chunks, opts = recordfilter(repo.ui, originalchunks,
3265 chunks, opts = recordfilter(repo.ui, originalchunks,
3266 operation=operation)
3266 operation=operation)
3267 if reversehunks:
3267 if reversehunks:
3268 chunks = patch.reversehunks(chunks)
3268 chunks = patch.reversehunks(chunks)
3269
3269
3270 except patch.PatchError as err:
3270 except patch.PatchError as err:
3271 raise error.Abort(_('error parsing patch: %s') % err)
3271 raise error.Abort(_('error parsing patch: %s') % err)
3272
3272
3273 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3273 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3274 if tobackup is None:
3274 if tobackup is None:
3275 tobackup = set()
3275 tobackup = set()
3276 # Apply changes
3276 # Apply changes
3277 fp = stringio()
3277 fp = stringio()
3278 for c in chunks:
3278 for c in chunks:
3279 # Create a backup file only if this hunk should be backed up
3279 # Create a backup file only if this hunk should be backed up
3280 if ishunk(c) and c.header.filename() in tobackup:
3280 if ishunk(c) and c.header.filename() in tobackup:
3281 abs = c.header.filename()
3281 abs = c.header.filename()
3282 target = repo.wjoin(abs)
3282 target = repo.wjoin(abs)
3283 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3283 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3284 util.copyfile(target, bakname)
3284 util.copyfile(target, bakname)
3285 tobackup.remove(abs)
3285 tobackup.remove(abs)
3286 c.write(fp)
3286 c.write(fp)
3287 dopatch = fp.tell()
3287 dopatch = fp.tell()
3288 fp.seek(0)
3288 fp.seek(0)
3289 if dopatch:
3289 if dopatch:
3290 try:
3290 try:
3291 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3291 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3292 except patch.PatchError as err:
3292 except patch.PatchError as err:
3293 raise error.Abort(str(err))
3293 raise error.Abort(str(err))
3294 del fp
3294 del fp
3295 else:
3295 else:
3296 for f in actions['revert'][0]:
3296 for f in actions['revert'][0]:
3297 checkout(f)
3297 checkout(f)
3298 if normal:
3298 if normal:
3299 normal(f)
3299 normal(f)
3300
3300
3301 for f in actions['add'][0]:
3301 for f in actions['add'][0]:
3302 # Don't checkout modified files, they are already created by the diff
3302 # Don't checkout modified files, they are already created by the diff
3303 if f not in newlyaddedandmodifiedfiles:
3303 if f not in newlyaddedandmodifiedfiles:
3304 checkout(f)
3304 checkout(f)
3305 repo.dirstate.add(f)
3305 repo.dirstate.add(f)
3306
3306
3307 normal = repo.dirstate.normallookup
3307 normal = repo.dirstate.normallookup
3308 if node == parent and p2 == nullid:
3308 if node == parent and p2 == nullid:
3309 normal = repo.dirstate.normal
3309 normal = repo.dirstate.normal
3310 for f in actions['undelete'][0]:
3310 for f in actions['undelete'][0]:
3311 checkout(f)
3311 checkout(f)
3312 normal(f)
3312 normal(f)
3313
3313
3314 copied = copies.pathcopies(repo[parent], ctx)
3314 copied = copies.pathcopies(repo[parent], ctx)
3315
3315
3316 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3316 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3317 if f in copied:
3317 if f in copied:
3318 repo.dirstate.copy(copied[f], f)
3318 repo.dirstate.copy(copied[f], f)
3319
3319
3320 def command(table):
3320 def command(table):
3321 """Returns a function object to be used as a decorator for making commands.
3321 """Returns a function object to be used as a decorator for making commands.
3322
3322
3323 This function receives a command table as its argument. The table should
3323 This function receives a command table as its argument. The table should
3324 be a dict.
3324 be a dict.
3325
3325
3326 The returned function can be used as a decorator for adding commands
3326 The returned function can be used as a decorator for adding commands
3327 to that command table. This function accepts multiple arguments to define
3327 to that command table. This function accepts multiple arguments to define
3328 a command.
3328 a command.
3329
3329
3330 The first argument is the command name.
3330 The first argument is the command name.
3331
3331
3332 The options argument is an iterable of tuples defining command arguments.
3332 The options argument is an iterable of tuples defining command arguments.
3333 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3333 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3334
3334
3335 The synopsis argument defines a short, one line summary of how to use the
3335 The synopsis argument defines a short, one line summary of how to use the
3336 command. This shows up in the help output.
3336 command. This shows up in the help output.
3337
3337
3338 The norepo argument defines whether the command does not require a
3338 The norepo argument defines whether the command does not require a
3339 local repository. Most commands operate against a repository, thus the
3339 local repository. Most commands operate against a repository, thus the
3340 default is False.
3340 default is False.
3341
3341
3342 The optionalrepo argument defines whether the command optionally requires
3342 The optionalrepo argument defines whether the command optionally requires
3343 a local repository.
3343 a local repository.
3344
3344
3345 The inferrepo argument defines whether to try to find a repository from the
3345 The inferrepo argument defines whether to try to find a repository from the
3346 command line arguments. If True, arguments will be examined for potential
3346 command line arguments. If True, arguments will be examined for potential
3347 repository locations. See ``findrepo()``. If a repository is found, it
3347 repository locations. See ``findrepo()``. If a repository is found, it
3348 will be used.
3348 will be used.
3349 """
3349 """
3350 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3350 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3351 inferrepo=False):
3351 inferrepo=False):
3352 def decorator(func):
3352 def decorator(func):
3353 func.norepo = norepo
3353 func.norepo = norepo
3354 func.optionalrepo = optionalrepo
3354 func.optionalrepo = optionalrepo
3355 func.inferrepo = inferrepo
3355 func.inferrepo = inferrepo
3356 if synopsis:
3356 if synopsis:
3357 table[name] = func, list(options), synopsis
3357 table[name] = func, list(options), synopsis
3358 else:
3358 else:
3359 table[name] = func, list(options)
3359 table[name] = func, list(options)
3360 return func
3360 return func
3361 return decorator
3361 return decorator
3362
3362
3363 return cmd
3363 return cmd
3364
3364
3365 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3365 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3366 # commands.outgoing. "missing" is "missing" of the result of
3366 # commands.outgoing. "missing" is "missing" of the result of
3367 # "findcommonoutgoing()"
3367 # "findcommonoutgoing()"
3368 outgoinghooks = util.hooks()
3368 outgoinghooks = util.hooks()
3369
3369
3370 # a list of (ui, repo) functions called by commands.summary
3370 # a list of (ui, repo) functions called by commands.summary
3371 summaryhooks = util.hooks()
3371 summaryhooks = util.hooks()
3372
3372
3373 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3373 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3374 #
3374 #
3375 # functions should return tuple of booleans below, if 'changes' is None:
3375 # functions should return tuple of booleans below, if 'changes' is None:
3376 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3376 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3377 #
3377 #
3378 # otherwise, 'changes' is a tuple of tuples below:
3378 # otherwise, 'changes' is a tuple of tuples below:
3379 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3379 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3380 # - (desturl, destbranch, destpeer, outgoing)
3380 # - (desturl, destbranch, destpeer, outgoing)
3381 summaryremotehooks = util.hooks()
3381 summaryremotehooks = util.hooks()
3382
3382
3383 # A list of state files kept by multistep operations like graft.
3383 # A list of state files kept by multistep operations like graft.
3384 # Since graft cannot be aborted, it is considered 'clearable' by update.
3384 # Since graft cannot be aborted, it is considered 'clearable' by update.
3385 # note: bisect is intentionally excluded
3385 # note: bisect is intentionally excluded
3386 # (state file, clearable, allowcommit, error, hint)
3386 # (state file, clearable, allowcommit, error, hint)
3387 unfinishedstates = [
3387 unfinishedstates = [
3388 ('graftstate', True, False, _('graft in progress'),
3388 ('graftstate', True, False, _('graft in progress'),
3389 _("use 'hg graft --continue' or 'hg update' to abort")),
3389 _("use 'hg graft --continue' or 'hg update' to abort")),
3390 ('updatestate', True, False, _('last update was interrupted'),
3390 ('updatestate', True, False, _('last update was interrupted'),
3391 _("use 'hg update' to get a consistent checkout"))
3391 _("use 'hg update' to get a consistent checkout"))
3392 ]
3392 ]
3393
3393
3394 def checkunfinished(repo, commit=False):
3394 def checkunfinished(repo, commit=False):
3395 '''Look for an unfinished multistep operation, like graft, and abort
3395 '''Look for an unfinished multistep operation, like graft, and abort
3396 if found. It's probably good to check this right before
3396 if found. It's probably good to check this right before
3397 bailifchanged().
3397 bailifchanged().
3398 '''
3398 '''
3399 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3399 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3400 if commit and allowcommit:
3400 if commit and allowcommit:
3401 continue
3401 continue
3402 if repo.vfs.exists(f):
3402 if repo.vfs.exists(f):
3403 raise error.Abort(msg, hint=hint)
3403 raise error.Abort(msg, hint=hint)
3404
3404
3405 def clearunfinished(repo):
3405 def clearunfinished(repo):
3406 '''Check for unfinished operations (as above), and clear the ones
3406 '''Check for unfinished operations (as above), and clear the ones
3407 that are clearable.
3407 that are clearable.
3408 '''
3408 '''
3409 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3409 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3410 if not clearable and repo.vfs.exists(f):
3410 if not clearable and repo.vfs.exists(f):
3411 raise error.Abort(msg, hint=hint)
3411 raise error.Abort(msg, hint=hint)
3412 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3412 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3413 if clearable and repo.vfs.exists(f):
3413 if clearable and repo.vfs.exists(f):
3414 util.unlink(repo.join(f))
3414 util.unlink(repo.join(f))
3415
3415
3416 afterresolvedstates = [
3416 afterresolvedstates = [
3417 ('graftstate',
3417 ('graftstate',
3418 _('hg graft --continue')),
3418 _('hg graft --continue')),
3419 ]
3419 ]
3420
3420
3421 def howtocontinue(repo):
3421 def howtocontinue(repo):
3422 '''Check for an unfinished operation and return the command to finish
3422 '''Check for an unfinished operation and return the command to finish
3423 it.
3423 it.
3424
3424
3425 afterresolvedstates tuples define a .hg/{file} and the corresponding
3425 afterresolvedstates tuples define a .hg/{file} and the corresponding
3426 command needed to finish it.
3426 command needed to finish it.
3427
3427
3428 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3428 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3429 a boolean.
3429 a boolean.
3430 '''
3430 '''
3431 contmsg = _("continue: %s")
3431 contmsg = _("continue: %s")
3432 for f, msg in afterresolvedstates:
3432 for f, msg in afterresolvedstates:
3433 if repo.vfs.exists(f):
3433 if repo.vfs.exists(f):
3434 return contmsg % msg, True
3434 return contmsg % msg, True
3435 workingctx = repo[None]
3435 workingctx = repo[None]
3436 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3436 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3437 for s in workingctx.substate)
3437 for s in workingctx.substate)
3438 if dirty:
3438 if dirty:
3439 return contmsg % _("hg commit"), False
3439 return contmsg % _("hg commit"), False
3440 return None, None
3440 return None, None
3441
3441
3442 def checkafterresolved(repo):
3442 def checkafterresolved(repo):
3443 '''Inform the user about the next action after completing hg resolve
3443 '''Inform the user about the next action after completing hg resolve
3444
3444
3445 If there's a matching afterresolvedstates, howtocontinue will yield
3445 If there's a matching afterresolvedstates, howtocontinue will yield
3446 repo.ui.warn as the reporter.
3446 repo.ui.warn as the reporter.
3447
3447
3448 Otherwise, it will yield repo.ui.note.
3448 Otherwise, it will yield repo.ui.note.
3449 '''
3449 '''
3450 msg, warning = howtocontinue(repo)
3450 msg, warning = howtocontinue(repo)
3451 if msg is not None:
3451 if msg is not None:
3452 if warning:
3452 if warning:
3453 repo.ui.warn("%s\n" % msg)
3453 repo.ui.warn("%s\n" % msg)
3454 else:
3454 else:
3455 repo.ui.note("%s\n" % msg)
3455 repo.ui.note("%s\n" % msg)
3456
3456
3457 def wrongtooltocontinue(repo, task):
3457 def wrongtooltocontinue(repo, task):
3458 '''Raise an abort suggesting how to properly continue if there is an
3458 '''Raise an abort suggesting how to properly continue if there is an
3459 active task.
3459 active task.
3460
3460
3461 Uses howtocontinue() to find the active task.
3461 Uses howtocontinue() to find the active task.
3462
3462
3463 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3463 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3464 a hint.
3464 a hint.
3465 '''
3465 '''
3466 after = howtocontinue(repo)
3466 after = howtocontinue(repo)
3467 hint = None
3467 hint = None
3468 if after[1]:
3468 if after[1]:
3469 hint = after[0]
3469 hint = after[0]
3470 raise error.Abort(_('no %s in progress') % task, hint=hint)
3470 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,2110 +1,2110 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 commands,
35 commands,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 policy,
50 policy,
51 pvec,
51 pvec,
52 pycompat,
52 pycompat,
53 repair,
53 repair,
54 revlog,
54 revlog,
55 revset,
55 revset,
56 revsetlang,
56 revsetlang,
57 scmutil,
57 scmutil,
58 setdiscovery,
58 setdiscovery,
59 simplemerge,
59 simplemerge,
60 smartset,
60 smartset,
61 sslutil,
61 sslutil,
62 streamclone,
62 streamclone,
63 templater,
63 templater,
64 treediscovery,
64 treediscovery,
65 util,
65 util,
66 )
66 )
67
67
68 release = lockmod.release
68 release = lockmod.release
69
69
70 # We reuse the command table from commands because it is easier than
70 # We reuse the command table from commands because it is easier than
71 # teaching dispatch about multiple tables.
71 # teaching dispatch about multiple tables.
72 command = cmdutil.command(commands.table)
72 command = cmdutil.command(commands.table)
73
73
74 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
74 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 def debugancestor(ui, repo, *args):
75 def debugancestor(ui, repo, *args):
76 """find the ancestor revision of two revisions in a given index"""
76 """find the ancestor revision of two revisions in a given index"""
77 if len(args) == 3:
77 if len(args) == 3:
78 index, rev1, rev2 = args
78 index, rev1, rev2 = args
79 r = revlog.revlog(scmutil.opener(pycompat.getcwd(), audit=False), index)
79 r = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False), index)
80 lookup = r.lookup
80 lookup = r.lookup
81 elif len(args) == 2:
81 elif len(args) == 2:
82 if not repo:
82 if not repo:
83 raise error.Abort(_('there is no Mercurial repository here '
83 raise error.Abort(_('there is no Mercurial repository here '
84 '(.hg not found)'))
84 '(.hg not found)'))
85 rev1, rev2 = args
85 rev1, rev2 = args
86 r = repo.changelog
86 r = repo.changelog
87 lookup = repo.lookup
87 lookup = repo.lookup
88 else:
88 else:
89 raise error.Abort(_('either two or three arguments required'))
89 raise error.Abort(_('either two or three arguments required'))
90 a = r.ancestor(lookup(rev1), lookup(rev2))
90 a = r.ancestor(lookup(rev1), lookup(rev2))
91 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
91 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92
92
93 @command('debugapplystreamclonebundle', [], 'FILE')
93 @command('debugapplystreamclonebundle', [], 'FILE')
94 def debugapplystreamclonebundle(ui, repo, fname):
94 def debugapplystreamclonebundle(ui, repo, fname):
95 """apply a stream clone bundle file"""
95 """apply a stream clone bundle file"""
96 f = hg.openpath(ui, fname)
96 f = hg.openpath(ui, fname)
97 gen = exchange.readbundle(ui, f, fname)
97 gen = exchange.readbundle(ui, f, fname)
98 gen.apply(repo)
98 gen.apply(repo)
99
99
100 @command('debugbuilddag',
100 @command('debugbuilddag',
101 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
101 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
102 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 ('n', 'new-file', None, _('add new file at each rev'))],
103 ('n', 'new-file', None, _('add new file at each rev'))],
104 _('[OPTION]... [TEXT]'))
104 _('[OPTION]... [TEXT]'))
105 def debugbuilddag(ui, repo, text=None,
105 def debugbuilddag(ui, repo, text=None,
106 mergeable_file=False,
106 mergeable_file=False,
107 overwritten_file=False,
107 overwritten_file=False,
108 new_file=False):
108 new_file=False):
109 """builds a repo with a given DAG from scratch in the current empty repo
109 """builds a repo with a given DAG from scratch in the current empty repo
110
110
111 The description of the DAG is read from stdin if not given on the
111 The description of the DAG is read from stdin if not given on the
112 command line.
112 command line.
113
113
114 Elements:
114 Elements:
115
115
116 - "+n" is a linear run of n nodes based on the current default parent
116 - "+n" is a linear run of n nodes based on the current default parent
117 - "." is a single node based on the current default parent
117 - "." is a single node based on the current default parent
118 - "$" resets the default parent to null (implied at the start);
118 - "$" resets the default parent to null (implied at the start);
119 otherwise the default parent is always the last node created
119 otherwise the default parent is always the last node created
120 - "<p" sets the default parent to the backref p
120 - "<p" sets the default parent to the backref p
121 - "*p" is a fork at parent p, which is a backref
121 - "*p" is a fork at parent p, which is a backref
122 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
122 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 - "/p2" is a merge of the preceding node and p2
123 - "/p2" is a merge of the preceding node and p2
124 - ":tag" defines a local tag for the preceding node
124 - ":tag" defines a local tag for the preceding node
125 - "@branch" sets the named branch for subsequent nodes
125 - "@branch" sets the named branch for subsequent nodes
126 - "#...\\n" is a comment up to the end of the line
126 - "#...\\n" is a comment up to the end of the line
127
127
128 Whitespace between the above elements is ignored.
128 Whitespace between the above elements is ignored.
129
129
130 A backref is either
130 A backref is either
131
131
132 - a number n, which references the node curr-n, where curr is the current
132 - a number n, which references the node curr-n, where curr is the current
133 node, or
133 node, or
134 - the name of a local tag you placed earlier using ":tag", or
134 - the name of a local tag you placed earlier using ":tag", or
135 - empty to denote the default parent.
135 - empty to denote the default parent.
136
136
137 All string valued-elements are either strictly alphanumeric, or must
137 All string valued-elements are either strictly alphanumeric, or must
138 be enclosed in double quotes ("..."), with "\\" as escape character.
138 be enclosed in double quotes ("..."), with "\\" as escape character.
139 """
139 """
140
140
141 if text is None:
141 if text is None:
142 ui.status(_("reading DAG from stdin\n"))
142 ui.status(_("reading DAG from stdin\n"))
143 text = ui.fin.read()
143 text = ui.fin.read()
144
144
145 cl = repo.changelog
145 cl = repo.changelog
146 if len(cl) > 0:
146 if len(cl) > 0:
147 raise error.Abort(_('repository is not empty'))
147 raise error.Abort(_('repository is not empty'))
148
148
149 # determine number of revs in DAG
149 # determine number of revs in DAG
150 total = 0
150 total = 0
151 for type, data in dagparser.parsedag(text):
151 for type, data in dagparser.parsedag(text):
152 if type == 'n':
152 if type == 'n':
153 total += 1
153 total += 1
154
154
155 if mergeable_file:
155 if mergeable_file:
156 linesperrev = 2
156 linesperrev = 2
157 # make a file with k lines per rev
157 # make a file with k lines per rev
158 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
158 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 initialmergedlines.append("")
159 initialmergedlines.append("")
160
160
161 tags = []
161 tags = []
162
162
163 wlock = lock = tr = None
163 wlock = lock = tr = None
164 try:
164 try:
165 wlock = repo.wlock()
165 wlock = repo.wlock()
166 lock = repo.lock()
166 lock = repo.lock()
167 tr = repo.transaction("builddag")
167 tr = repo.transaction("builddag")
168
168
169 at = -1
169 at = -1
170 atbranch = 'default'
170 atbranch = 'default'
171 nodeids = []
171 nodeids = []
172 id = 0
172 id = 0
173 ui.progress(_('building'), id, unit=_('revisions'), total=total)
173 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 for type, data in dagparser.parsedag(text):
174 for type, data in dagparser.parsedag(text):
175 if type == 'n':
175 if type == 'n':
176 ui.note(('node %s\n' % str(data)))
176 ui.note(('node %s\n' % str(data)))
177 id, ps = data
177 id, ps = data
178
178
179 files = []
179 files = []
180 fctxs = {}
180 fctxs = {}
181
181
182 p2 = None
182 p2 = None
183 if mergeable_file:
183 if mergeable_file:
184 fn = "mf"
184 fn = "mf"
185 p1 = repo[ps[0]]
185 p1 = repo[ps[0]]
186 if len(ps) > 1:
186 if len(ps) > 1:
187 p2 = repo[ps[1]]
187 p2 = repo[ps[1]]
188 pa = p1.ancestor(p2)
188 pa = p1.ancestor(p2)
189 base, local, other = [x[fn].data() for x in (pa, p1,
189 base, local, other = [x[fn].data() for x in (pa, p1,
190 p2)]
190 p2)]
191 m3 = simplemerge.Merge3Text(base, local, other)
191 m3 = simplemerge.Merge3Text(base, local, other)
192 ml = [l.strip() for l in m3.merge_lines()]
192 ml = [l.strip() for l in m3.merge_lines()]
193 ml.append("")
193 ml.append("")
194 elif at > 0:
194 elif at > 0:
195 ml = p1[fn].data().split("\n")
195 ml = p1[fn].data().split("\n")
196 else:
196 else:
197 ml = initialmergedlines
197 ml = initialmergedlines
198 ml[id * linesperrev] += " r%i" % id
198 ml[id * linesperrev] += " r%i" % id
199 mergedtext = "\n".join(ml)
199 mergedtext = "\n".join(ml)
200 files.append(fn)
200 files.append(fn)
201 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
201 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202
202
203 if overwritten_file:
203 if overwritten_file:
204 fn = "of"
204 fn = "of"
205 files.append(fn)
205 files.append(fn)
206 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
206 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207
207
208 if new_file:
208 if new_file:
209 fn = "nf%i" % id
209 fn = "nf%i" % id
210 files.append(fn)
210 files.append(fn)
211 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
211 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 if len(ps) > 1:
212 if len(ps) > 1:
213 if not p2:
213 if not p2:
214 p2 = repo[ps[1]]
214 p2 = repo[ps[1]]
215 for fn in p2:
215 for fn in p2:
216 if fn.startswith("nf"):
216 if fn.startswith("nf"):
217 files.append(fn)
217 files.append(fn)
218 fctxs[fn] = p2[fn]
218 fctxs[fn] = p2[fn]
219
219
220 def fctxfn(repo, cx, path):
220 def fctxfn(repo, cx, path):
221 return fctxs.get(path)
221 return fctxs.get(path)
222
222
223 if len(ps) == 0 or ps[0] < 0:
223 if len(ps) == 0 or ps[0] < 0:
224 pars = [None, None]
224 pars = [None, None]
225 elif len(ps) == 1:
225 elif len(ps) == 1:
226 pars = [nodeids[ps[0]], None]
226 pars = [nodeids[ps[0]], None]
227 else:
227 else:
228 pars = [nodeids[p] for p in ps]
228 pars = [nodeids[p] for p in ps]
229 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
229 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 date=(id, 0),
230 date=(id, 0),
231 user="debugbuilddag",
231 user="debugbuilddag",
232 extra={'branch': atbranch})
232 extra={'branch': atbranch})
233 nodeid = repo.commitctx(cx)
233 nodeid = repo.commitctx(cx)
234 nodeids.append(nodeid)
234 nodeids.append(nodeid)
235 at = id
235 at = id
236 elif type == 'l':
236 elif type == 'l':
237 id, name = data
237 id, name = data
238 ui.note(('tag %s\n' % name))
238 ui.note(('tag %s\n' % name))
239 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
239 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 elif type == 'a':
240 elif type == 'a':
241 ui.note(('branch %s\n' % data))
241 ui.note(('branch %s\n' % data))
242 atbranch = data
242 atbranch = data
243 ui.progress(_('building'), id, unit=_('revisions'), total=total)
243 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 tr.close()
244 tr.close()
245
245
246 if tags:
246 if tags:
247 repo.vfs.write("localtags", "".join(tags))
247 repo.vfs.write("localtags", "".join(tags))
248 finally:
248 finally:
249 ui.progress(_('building'), None)
249 ui.progress(_('building'), None)
250 release(tr, lock, wlock)
250 release(tr, lock, wlock)
251
251
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of a changegroup.

    Without ``all`` only the node hash of each delta chunk is printed.
    With ``all`` every chunk of the changelog, manifest and each filelog
    is dumped with its parents, linked cset and delta base.
    """
    pad = ' ' * indent
    if not all:
        # terse mode: one node id per changelog chunk
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (pad, hex(node)))
            chain = node
        return

    ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n") % pad)

    def showchunks(named):
        # dump every delta chunk of one revlog group under a heading
        ui.write("\n%s%s\n" % (pad, named))
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            p1 = chunkdata['p1']
            p2 = chunkdata['p2']
            cs = chunkdata['cs']
            deltabase = chunkdata['deltabase']
            delta = chunkdata['delta']
            ui.write("%s%s %s %s %s %s %s\n" %
                     (pad, hex(node), hex(p1), hex(p2),
                      hex(cs), hex(deltabase), len(delta)))
            chain = node

    chunkdata = gen.changelogheader()
    showchunks("changelog")
    chunkdata = gen.manifestheader()
    showchunks("manifest")
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata['filename'])

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        # changegroup parts get expanded chunk-by-chunk, indented under
        # their part header
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
301
301
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # only report the bundlespec, do not unpack anything
            ui.write('%s\n' % exchange.getbundlespec(ui, fh))
            return

        gen = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
319
319
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # tracked ('n') or removed ('r') entries must exist in manifest1
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # added ('a') entries must not already exist in manifest1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # merged ('m') entries must come from at least one parent
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # BUG FIX: the original assigned the message to a local named
        # 'error', shadowing the 'error' module, so the subsequent
        # 'error.Abort' raised AttributeError on a string instead of
        # aborting cleanly. Use a non-shadowing name.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
347
347
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # dispatch to the style listing or the color listing
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
358
358
def _debugdisplaycolor(ui):
    """Print every color label known to the ui, each rendered in itself."""
    # work on a throwaway copy so the caller's ui styles stay untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._effects.keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: pick up user-declared color./terminfo. entries too
        for key, dummy in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return ('_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
376
376
def _debugdisplaystyle(ui):
    """Print each configured style label with its rendered effects."""
    ui.write(_('available style:\n'))
    # pad the label column to the widest label name
    colwidth = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, colwidth - len(label))))
            # render each effect name in its own effect
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
388
388
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for name, entry in sorted(commands.table.iteritems()):
        # strip aliases and the '^' important-command marker
        name = name.split('|')[0].strip('^')
        optnames = ', '.join(opt[1] for opt in entry[1])
        ui.write('%s: %s\n' % (name, optnames))
396
396
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # complete option flags instead of command names
        flags = []
        opttables = [commands.globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
            opttables.append(entry[1])
        for table in opttables:
            for opt in table:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
    if ui.verbose:
        # verbose: show every alias of each matching command
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
424
424
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436
436
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling any listed revs
        rlog = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set(int(r) for r in revs)

        def events():
            for r in rlog:
                yield 'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # changelog DAG, optionally annotated with tags and branch names
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        # branch changed: emit an annotation event
                        yield 'a', newb
                        b = newb
                yield 'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
499
499
@command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # with -c/-m/--dir the first positional argument is the revision itself
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
514
514
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is an (unixtime, tzoffset) pair
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
530
530
@command('debugdeltachain',
    commands.debugrevlogopts + commands.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.REVLOGGENERALDELTA

    def revinfo(rev):
        # classify the delta of 'rev' and measure its whole chain
        entry = index[rev]
        compsize = entry[1]
        uncompsize = entry[2]
        chainsize = 0

        if generaldelta:
            # entry[3] is the delta base; compare against parents/neighbors
            if entry[3] == entry[5]:
                deltatype = 'p1'
            elif entry[3] == entry[6]:
                deltatype = 'p2'
            elif entry[3] == rev - 1:
                deltatype = 'prev'
            elif entry[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta a revision is either a full text or a
            # delta against the previous revision
            if entry[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            chainsize += index[iterrev][1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains consecutively by their (unique) base revision
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
631
631
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    if datesort:
        # sort by mtime, then by filename
        keyfunc = lambda x: (x[1][3], x[0])
    else:
        keyfunc = None  # sort by filename
    for path, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent is (state, mode, size, mtime); mtime of -1 means unset.
        # the literals are padded to the width of the strftime output so
        # the columns stay aligned.
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662
662
663 @command('debugdiscovery',
663 @command('debugdiscovery',
664 [('', 'old', None, _('use old-style discovery')),
664 [('', 'old', None, _('use old-style discovery')),
665 ('', 'nonheads', None,
665 ('', 'nonheads', None,
666 _('use old-style discovery with non-heads included')),
666 _('use old-style discovery with non-heads included')),
667 ] + commands.remoteopts,
667 ] + commands.remoteopts,
668 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
668 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round and report the heads found in common.
        if opts.get('old'):
            # Legacy tree-walking discovery (pre set-discovery protocol).
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                # Reduce the raw common set to the heads of its connected
                # ancestor set, mirroring what real discovery would report.
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            # Modern set-based discovery.
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery runs recorded in server logs; each log line is
        # semicolon-separated with the operation name in parts[1].
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        # Single run against the given remote, honoring head restrictions.
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
@command('debugextensions', commands.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose mode: name on its own line, details below
            fm.write('name', '%s\n', extname)
        else:
            # default mode: annotate extensions not tested with this hg
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        # always record 'bundled' for machine-readable output
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
773
773
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # Resolve the context the fileset is evaluated against (working
    # directory when no --rev is given).
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # Verbose mode additionally dumps the parsed fileset tree.
        parsed = fileset.parse(expr)
        ui.note(fileset.prettyformat(parsed), "\n")

    matches = ctx.getfileset(expr)
    for filename in matches:
        ui.write("%s\n" % filename)
786
786
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # A probe file is needed for the case-sensitivity check; make sure it
    # is removed again even if one of the checks raises (previously the
    # probe file leaked on error).
    util.writefile('.debugfsinfo', '')
    try:
        ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
        ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
        ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
        ui.write(('case-sensitive: %s\n') %
                 (util.fscasesensitive('.debugfsinfo') and 'yes' or 'no'))
    finally:
        os.unlink('.debugfsinfo')
797
797
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    # Open a peer for the source repository and verify it speaks the
    # getbundle wire command before doing any work.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # Translate the hex node arguments into binary nodes for the request.
    kwargs = {}
    if common:
        kwargs['common'] = [bin(hexnode) for hexnode in common]
    if head:
        kwargs['heads'] = [bin(hexnode) for hexnode in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle('debug', **kwargs)

    # Map the user-facing compression name onto the on-disk bundle header.
    requested = opts.get('type', 'bzip2').lower()
    header = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}.get(requested)
    if header not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, header)
831
831
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        includepat = getattr(ignore, 'includepat', None)
        if includepat is not None:
            ui.write("%s\n" % includepat)
        else:
            raise error.Abort(_("no ignore patterns found"))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a containing directory is
                    # ignored; first (innermost-to-outermost) match wins
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                # report the exact rule that caused the match
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)
876
876
@command('debugindex', commands.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # the "base" column shows the delta parent for generaldelta revlogs
    # and the chain base otherwise; pick the matching column header
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    # --debug prints full 40-char hashes instead of short ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if lookup fails
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
932
932
@command('debugindexdot', commands.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    # Emit one "parent -> child" edge per parent; a null second parent
    # is omitted.
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
946
946
@command('debuginstall', [] + commands.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # count of detected problems; also the command's exit status
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    # importing the C extension modules verifies they are usable
    err = None
    try:
        from . import (
            base85,
            bdiff,
            mpatch,
            osutil,
        )
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    # p was cleared above when the default template is missing or broken
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    # 'vi' is the built-in fallback, so it missing means "no editor set"
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1112
1112
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    # The 'known' wire command is required for this debug helper.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Query all ids at once; one boolean comes back per requested node.
    known = peer.known([bin(hexnode) for hexnode in ids])
    digits = ["1" if flag else "0" for flag in known]
    ui.write("%s\n" % "".join(digits))
1125
1125
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; delegates to the replacement command
    commands.debugnamecomplete(ui, repo, *args)
1130
1130
1131 @command('debuglocks',
1131 @command('debuglocks',
1132 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1132 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 ('W', 'force-wlock', None,
1133 ('W', 'force-wlock', None,
1134 _('free the working state lock (DANGEROUS)'))],
1134 _('free the working state lock (DANGEROUS)'))],
1135 _('[OPTION]...'))
1135 _('[OPTION]...'))
1136 def debuglocks(ui, repo, **opts):
1136 def debuglocks(ui, repo, **opts):
1137 """show or modify state of locks
1137 """show or modify state of locks
1138
1138
1139 By default, this command will show which locks are held. This
1139 By default, this command will show which locks are held. This
1140 includes the user and process holding the lock, the amount of time
1140 includes the user and process holding the lock, the amount of time
1141 the lock has been held, and the machine name where the process is
1141 the lock has been held, and the machine name where the process is
1142 running if it's not local.
1142 running if it's not local.
1143
1143
1144 Locks protect the integrity of Mercurial's data, so should be
1144 Locks protect the integrity of Mercurial's data, so should be
1145 treated with care. System crashes or other interruptions may cause
1145 treated with care. System crashes or other interruptions may cause
1146 locks to not be properly released, though Mercurial will usually
1146 locks to not be properly released, though Mercurial will usually
1147 detect and remove such stale locks automatically.
1147 detect and remove such stale locks automatically.
1148
1148
1149 However, detecting stale locks may not always be possible (for
1149 However, detecting stale locks may not always be possible (for
1150 instance, on a shared filesystem). Removing locks may also be
1150 instance, on a shared filesystem). Removing locks may also be
1151 blocked by filesystem permissions.
1151 blocked by filesystem permissions.
1152
1152
1153 Returns 0 if no locks are held.
1153 Returns 0 if no locks are held.
1154
1154
1155 """
1155 """
1156
1156
1157 if opts.get('force_lock'):
1157 if opts.get('force_lock'):
1158 repo.svfs.unlink('lock')
1158 repo.svfs.unlink('lock')
1159 if opts.get('force_wlock'):
1159 if opts.get('force_wlock'):
1160 repo.vfs.unlink('wlock')
1160 repo.vfs.unlink('wlock')
1161 if opts.get('force_lock') or opts.get('force_lock'):
1161 if opts.get('force_lock') or opts.get('force_lock'):
1162 return 0
1162 return 0
1163
1163
1164 now = time.time()
1164 now = time.time()
1165 held = 0
1165 held = 0
1166
1166
1167 def report(vfs, name, method):
1167 def report(vfs, name, method):
1168 # this causes stale locks to get reaped for more accurate reporting
1168 # this causes stale locks to get reaped for more accurate reporting
1169 try:
1169 try:
1170 l = method(False)
1170 l = method(False)
1171 except error.LockHeld:
1171 except error.LockHeld:
1172 l = None
1172 l = None
1173
1173
1174 if l:
1174 if l:
1175 l.release()
1175 l.release()
1176 else:
1176 else:
1177 try:
1177 try:
1178 stat = vfs.lstat(name)
1178 stat = vfs.lstat(name)
1179 age = now - stat.st_mtime
1179 age = now - stat.st_mtime
1180 user = util.username(stat.st_uid)
1180 user = util.username(stat.st_uid)
1181 locker = vfs.readlock(name)
1181 locker = vfs.readlock(name)
1182 if ":" in locker:
1182 if ":" in locker:
1183 host, pid = locker.split(':')
1183 host, pid = locker.split(':')
1184 if host == socket.gethostname():
1184 if host == socket.gethostname():
1185 locker = 'user %s, process %s' % (user, pid)
1185 locker = 'user %s, process %s' % (user, pid)
1186 else:
1186 else:
1187 locker = 'user %s, process %s, host %s' \
1187 locker = 'user %s, process %s, host %s' \
1188 % (user, pid, host)
1188 % (user, pid, host)
1189 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1189 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 return 1
1190 return 1
1191 except OSError as e:
1191 except OSError as e:
1192 if e.errno != errno.ENOENT:
1192 if e.errno != errno.ENOENT:
1193 raise
1193 raise
1194
1194
1195 ui.write(("%-6s free\n") % (name + ":"))
1195 ui.write(("%-6s free\n") % (name + ":"))
1196 return 0
1196 return 0
1197
1197
1198 held += report(repo.svfs, "lock", repo.lock)
1198 held += report(repo.svfs, "lock", repo.lock)
1199 held += report(repo.vfs, "wlock", repo.wlock)
1199 held += report(repo.vfs, "wlock", repo.wlock)
1200
1200
1201 return held
1201 return held
1202
1202
1203 @command('debugmergestate', [], '')
1203 @command('debugmergestate', [], '')
1204 def debugmergestate(ui, repo, *args):
1204 def debugmergestate(ui, repo, *args):
1205 """print merge state
1205 """print merge state
1206
1206
1207 Use --verbose to print out information about whether v1 or v2 merge state
1207 Use --verbose to print out information about whether v1 or v2 merge state
1208 was chosen."""
1208 was chosen."""
1209 def _hashornull(h):
1209 def _hashornull(h):
1210 if h == nullhex:
1210 if h == nullhex:
1211 return 'null'
1211 return 'null'
1212 else:
1212 else:
1213 return h
1213 return h
1214
1214
1215 def printrecords(version):
1215 def printrecords(version):
1216 ui.write(('* version %s records\n') % version)
1216 ui.write(('* version %s records\n') % version)
1217 if version == 1:
1217 if version == 1:
1218 records = v1records
1218 records = v1records
1219 else:
1219 else:
1220 records = v2records
1220 records = v2records
1221
1221
1222 for rtype, record in records:
1222 for rtype, record in records:
1223 # pretty print some record types
1223 # pretty print some record types
1224 if rtype == 'L':
1224 if rtype == 'L':
1225 ui.write(('local: %s\n') % record)
1225 ui.write(('local: %s\n') % record)
1226 elif rtype == 'O':
1226 elif rtype == 'O':
1227 ui.write(('other: %s\n') % record)
1227 ui.write(('other: %s\n') % record)
1228 elif rtype == 'm':
1228 elif rtype == 'm':
1229 driver, mdstate = record.split('\0', 1)
1229 driver, mdstate = record.split('\0', 1)
1230 ui.write(('merge driver: %s (state "%s")\n')
1230 ui.write(('merge driver: %s (state "%s")\n')
1231 % (driver, mdstate))
1231 % (driver, mdstate))
1232 elif rtype in 'FDC':
1232 elif rtype in 'FDC':
1233 r = record.split('\0')
1233 r = record.split('\0')
1234 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1234 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 if version == 1:
1235 if version == 1:
1236 onode = 'not stored in v1 format'
1236 onode = 'not stored in v1 format'
1237 flags = r[7]
1237 flags = r[7]
1238 else:
1238 else:
1239 onode, flags = r[7:9]
1239 onode, flags = r[7:9]
1240 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1240 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 % (f, rtype, state, _hashornull(hash)))
1241 % (f, rtype, state, _hashornull(hash)))
1242 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1242 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 ui.write((' ancestor path: %s (node %s)\n')
1243 ui.write((' ancestor path: %s (node %s)\n')
1244 % (afile, _hashornull(anode)))
1244 % (afile, _hashornull(anode)))
1245 ui.write((' other path: %s (node %s)\n')
1245 ui.write((' other path: %s (node %s)\n')
1246 % (ofile, _hashornull(onode)))
1246 % (ofile, _hashornull(onode)))
1247 elif rtype == 'f':
1247 elif rtype == 'f':
1248 filename, rawextras = record.split('\0', 1)
1248 filename, rawextras = record.split('\0', 1)
1249 extras = rawextras.split('\0')
1249 extras = rawextras.split('\0')
1250 i = 0
1250 i = 0
1251 extrastrings = []
1251 extrastrings = []
1252 while i < len(extras):
1252 while i < len(extras):
1253 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1253 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 i += 2
1254 i += 2
1255
1255
1256 ui.write(('file extras: %s (%s)\n')
1256 ui.write(('file extras: %s (%s)\n')
1257 % (filename, ', '.join(extrastrings)))
1257 % (filename, ', '.join(extrastrings)))
1258 elif rtype == 'l':
1258 elif rtype == 'l':
1259 labels = record.split('\0', 2)
1259 labels = record.split('\0', 2)
1260 labels = [l for l in labels if len(l) > 0]
1260 labels = [l for l in labels if len(l) > 0]
1261 ui.write(('labels:\n'))
1261 ui.write(('labels:\n'))
1262 ui.write((' local: %s\n' % labels[0]))
1262 ui.write((' local: %s\n' % labels[0]))
1263 ui.write((' other: %s\n' % labels[1]))
1263 ui.write((' other: %s\n' % labels[1]))
1264 if len(labels) > 2:
1264 if len(labels) > 2:
1265 ui.write((' base: %s\n' % labels[2]))
1265 ui.write((' base: %s\n' % labels[2]))
1266 else:
1266 else:
1267 ui.write(('unrecognized entry: %s\t%s\n')
1267 ui.write(('unrecognized entry: %s\t%s\n')
1268 % (rtype, record.replace('\0', '\t')))
1268 % (rtype, record.replace('\0', '\t')))
1269
1269
1270 # Avoid mergestate.read() since it may raise an exception for unsupported
1270 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 # merge state records. We shouldn't be doing this, but this is OK since this
1271 # merge state records. We shouldn't be doing this, but this is OK since this
1272 # command is pretty low-level.
1272 # command is pretty low-level.
1273 ms = mergemod.mergestate(repo)
1273 ms = mergemod.mergestate(repo)
1274
1274
1275 # sort so that reasonable information is on top
1275 # sort so that reasonable information is on top
1276 v1records = ms._readrecordsv1()
1276 v1records = ms._readrecordsv1()
1277 v2records = ms._readrecordsv2()
1277 v2records = ms._readrecordsv2()
1278 order = 'LOml'
1278 order = 'LOml'
1279 def key(r):
1279 def key(r):
1280 idx = order.find(r[0])
1280 idx = order.find(r[0])
1281 if idx == -1:
1281 if idx == -1:
1282 return (1, r[1])
1282 return (1, r[1])
1283 else:
1283 else:
1284 return (0, idx)
1284 return (0, idx)
1285 v1records.sort(key=key)
1285 v1records.sort(key=key)
1286 v2records.sort(key=key)
1286 v2records.sort(key=key)
1287
1287
1288 if not v1records and not v2records:
1288 if not v1records and not v2records:
1289 ui.write(('no merge state found\n'))
1289 ui.write(('no merge state found\n'))
1290 elif not v2records:
1290 elif not v2records:
1291 ui.note(('no version 2 merge state\n'))
1291 ui.note(('no version 2 merge state\n'))
1292 printrecords(1)
1292 printrecords(1)
1293 elif ms._v1v2match(v1records, v2records):
1293 elif ms._v1v2match(v1records, v2records):
1294 ui.note(('v1 and v2 states match: using v2\n'))
1294 ui.note(('v1 and v2 states match: using v2\n'))
1295 printrecords(2)
1295 printrecords(2)
1296 else:
1296 else:
1297 ui.note(('v1 and v2 states mismatch: using v1\n'))
1297 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 printrecords(1)
1298 printrecords(1)
1299 if ui.verbose:
1299 if ui.verbose:
1300 printrecords(2)
1300 printrecords(2)
1301
1301
1302 @command('debugnamecomplete', [], _('NAME...'))
1302 @command('debugnamecomplete', [], _('NAME...'))
1303 def debugnamecomplete(ui, repo, *args):
1303 def debugnamecomplete(ui, repo, *args):
1304 '''complete "names" - tags, open branch names, bookmark names'''
1304 '''complete "names" - tags, open branch names, bookmark names'''
1305
1305
1306 names = set()
1306 names = set()
1307 # since we previously only listed open branches, we will handle that
1307 # since we previously only listed open branches, we will handle that
1308 # specially (after this for loop)
1308 # specially (after this for loop)
1309 for name, ns in repo.names.iteritems():
1309 for name, ns in repo.names.iteritems():
1310 if name != 'branches':
1310 if name != 'branches':
1311 names.update(ns.listnames(repo))
1311 names.update(ns.listnames(repo))
1312 names.update(tag for (tag, heads, tip, closed)
1312 names.update(tag for (tag, heads, tip, closed)
1313 in repo.branchmap().iterbranches() if not closed)
1313 in repo.branchmap().iterbranches() if not closed)
1314 completions = set()
1314 completions = set()
1315 if not args:
1315 if not args:
1316 args = ['']
1316 args = ['']
1317 for a in args:
1317 for a in args:
1318 completions.update(n for n in names if n.startswith(a))
1318 completions.update(n for n in names if n.startswith(a))
1319 ui.write('\n'.join(sorted(completions)))
1319 ui.write('\n'.join(sorted(completions)))
1320 ui.write('\n')
1320 ui.write('\n')
1321
1321
1322 @command('debugobsolete',
1322 @command('debugobsolete',
1323 [('', 'flags', 0, _('markers flag')),
1323 [('', 'flags', 0, _('markers flag')),
1324 ('', 'record-parents', False,
1324 ('', 'record-parents', False,
1325 _('record parent information for the precursor')),
1325 _('record parent information for the precursor')),
1326 ('r', 'rev', [], _('display markers relevant to REV')),
1326 ('r', 'rev', [], _('display markers relevant to REV')),
1327 ('', 'index', False, _('display index of the marker')),
1327 ('', 'index', False, _('display index of the marker')),
1328 ('', 'delete', [], _('delete markers specified by indices')),
1328 ('', 'delete', [], _('delete markers specified by indices')),
1329 ] + commands.commitopts2 + commands.formatteropts,
1329 ] + commands.commitopts2 + commands.formatteropts,
1330 _('[OBSOLETED [REPLACEMENT ...]]'))
1330 _('[OBSOLETED [REPLACEMENT ...]]'))
1331 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1331 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1332 """create arbitrary obsolete marker
1332 """create arbitrary obsolete marker
1333
1333
1334 With no arguments, displays the list of obsolescence markers."""
1334 With no arguments, displays the list of obsolescence markers."""
1335
1335
1336 def parsenodeid(s):
1336 def parsenodeid(s):
1337 try:
1337 try:
1338 # We do not use revsingle/revrange functions here to accept
1338 # We do not use revsingle/revrange functions here to accept
1339 # arbitrary node identifiers, possibly not present in the
1339 # arbitrary node identifiers, possibly not present in the
1340 # local repository.
1340 # local repository.
1341 n = bin(s)
1341 n = bin(s)
1342 if len(n) != len(nullid):
1342 if len(n) != len(nullid):
1343 raise TypeError()
1343 raise TypeError()
1344 return n
1344 return n
1345 except TypeError:
1345 except TypeError:
1346 raise error.Abort('changeset references must be full hexadecimal '
1346 raise error.Abort('changeset references must be full hexadecimal '
1347 'node identifiers')
1347 'node identifiers')
1348
1348
1349 if opts.get('delete'):
1349 if opts.get('delete'):
1350 indices = []
1350 indices = []
1351 for v in opts.get('delete'):
1351 for v in opts.get('delete'):
1352 try:
1352 try:
1353 indices.append(int(v))
1353 indices.append(int(v))
1354 except ValueError:
1354 except ValueError:
1355 raise error.Abort(_('invalid index value: %r') % v,
1355 raise error.Abort(_('invalid index value: %r') % v,
1356 hint=_('use integers for indices'))
1356 hint=_('use integers for indices'))
1357
1357
1358 if repo.currenttransaction():
1358 if repo.currenttransaction():
1359 raise error.Abort(_('cannot delete obsmarkers in the middle '
1359 raise error.Abort(_('cannot delete obsmarkers in the middle '
1360 'of transaction.'))
1360 'of transaction.'))
1361
1361
1362 with repo.lock():
1362 with repo.lock():
1363 n = repair.deleteobsmarkers(repo.obsstore, indices)
1363 n = repair.deleteobsmarkers(repo.obsstore, indices)
1364 ui.write(_('deleted %i obsolescence markers\n') % n)
1364 ui.write(_('deleted %i obsolescence markers\n') % n)
1365
1365
1366 return
1366 return
1367
1367
1368 if precursor is not None:
1368 if precursor is not None:
1369 if opts['rev']:
1369 if opts['rev']:
1370 raise error.Abort('cannot select revision when creating marker')
1370 raise error.Abort('cannot select revision when creating marker')
1371 metadata = {}
1371 metadata = {}
1372 metadata['user'] = opts['user'] or ui.username()
1372 metadata['user'] = opts['user'] or ui.username()
1373 succs = tuple(parsenodeid(succ) for succ in successors)
1373 succs = tuple(parsenodeid(succ) for succ in successors)
1374 l = repo.lock()
1374 l = repo.lock()
1375 try:
1375 try:
1376 tr = repo.transaction('debugobsolete')
1376 tr = repo.transaction('debugobsolete')
1377 try:
1377 try:
1378 date = opts.get('date')
1378 date = opts.get('date')
1379 if date:
1379 if date:
1380 date = util.parsedate(date)
1380 date = util.parsedate(date)
1381 else:
1381 else:
1382 date = None
1382 date = None
1383 prec = parsenodeid(precursor)
1383 prec = parsenodeid(precursor)
1384 parents = None
1384 parents = None
1385 if opts['record_parents']:
1385 if opts['record_parents']:
1386 if prec not in repo.unfiltered():
1386 if prec not in repo.unfiltered():
1387 raise error.Abort('cannot used --record-parents on '
1387 raise error.Abort('cannot used --record-parents on '
1388 'unknown changesets')
1388 'unknown changesets')
1389 parents = repo.unfiltered()[prec].parents()
1389 parents = repo.unfiltered()[prec].parents()
1390 parents = tuple(p.node() for p in parents)
1390 parents = tuple(p.node() for p in parents)
1391 repo.obsstore.create(tr, prec, succs, opts['flags'],
1391 repo.obsstore.create(tr, prec, succs, opts['flags'],
1392 parents=parents, date=date,
1392 parents=parents, date=date,
1393 metadata=metadata)
1393 metadata=metadata)
1394 tr.close()
1394 tr.close()
1395 except ValueError as exc:
1395 except ValueError as exc:
1396 raise error.Abort(_('bad obsmarker input: %s') % exc)
1396 raise error.Abort(_('bad obsmarker input: %s') % exc)
1397 finally:
1397 finally:
1398 tr.release()
1398 tr.release()
1399 finally:
1399 finally:
1400 l.release()
1400 l.release()
1401 else:
1401 else:
1402 if opts['rev']:
1402 if opts['rev']:
1403 revs = scmutil.revrange(repo, opts['rev'])
1403 revs = scmutil.revrange(repo, opts['rev'])
1404 nodes = [repo[r].node() for r in revs]
1404 nodes = [repo[r].node() for r in revs]
1405 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1405 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1406 markers.sort(key=lambda x: x._data)
1406 markers.sort(key=lambda x: x._data)
1407 else:
1407 else:
1408 markers = obsolete.getmarkers(repo)
1408 markers = obsolete.getmarkers(repo)
1409
1409
1410 markerstoiter = markers
1410 markerstoiter = markers
1411 isrelevant = lambda m: True
1411 isrelevant = lambda m: True
1412 if opts.get('rev') and opts.get('index'):
1412 if opts.get('rev') and opts.get('index'):
1413 markerstoiter = obsolete.getmarkers(repo)
1413 markerstoiter = obsolete.getmarkers(repo)
1414 markerset = set(markers)
1414 markerset = set(markers)
1415 isrelevant = lambda m: m in markerset
1415 isrelevant = lambda m: m in markerset
1416
1416
1417 fm = ui.formatter('debugobsolete', opts)
1417 fm = ui.formatter('debugobsolete', opts)
1418 for i, m in enumerate(markerstoiter):
1418 for i, m in enumerate(markerstoiter):
1419 if not isrelevant(m):
1419 if not isrelevant(m):
1420 # marker can be irrelevant when we're iterating over a set
1420 # marker can be irrelevant when we're iterating over a set
1421 # of markers (markerstoiter) which is bigger than the set
1421 # of markers (markerstoiter) which is bigger than the set
1422 # of markers we want to display (markers)
1422 # of markers we want to display (markers)
1423 # this can happen if both --index and --rev options are
1423 # this can happen if both --index and --rev options are
1424 # provided and thus we need to iterate over all of the markers
1424 # provided and thus we need to iterate over all of the markers
1425 # to get the correct indices, but only display the ones that
1425 # to get the correct indices, but only display the ones that
1426 # are relevant to --rev value
1426 # are relevant to --rev value
1427 continue
1427 continue
1428 fm.startitem()
1428 fm.startitem()
1429 ind = i if opts.get('index') else None
1429 ind = i if opts.get('index') else None
1430 cmdutil.showmarker(fm, m, index=ind)
1430 cmdutil.showmarker(fm, m, index=ind)
1431 fm.end()
1431 fm.end()
1432
1432
1433 @command('debugpathcomplete',
1433 @command('debugpathcomplete',
1434 [('f', 'full', None, _('complete an entire path')),
1434 [('f', 'full', None, _('complete an entire path')),
1435 ('n', 'normal', None, _('show only normal files')),
1435 ('n', 'normal', None, _('show only normal files')),
1436 ('a', 'added', None, _('show only added files')),
1436 ('a', 'added', None, _('show only added files')),
1437 ('r', 'removed', None, _('show only removed files'))],
1437 ('r', 'removed', None, _('show only removed files'))],
1438 _('FILESPEC...'))
1438 _('FILESPEC...'))
1439 def debugpathcomplete(ui, repo, *specs, **opts):
1439 def debugpathcomplete(ui, repo, *specs, **opts):
1440 '''complete part or all of a tracked path
1440 '''complete part or all of a tracked path
1441
1441
1442 This command supports shells that offer path name completion. It
1442 This command supports shells that offer path name completion. It
1443 currently completes only files already known to the dirstate.
1443 currently completes only files already known to the dirstate.
1444
1444
1445 Completion extends only to the next path segment unless
1445 Completion extends only to the next path segment unless
1446 --full is specified, in which case entire paths are used.'''
1446 --full is specified, in which case entire paths are used.'''
1447
1447
1448 def complete(path, acceptable):
1448 def complete(path, acceptable):
1449 dirstate = repo.dirstate
1449 dirstate = repo.dirstate
1450 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1450 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1451 rootdir = repo.root + pycompat.ossep
1451 rootdir = repo.root + pycompat.ossep
1452 if spec != repo.root and not spec.startswith(rootdir):
1452 if spec != repo.root and not spec.startswith(rootdir):
1453 return [], []
1453 return [], []
1454 if os.path.isdir(spec):
1454 if os.path.isdir(spec):
1455 spec += '/'
1455 spec += '/'
1456 spec = spec[len(rootdir):]
1456 spec = spec[len(rootdir):]
1457 fixpaths = pycompat.ossep != '/'
1457 fixpaths = pycompat.ossep != '/'
1458 if fixpaths:
1458 if fixpaths:
1459 spec = spec.replace(pycompat.ossep, '/')
1459 spec = spec.replace(pycompat.ossep, '/')
1460 speclen = len(spec)
1460 speclen = len(spec)
1461 fullpaths = opts['full']
1461 fullpaths = opts['full']
1462 files, dirs = set(), set()
1462 files, dirs = set(), set()
1463 adddir, addfile = dirs.add, files.add
1463 adddir, addfile = dirs.add, files.add
1464 for f, st in dirstate.iteritems():
1464 for f, st in dirstate.iteritems():
1465 if f.startswith(spec) and st[0] in acceptable:
1465 if f.startswith(spec) and st[0] in acceptable:
1466 if fixpaths:
1466 if fixpaths:
1467 f = f.replace('/', pycompat.ossep)
1467 f = f.replace('/', pycompat.ossep)
1468 if fullpaths:
1468 if fullpaths:
1469 addfile(f)
1469 addfile(f)
1470 continue
1470 continue
1471 s = f.find(pycompat.ossep, speclen)
1471 s = f.find(pycompat.ossep, speclen)
1472 if s >= 0:
1472 if s >= 0:
1473 adddir(f[:s])
1473 adddir(f[:s])
1474 else:
1474 else:
1475 addfile(f)
1475 addfile(f)
1476 return files, dirs
1476 return files, dirs
1477
1477
1478 acceptable = ''
1478 acceptable = ''
1479 if opts['normal']:
1479 if opts['normal']:
1480 acceptable += 'nm'
1480 acceptable += 'nm'
1481 if opts['added']:
1481 if opts['added']:
1482 acceptable += 'a'
1482 acceptable += 'a'
1483 if opts['removed']:
1483 if opts['removed']:
1484 acceptable += 'r'
1484 acceptable += 'r'
1485 cwd = repo.getcwd()
1485 cwd = repo.getcwd()
1486 if not specs:
1486 if not specs:
1487 specs = ['.']
1487 specs = ['.']
1488
1488
1489 files, dirs = set(), set()
1489 files, dirs = set(), set()
1490 for spec in specs:
1490 for spec in specs:
1491 f, d = complete(spec, acceptable or 'nmar')
1491 f, d = complete(spec, acceptable or 'nmar')
1492 files.update(f)
1492 files.update(f)
1493 dirs.update(d)
1493 dirs.update(d)
1494 files.update(dirs)
1494 files.update(dirs)
1495 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1495 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1496 ui.write('\n')
1496 ui.write('\n')
1497
1497
1498 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1498 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1499 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1499 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1500 '''access the pushkey key/value protocol
1500 '''access the pushkey key/value protocol
1501
1501
1502 With two args, list the keys in the given namespace.
1502 With two args, list the keys in the given namespace.
1503
1503
1504 With five args, set a key to new if it currently is set to old.
1504 With five args, set a key to new if it currently is set to old.
1505 Reports success or failure.
1505 Reports success or failure.
1506 '''
1506 '''
1507
1507
1508 target = hg.peer(ui, {}, repopath)
1508 target = hg.peer(ui, {}, repopath)
1509 if keyinfo:
1509 if keyinfo:
1510 key, old, new = keyinfo
1510 key, old, new = keyinfo
1511 r = target.pushkey(namespace, key, old, new)
1511 r = target.pushkey(namespace, key, old, new)
1512 ui.status(str(r) + '\n')
1512 ui.status(str(r) + '\n')
1513 return not r
1513 return not r
1514 else:
1514 else:
1515 for k, v in sorted(target.listkeys(namespace).iteritems()):
1515 for k, v in sorted(target.listkeys(namespace).iteritems()):
1516 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1516 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1517 v.encode('string-escape')))
1517 v.encode('string-escape')))
1518
1518
1519 @command('debugpvec', [], _('A B'))
1519 @command('debugpvec', [], _('A B'))
1520 def debugpvec(ui, repo, a, b=None):
1520 def debugpvec(ui, repo, a, b=None):
1521 ca = scmutil.revsingle(repo, a)
1521 ca = scmutil.revsingle(repo, a)
1522 cb = scmutil.revsingle(repo, b)
1522 cb = scmutil.revsingle(repo, b)
1523 pa = pvec.ctxpvec(ca)
1523 pa = pvec.ctxpvec(ca)
1524 pb = pvec.ctxpvec(cb)
1524 pb = pvec.ctxpvec(cb)
1525 if pa == pb:
1525 if pa == pb:
1526 rel = "="
1526 rel = "="
1527 elif pa > pb:
1527 elif pa > pb:
1528 rel = ">"
1528 rel = ">"
1529 elif pa < pb:
1529 elif pa < pb:
1530 rel = "<"
1530 rel = "<"
1531 elif pa | pb:
1531 elif pa | pb:
1532 rel = "|"
1532 rel = "|"
1533 ui.write(_("a: %s\n") % pa)
1533 ui.write(_("a: %s\n") % pa)
1534 ui.write(_("b: %s\n") % pb)
1534 ui.write(_("b: %s\n") % pb)
1535 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1535 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1536 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1536 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1537 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1537 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1538 pa.distance(pb), rel))
1538 pa.distance(pb), rel))
1539
1539
1540 @command('debugrebuilddirstate|debugrebuildstate',
1540 @command('debugrebuilddirstate|debugrebuildstate',
1541 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1541 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1542 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1542 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1543 'the working copy parent')),
1543 'the working copy parent')),
1544 ],
1544 ],
1545 _('[-r REV]'))
1545 _('[-r REV]'))
1546 def debugrebuilddirstate(ui, repo, rev, **opts):
1546 def debugrebuilddirstate(ui, repo, rev, **opts):
1547 """rebuild the dirstate as it would look like for the given revision
1547 """rebuild the dirstate as it would look like for the given revision
1548
1548
1549 If no revision is specified the first current parent will be used.
1549 If no revision is specified the first current parent will be used.
1550
1550
1551 The dirstate will be set to the files of the given revision.
1551 The dirstate will be set to the files of the given revision.
1552 The actual working directory content or existing dirstate
1552 The actual working directory content or existing dirstate
1553 information such as adds or removes is not considered.
1553 information such as adds or removes is not considered.
1554
1554
1555 ``minimal`` will only rebuild the dirstate status for files that claim to be
1555 ``minimal`` will only rebuild the dirstate status for files that claim to be
1556 tracked but are not in the parent manifest, or that exist in the parent
1556 tracked but are not in the parent manifest, or that exist in the parent
1557 manifest but are not in the dirstate. It will not change adds, removes, or
1557 manifest but are not in the dirstate. It will not change adds, removes, or
1558 modified files that are in the working copy parent.
1558 modified files that are in the working copy parent.
1559
1559
1560 One use of this command is to make the next :hg:`status` invocation
1560 One use of this command is to make the next :hg:`status` invocation
1561 check the actual file content.
1561 check the actual file content.
1562 """
1562 """
1563 ctx = scmutil.revsingle(repo, rev)
1563 ctx = scmutil.revsingle(repo, rev)
1564 with repo.wlock():
1564 with repo.wlock():
1565 dirstate = repo.dirstate
1565 dirstate = repo.dirstate
1566 changedfiles = None
1566 changedfiles = None
1567 # See command doc for what minimal does.
1567 # See command doc for what minimal does.
1568 if opts.get('minimal'):
1568 if opts.get('minimal'):
1569 manifestfiles = set(ctx.manifest().keys())
1569 manifestfiles = set(ctx.manifest().keys())
1570 dirstatefiles = set(dirstate)
1570 dirstatefiles = set(dirstate)
1571 manifestonly = manifestfiles - dirstatefiles
1571 manifestonly = manifestfiles - dirstatefiles
1572 dsonly = dirstatefiles - manifestfiles
1572 dsonly = dirstatefiles - manifestfiles
1573 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1573 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1574 changedfiles = manifestonly | dsnotadded
1574 changedfiles = manifestonly | dsnotadded
1575
1575
1576 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1576 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1577
1577
1578 @command('debugrebuildfncache', [], '')
1578 @command('debugrebuildfncache', [], '')
1579 def debugrebuildfncache(ui, repo):
1579 def debugrebuildfncache(ui, repo):
1580 """rebuild the fncache file"""
1580 """rebuild the fncache file"""
1581 repair.rebuildfncache(ui, repo)
1581 repair.rebuildfncache(ui, repo)
1582
1582
1583 @command('debugrename',
1583 @command('debugrename',
1584 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1584 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1585 _('[-r REV] FILE'))
1585 _('[-r REV] FILE'))
1586 def debugrename(ui, repo, file1, *pats, **opts):
1586 def debugrename(ui, repo, file1, *pats, **opts):
1587 """dump rename information"""
1587 """dump rename information"""
1588
1588
1589 ctx = scmutil.revsingle(repo, opts.get('rev'))
1589 ctx = scmutil.revsingle(repo, opts.get('rev'))
1590 m = scmutil.match(ctx, (file1,) + pats, opts)
1590 m = scmutil.match(ctx, (file1,) + pats, opts)
1591 for abs in ctx.walk(m):
1591 for abs in ctx.walk(m):
1592 fctx = ctx[abs]
1592 fctx = ctx[abs]
1593 o = fctx.filelog().renamed(fctx.filenode())
1593 o = fctx.filelog().renamed(fctx.filenode())
1594 rel = m.rel(abs)
1594 rel = m.rel(abs)
1595 if o:
1595 if o:
1596 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1596 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1597 else:
1597 else:
1598 ui.write(_("%s not renamed\n") % rel)
1598 ui.write(_("%s not renamed\n") % rel)
1599
1599
1600 @command('debugrevlog', commands.debugrevlogopts +
1600 @command('debugrevlog', commands.debugrevlogopts +
1601 [('d', 'dump', False, _('dump index data'))],
1601 [('d', 'dump', False, _('dump index data'))],
1602 _('-c|-m|FILE'),
1602 _('-c|-m|FILE'),
1603 optionalrepo=True)
1603 optionalrepo=True)
1604 def debugrevlog(ui, repo, file_=None, **opts):
1604 def debugrevlog(ui, repo, file_=None, **opts):
1605 """show data and statistics about a revlog"""
1605 """show data and statistics about a revlog"""
1606 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1606 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1607
1607
1608 if opts.get("dump"):
1608 if opts.get("dump"):
1609 numrevs = len(r)
1609 numrevs = len(r)
1610 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1610 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1611 " rawsize totalsize compression heads chainlen\n"))
1611 " rawsize totalsize compression heads chainlen\n"))
1612 ts = 0
1612 ts = 0
1613 heads = set()
1613 heads = set()
1614
1614
1615 for rev in xrange(numrevs):
1615 for rev in xrange(numrevs):
1616 dbase = r.deltaparent(rev)
1616 dbase = r.deltaparent(rev)
1617 if dbase == -1:
1617 if dbase == -1:
1618 dbase = rev
1618 dbase = rev
1619 cbase = r.chainbase(rev)
1619 cbase = r.chainbase(rev)
1620 clen = r.chainlen(rev)
1620 clen = r.chainlen(rev)
1621 p1, p2 = r.parentrevs(rev)
1621 p1, p2 = r.parentrevs(rev)
1622 rs = r.rawsize(rev)
1622 rs = r.rawsize(rev)
1623 ts = ts + rs
1623 ts = ts + rs
1624 heads -= set(r.parentrevs(rev))
1624 heads -= set(r.parentrevs(rev))
1625 heads.add(rev)
1625 heads.add(rev)
1626 try:
1626 try:
1627 compression = ts / r.end(rev)
1627 compression = ts / r.end(rev)
1628 except ZeroDivisionError:
1628 except ZeroDivisionError:
1629 compression = 0
1629 compression = 0
1630 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1630 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1631 "%11d %5d %8d\n" %
1631 "%11d %5d %8d\n" %
1632 (rev, p1, p2, r.start(rev), r.end(rev),
1632 (rev, p1, p2, r.start(rev), r.end(rev),
1633 r.start(dbase), r.start(cbase),
1633 r.start(dbase), r.start(cbase),
1634 r.start(p1), r.start(p2),
1634 r.start(p1), r.start(p2),
1635 rs, ts, compression, len(heads), clen))
1635 rs, ts, compression, len(heads), clen))
1636 return 0
1636 return 0
1637
1637
1638 v = r.version
1638 v = r.version
1639 format = v & 0xFFFF
1639 format = v & 0xFFFF
1640 flags = []
1640 flags = []
1641 gdelta = False
1641 gdelta = False
1642 if v & revlog.REVLOGNGINLINEDATA:
1642 if v & revlog.REVLOGNGINLINEDATA:
1643 flags.append('inline')
1643 flags.append('inline')
1644 if v & revlog.REVLOGGENERALDELTA:
1644 if v & revlog.REVLOGGENERALDELTA:
1645 gdelta = True
1645 gdelta = True
1646 flags.append('generaldelta')
1646 flags.append('generaldelta')
1647 if not flags:
1647 if not flags:
1648 flags = ['(none)']
1648 flags = ['(none)']
1649
1649
1650 nummerges = 0
1650 nummerges = 0
1651 numfull = 0
1651 numfull = 0
1652 numprev = 0
1652 numprev = 0
1653 nump1 = 0
1653 nump1 = 0
1654 nump2 = 0
1654 nump2 = 0
1655 numother = 0
1655 numother = 0
1656 nump1prev = 0
1656 nump1prev = 0
1657 nump2prev = 0
1657 nump2prev = 0
1658 chainlengths = []
1658 chainlengths = []
1659
1659
1660 datasize = [None, 0, 0]
1660 datasize = [None, 0, 0]
1661 fullsize = [None, 0, 0]
1661 fullsize = [None, 0, 0]
1662 deltasize = [None, 0, 0]
1662 deltasize = [None, 0, 0]
1663 chunktypecounts = {}
1663 chunktypecounts = {}
1664 chunktypesizes = {}
1664 chunktypesizes = {}
1665
1665
1666 def addsize(size, l):
1666 def addsize(size, l):
1667 if l[0] is None or size < l[0]:
1667 if l[0] is None or size < l[0]:
1668 l[0] = size
1668 l[0] = size
1669 if size > l[1]:
1669 if size > l[1]:
1670 l[1] = size
1670 l[1] = size
1671 l[2] += size
1671 l[2] += size
1672
1672
1673 numrevs = len(r)
1673 numrevs = len(r)
1674 for rev in xrange(numrevs):
1674 for rev in xrange(numrevs):
1675 p1, p2 = r.parentrevs(rev)
1675 p1, p2 = r.parentrevs(rev)
1676 delta = r.deltaparent(rev)
1676 delta = r.deltaparent(rev)
1677 if format > 0:
1677 if format > 0:
1678 addsize(r.rawsize(rev), datasize)
1678 addsize(r.rawsize(rev), datasize)
1679 if p2 != nullrev:
1679 if p2 != nullrev:
1680 nummerges += 1
1680 nummerges += 1
1681 size = r.length(rev)
1681 size = r.length(rev)
1682 if delta == nullrev:
1682 if delta == nullrev:
1683 chainlengths.append(0)
1683 chainlengths.append(0)
1684 numfull += 1
1684 numfull += 1
1685 addsize(size, fullsize)
1685 addsize(size, fullsize)
1686 else:
1686 else:
1687 chainlengths.append(chainlengths[delta] + 1)
1687 chainlengths.append(chainlengths[delta] + 1)
1688 addsize(size, deltasize)
1688 addsize(size, deltasize)
1689 if delta == rev - 1:
1689 if delta == rev - 1:
1690 numprev += 1
1690 numprev += 1
1691 if delta == p1:
1691 if delta == p1:
1692 nump1prev += 1
1692 nump1prev += 1
1693 elif delta == p2:
1693 elif delta == p2:
1694 nump2prev += 1
1694 nump2prev += 1
1695 elif delta == p1:
1695 elif delta == p1:
1696 nump1 += 1
1696 nump1 += 1
1697 elif delta == p2:
1697 elif delta == p2:
1698 nump2 += 1
1698 nump2 += 1
1699 elif delta != nullrev:
1699 elif delta != nullrev:
1700 numother += 1
1700 numother += 1
1701
1701
1702 # Obtain data on the raw chunks in the revlog.
1702 # Obtain data on the raw chunks in the revlog.
1703 chunk = r._chunkraw(rev, rev)[1]
1703 chunk = r._chunkraw(rev, rev)[1]
1704 if chunk:
1704 if chunk:
1705 chunktype = chunk[0]
1705 chunktype = chunk[0]
1706 else:
1706 else:
1707 chunktype = 'empty'
1707 chunktype = 'empty'
1708
1708
1709 if chunktype not in chunktypecounts:
1709 if chunktype not in chunktypecounts:
1710 chunktypecounts[chunktype] = 0
1710 chunktypecounts[chunktype] = 0
1711 chunktypesizes[chunktype] = 0
1711 chunktypesizes[chunktype] = 0
1712
1712
1713 chunktypecounts[chunktype] += 1
1713 chunktypecounts[chunktype] += 1
1714 chunktypesizes[chunktype] += size
1714 chunktypesizes[chunktype] += size
1715
1715
1716 # Adjust size min value for empty cases
1716 # Adjust size min value for empty cases
1717 for size in (datasize, fullsize, deltasize):
1717 for size in (datasize, fullsize, deltasize):
1718 if size[0] is None:
1718 if size[0] is None:
1719 size[0] = 0
1719 size[0] = 0
1720
1720
1721 numdeltas = numrevs - numfull
1721 numdeltas = numrevs - numfull
1722 numoprev = numprev - nump1prev - nump2prev
1722 numoprev = numprev - nump1prev - nump2prev
1723 totalrawsize = datasize[2]
1723 totalrawsize = datasize[2]
1724 datasize[2] /= numrevs
1724 datasize[2] /= numrevs
1725 fulltotal = fullsize[2]
1725 fulltotal = fullsize[2]
1726 fullsize[2] /= numfull
1726 fullsize[2] /= numfull
1727 deltatotal = deltasize[2]
1727 deltatotal = deltasize[2]
1728 if numrevs - numfull > 0:
1728 if numrevs - numfull > 0:
1729 deltasize[2] /= numrevs - numfull
1729 deltasize[2] /= numrevs - numfull
1730 totalsize = fulltotal + deltatotal
1730 totalsize = fulltotal + deltatotal
1731 avgchainlen = sum(chainlengths) / numrevs
1731 avgchainlen = sum(chainlengths) / numrevs
1732 maxchainlen = max(chainlengths)
1732 maxchainlen = max(chainlengths)
1733 compratio = 1
1733 compratio = 1
1734 if totalsize:
1734 if totalsize:
1735 compratio = totalrawsize / totalsize
1735 compratio = totalrawsize / totalsize
1736
1736
1737 basedfmtstr = '%%%dd\n'
1737 basedfmtstr = '%%%dd\n'
1738 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1738 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1739
1739
1740 def dfmtstr(max):
1740 def dfmtstr(max):
1741 return basedfmtstr % len(str(max))
1741 return basedfmtstr % len(str(max))
1742 def pcfmtstr(max, padding=0):
1742 def pcfmtstr(max, padding=0):
1743 return basepcfmtstr % (len(str(max)), ' ' * padding)
1743 return basepcfmtstr % (len(str(max)), ' ' * padding)
1744
1744
1745 def pcfmt(value, total):
1745 def pcfmt(value, total):
1746 if total:
1746 if total:
1747 return (value, 100 * float(value) / total)
1747 return (value, 100 * float(value) / total)
1748 else:
1748 else:
1749 return value, 100.0
1749 return value, 100.0
1750
1750
1751 ui.write(('format : %d\n') % format)
1751 ui.write(('format : %d\n') % format)
1752 ui.write(('flags : %s\n') % ', '.join(flags))
1752 ui.write(('flags : %s\n') % ', '.join(flags))
1753
1753
1754 ui.write('\n')
1754 ui.write('\n')
1755 fmt = pcfmtstr(totalsize)
1755 fmt = pcfmtstr(totalsize)
1756 fmt2 = dfmtstr(totalsize)
1756 fmt2 = dfmtstr(totalsize)
1757 ui.write(('revisions : ') + fmt2 % numrevs)
1757 ui.write(('revisions : ') + fmt2 % numrevs)
1758 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1758 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1759 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1759 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1760 ui.write(('revisions : ') + fmt2 % numrevs)
1760 ui.write(('revisions : ') + fmt2 % numrevs)
1761 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1761 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1762 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1762 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1763 ui.write(('revision size : ') + fmt2 % totalsize)
1763 ui.write(('revision size : ') + fmt2 % totalsize)
1764 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1764 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1765 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1765 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1766
1766
1767 def fmtchunktype(chunktype):
1767 def fmtchunktype(chunktype):
1768 if chunktype == 'empty':
1768 if chunktype == 'empty':
1769 return ' %s : ' % chunktype
1769 return ' %s : ' % chunktype
1770 elif chunktype in string.ascii_letters:
1770 elif chunktype in string.ascii_letters:
1771 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1771 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1772 else:
1772 else:
1773 return ' 0x%s : ' % hex(chunktype)
1773 return ' 0x%s : ' % hex(chunktype)
1774
1774
1775 ui.write('\n')
1775 ui.write('\n')
1776 ui.write(('chunks : ') + fmt2 % numrevs)
1776 ui.write(('chunks : ') + fmt2 % numrevs)
1777 for chunktype in sorted(chunktypecounts):
1777 for chunktype in sorted(chunktypecounts):
1778 ui.write(fmtchunktype(chunktype))
1778 ui.write(fmtchunktype(chunktype))
1779 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1779 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1780 ui.write(('chunks size : ') + fmt2 % totalsize)
1780 ui.write(('chunks size : ') + fmt2 % totalsize)
1781 for chunktype in sorted(chunktypecounts):
1781 for chunktype in sorted(chunktypecounts):
1782 ui.write(fmtchunktype(chunktype))
1782 ui.write(fmtchunktype(chunktype))
1783 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1783 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1784
1784
1785 ui.write('\n')
1785 ui.write('\n')
1786 fmt = dfmtstr(max(avgchainlen, compratio))
1786 fmt = dfmtstr(max(avgchainlen, compratio))
1787 ui.write(('avg chain length : ') + fmt % avgchainlen)
1787 ui.write(('avg chain length : ') + fmt % avgchainlen)
1788 ui.write(('max chain length : ') + fmt % maxchainlen)
1788 ui.write(('max chain length : ') + fmt % maxchainlen)
1789 ui.write(('compression ratio : ') + fmt % compratio)
1789 ui.write(('compression ratio : ') + fmt % compratio)
1790
1790
1791 if format > 0:
1791 if format > 0:
1792 ui.write('\n')
1792 ui.write('\n')
1793 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1793 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1794 % tuple(datasize))
1794 % tuple(datasize))
1795 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1795 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1796 % tuple(fullsize))
1796 % tuple(fullsize))
1797 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1797 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1798 % tuple(deltasize))
1798 % tuple(deltasize))
1799
1799
1800 if numdeltas > 0:
1800 if numdeltas > 0:
1801 ui.write('\n')
1801 ui.write('\n')
1802 fmt = pcfmtstr(numdeltas)
1802 fmt = pcfmtstr(numdeltas)
1803 fmt2 = pcfmtstr(numdeltas, 4)
1803 fmt2 = pcfmtstr(numdeltas, 4)
1804 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1804 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1805 if numprev > 0:
1805 if numprev > 0:
1806 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1806 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1807 numprev))
1807 numprev))
1808 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1808 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1809 numprev))
1809 numprev))
1810 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1810 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1811 numprev))
1811 numprev))
1812 if gdelta:
1812 if gdelta:
1813 ui.write(('deltas against p1 : ')
1813 ui.write(('deltas against p1 : ')
1814 + fmt % pcfmt(nump1, numdeltas))
1814 + fmt % pcfmt(nump1, numdeltas))
1815 ui.write(('deltas against p2 : ')
1815 ui.write(('deltas against p2 : ')
1816 + fmt % pcfmt(nump2, numdeltas))
1816 + fmt % pcfmt(nump2, numdeltas))
1817 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1817 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1818 numdeltas))
1818 numdeltas))
1819
1819
1820 @command('debugrevspec',
1820 @command('debugrevspec',
1821 [('', 'optimize', None,
1821 [('', 'optimize', None,
1822 _('print parsed tree after optimizing (DEPRECATED)')),
1822 _('print parsed tree after optimizing (DEPRECATED)')),
1823 ('p', 'show-stage', [],
1823 ('p', 'show-stage', [],
1824 _('print parsed tree at the given stage'), _('NAME')),
1824 _('print parsed tree at the given stage'), _('NAME')),
1825 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1825 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1826 ('', 'verify-optimized', False, _('verify optimized result')),
1826 ('', 'verify-optimized', False, _('verify optimized result')),
1827 ],
1827 ],
1828 ('REVSPEC'))
1828 ('REVSPEC'))
1829 def debugrevspec(ui, repo, expr, **opts):
1829 def debugrevspec(ui, repo, expr, **opts):
1830 """parse and apply a revision specification
1830 """parse and apply a revision specification
1831
1831
1832 Use -p/--show-stage option to print the parsed tree at the given stages.
1832 Use -p/--show-stage option to print the parsed tree at the given stages.
1833 Use -p all to print tree at every stage.
1833 Use -p all to print tree at every stage.
1834
1834
1835 Use --verify-optimized to compare the optimized result with the unoptimized
1835 Use --verify-optimized to compare the optimized result with the unoptimized
1836 one. Returns 1 if the optimized result differs.
1836 one. Returns 1 if the optimized result differs.
1837 """
1837 """
1838 stages = [
1838 stages = [
1839 ('parsed', lambda tree: tree),
1839 ('parsed', lambda tree: tree),
1840 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1840 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1841 ('concatenated', revsetlang.foldconcat),
1841 ('concatenated', revsetlang.foldconcat),
1842 ('analyzed', revsetlang.analyze),
1842 ('analyzed', revsetlang.analyze),
1843 ('optimized', revsetlang.optimize),
1843 ('optimized', revsetlang.optimize),
1844 ]
1844 ]
1845 if opts['no_optimized']:
1845 if opts['no_optimized']:
1846 stages = stages[:-1]
1846 stages = stages[:-1]
1847 if opts['verify_optimized'] and opts['no_optimized']:
1847 if opts['verify_optimized'] and opts['no_optimized']:
1848 raise error.Abort(_('cannot use --verify-optimized with '
1848 raise error.Abort(_('cannot use --verify-optimized with '
1849 '--no-optimized'))
1849 '--no-optimized'))
1850 stagenames = set(n for n, f in stages)
1850 stagenames = set(n for n, f in stages)
1851
1851
1852 showalways = set()
1852 showalways = set()
1853 showchanged = set()
1853 showchanged = set()
1854 if ui.verbose and not opts['show_stage']:
1854 if ui.verbose and not opts['show_stage']:
1855 # show parsed tree by --verbose (deprecated)
1855 # show parsed tree by --verbose (deprecated)
1856 showalways.add('parsed')
1856 showalways.add('parsed')
1857 showchanged.update(['expanded', 'concatenated'])
1857 showchanged.update(['expanded', 'concatenated'])
1858 if opts['optimize']:
1858 if opts['optimize']:
1859 showalways.add('optimized')
1859 showalways.add('optimized')
1860 if opts['show_stage'] and opts['optimize']:
1860 if opts['show_stage'] and opts['optimize']:
1861 raise error.Abort(_('cannot use --optimize with --show-stage'))
1861 raise error.Abort(_('cannot use --optimize with --show-stage'))
1862 if opts['show_stage'] == ['all']:
1862 if opts['show_stage'] == ['all']:
1863 showalways.update(stagenames)
1863 showalways.update(stagenames)
1864 else:
1864 else:
1865 for n in opts['show_stage']:
1865 for n in opts['show_stage']:
1866 if n not in stagenames:
1866 if n not in stagenames:
1867 raise error.Abort(_('invalid stage name: %s') % n)
1867 raise error.Abort(_('invalid stage name: %s') % n)
1868 showalways.update(opts['show_stage'])
1868 showalways.update(opts['show_stage'])
1869
1869
1870 treebystage = {}
1870 treebystage = {}
1871 printedtree = None
1871 printedtree = None
1872 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1872 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1873 for n, f in stages:
1873 for n, f in stages:
1874 treebystage[n] = tree = f(tree)
1874 treebystage[n] = tree = f(tree)
1875 if n in showalways or (n in showchanged and tree != printedtree):
1875 if n in showalways or (n in showchanged and tree != printedtree):
1876 if opts['show_stage'] or n != 'parsed':
1876 if opts['show_stage'] or n != 'parsed':
1877 ui.write(("* %s:\n") % n)
1877 ui.write(("* %s:\n") % n)
1878 ui.write(revsetlang.prettyformat(tree), "\n")
1878 ui.write(revsetlang.prettyformat(tree), "\n")
1879 printedtree = tree
1879 printedtree = tree
1880
1880
1881 if opts['verify_optimized']:
1881 if opts['verify_optimized']:
1882 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1882 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1883 brevs = revset.makematcher(treebystage['optimized'])(repo)
1883 brevs = revset.makematcher(treebystage['optimized'])(repo)
1884 if ui.verbose:
1884 if ui.verbose:
1885 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1885 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1886 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1886 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1887 arevs = list(arevs)
1887 arevs = list(arevs)
1888 brevs = list(brevs)
1888 brevs = list(brevs)
1889 if arevs == brevs:
1889 if arevs == brevs:
1890 return 0
1890 return 0
1891 ui.write(('--- analyzed\n'), label='diff.file_a')
1891 ui.write(('--- analyzed\n'), label='diff.file_a')
1892 ui.write(('+++ optimized\n'), label='diff.file_b')
1892 ui.write(('+++ optimized\n'), label='diff.file_b')
1893 sm = difflib.SequenceMatcher(None, arevs, brevs)
1893 sm = difflib.SequenceMatcher(None, arevs, brevs)
1894 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1894 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1895 if tag in ('delete', 'replace'):
1895 if tag in ('delete', 'replace'):
1896 for c in arevs[alo:ahi]:
1896 for c in arevs[alo:ahi]:
1897 ui.write('-%s\n' % c, label='diff.deleted')
1897 ui.write('-%s\n' % c, label='diff.deleted')
1898 if tag in ('insert', 'replace'):
1898 if tag in ('insert', 'replace'):
1899 for c in brevs[blo:bhi]:
1899 for c in brevs[blo:bhi]:
1900 ui.write('+%s\n' % c, label='diff.inserted')
1900 ui.write('+%s\n' % c, label='diff.inserted')
1901 if tag == 'equal':
1901 if tag == 'equal':
1902 for c in arevs[alo:ahi]:
1902 for c in arevs[alo:ahi]:
1903 ui.write(' %s\n' % c)
1903 ui.write(' %s\n' % c)
1904 return 1
1904 return 1
1905
1905
1906 func = revset.makematcher(tree)
1906 func = revset.makematcher(tree)
1907 revs = func(repo)
1907 revs = func(repo)
1908 if ui.verbose:
1908 if ui.verbose:
1909 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1909 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1910 for c in revs:
1910 for c in revs:
1911 ui.write("%s\n" % c)
1911 ui.write("%s\n" % c)
1912
1912
1913 @command('debugsetparents', [], _('REV1 [REV2]'))
1913 @command('debugsetparents', [], _('REV1 [REV2]'))
1914 def debugsetparents(ui, repo, rev1, rev2=None):
1914 def debugsetparents(ui, repo, rev1, rev2=None):
1915 """manually set the parents of the current working directory
1915 """manually set the parents of the current working directory
1916
1916
1917 This is useful for writing repository conversion tools, but should
1917 This is useful for writing repository conversion tools, but should
1918 be used with care. For example, neither the working directory nor the
1918 be used with care. For example, neither the working directory nor the
1919 dirstate is updated, so file status may be incorrect after running this
1919 dirstate is updated, so file status may be incorrect after running this
1920 command.
1920 command.
1921
1921
1922 Returns 0 on success.
1922 Returns 0 on success.
1923 """
1923 """
1924
1924
1925 r1 = scmutil.revsingle(repo, rev1).node()
1925 r1 = scmutil.revsingle(repo, rev1).node()
1926 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1926 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1927
1927
1928 with repo.wlock():
1928 with repo.wlock():
1929 repo.setparents(r1, r2)
1929 repo.setparents(r1, r2)
1930
1930
1931 @command('debugsub',
1931 @command('debugsub',
1932 [('r', 'rev', '',
1932 [('r', 'rev', '',
1933 _('revision to check'), _('REV'))],
1933 _('revision to check'), _('REV'))],
1934 _('[-r REV] [REV]'))
1934 _('[-r REV] [REV]'))
1935 def debugsub(ui, repo, rev=None):
1935 def debugsub(ui, repo, rev=None):
1936 ctx = scmutil.revsingle(repo, rev, None)
1936 ctx = scmutil.revsingle(repo, rev, None)
1937 for k, v in sorted(ctx.substate.items()):
1937 for k, v in sorted(ctx.substate.items()):
1938 ui.write(('path %s\n') % k)
1938 ui.write(('path %s\n') % k)
1939 ui.write((' source %s\n') % v[0])
1939 ui.write((' source %s\n') % v[0])
1940 ui.write((' revision %s\n') % v[1])
1940 ui.write((' revision %s\n') % v[1])
1941
1941
1942 @command('debugsuccessorssets',
1942 @command('debugsuccessorssets',
1943 [],
1943 [],
1944 _('[REV]'))
1944 _('[REV]'))
1945 def debugsuccessorssets(ui, repo, *revs):
1945 def debugsuccessorssets(ui, repo, *revs):
1946 """show set of successors for revision
1946 """show set of successors for revision
1947
1947
1948 A successors set of changeset A is a consistent group of revisions that
1948 A successors set of changeset A is a consistent group of revisions that
1949 succeed A. It contains non-obsolete changesets only.
1949 succeed A. It contains non-obsolete changesets only.
1950
1950
1951 In most cases a changeset A has a single successors set containing a single
1951 In most cases a changeset A has a single successors set containing a single
1952 successor (changeset A replaced by A').
1952 successor (changeset A replaced by A').
1953
1953
1954 A changeset that is made obsolete with no successors are called "pruned".
1954 A changeset that is made obsolete with no successors are called "pruned".
1955 Such changesets have no successors sets at all.
1955 Such changesets have no successors sets at all.
1956
1956
1957 A changeset that has been "split" will have a successors set containing
1957 A changeset that has been "split" will have a successors set containing
1958 more than one successor.
1958 more than one successor.
1959
1959
1960 A changeset that has been rewritten in multiple different ways is called
1960 A changeset that has been rewritten in multiple different ways is called
1961 "divergent". Such changesets have multiple successor sets (each of which
1961 "divergent". Such changesets have multiple successor sets (each of which
1962 may also be split, i.e. have multiple successors).
1962 may also be split, i.e. have multiple successors).
1963
1963
1964 Results are displayed as follows::
1964 Results are displayed as follows::
1965
1965
1966 <rev1>
1966 <rev1>
1967 <successors-1A>
1967 <successors-1A>
1968 <rev2>
1968 <rev2>
1969 <successors-2A>
1969 <successors-2A>
1970 <successors-2B1> <successors-2B2> <successors-2B3>
1970 <successors-2B1> <successors-2B2> <successors-2B3>
1971
1971
1972 Here rev2 has two possible (i.e. divergent) successors sets. The first
1972 Here rev2 has two possible (i.e. divergent) successors sets. The first
1973 holds one element, whereas the second holds three (i.e. the changeset has
1973 holds one element, whereas the second holds three (i.e. the changeset has
1974 been split).
1974 been split).
1975 """
1975 """
1976 # passed to successorssets caching computation from one call to another
1976 # passed to successorssets caching computation from one call to another
1977 cache = {}
1977 cache = {}
1978 ctx2str = str
1978 ctx2str = str
1979 node2str = short
1979 node2str = short
1980 if ui.debug():
1980 if ui.debug():
1981 def ctx2str(ctx):
1981 def ctx2str(ctx):
1982 return ctx.hex()
1982 return ctx.hex()
1983 node2str = hex
1983 node2str = hex
1984 for rev in scmutil.revrange(repo, revs):
1984 for rev in scmutil.revrange(repo, revs):
1985 ctx = repo[rev]
1985 ctx = repo[rev]
1986 ui.write('%s\n'% ctx2str(ctx))
1986 ui.write('%s\n'% ctx2str(ctx))
1987 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
1987 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
1988 if succsset:
1988 if succsset:
1989 ui.write(' ')
1989 ui.write(' ')
1990 ui.write(node2str(succsset[0]))
1990 ui.write(node2str(succsset[0]))
1991 for node in succsset[1:]:
1991 for node in succsset[1:]:
1992 ui.write(' ')
1992 ui.write(' ')
1993 ui.write(node2str(node))
1993 ui.write(node2str(node))
1994 ui.write('\n')
1994 ui.write('\n')
1995
1995
1996 @command('debugtemplate',
1996 @command('debugtemplate',
1997 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
1997 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
1998 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
1998 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
1999 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
1999 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2000 optionalrepo=True)
2000 optionalrepo=True)
2001 def debugtemplate(ui, repo, tmpl, **opts):
2001 def debugtemplate(ui, repo, tmpl, **opts):
2002 """parse and apply a template
2002 """parse and apply a template
2003
2003
2004 If -r/--rev is given, the template is processed as a log template and
2004 If -r/--rev is given, the template is processed as a log template and
2005 applied to the given changesets. Otherwise, it is processed as a generic
2005 applied to the given changesets. Otherwise, it is processed as a generic
2006 template.
2006 template.
2007
2007
2008 Use --verbose to print the parsed tree.
2008 Use --verbose to print the parsed tree.
2009 """
2009 """
2010 revs = None
2010 revs = None
2011 if opts['rev']:
2011 if opts['rev']:
2012 if repo is None:
2012 if repo is None:
2013 raise error.RepoError(_('there is no Mercurial repository here '
2013 raise error.RepoError(_('there is no Mercurial repository here '
2014 '(.hg not found)'))
2014 '(.hg not found)'))
2015 revs = scmutil.revrange(repo, opts['rev'])
2015 revs = scmutil.revrange(repo, opts['rev'])
2016
2016
2017 props = {}
2017 props = {}
2018 for d in opts['define']:
2018 for d in opts['define']:
2019 try:
2019 try:
2020 k, v = (e.strip() for e in d.split('=', 1))
2020 k, v = (e.strip() for e in d.split('=', 1))
2021 if not k:
2021 if not k:
2022 raise ValueError
2022 raise ValueError
2023 props[k] = v
2023 props[k] = v
2024 except ValueError:
2024 except ValueError:
2025 raise error.Abort(_('malformed keyword definition: %s') % d)
2025 raise error.Abort(_('malformed keyword definition: %s') % d)
2026
2026
2027 if ui.verbose:
2027 if ui.verbose:
2028 aliases = ui.configitems('templatealias')
2028 aliases = ui.configitems('templatealias')
2029 tree = templater.parse(tmpl)
2029 tree = templater.parse(tmpl)
2030 ui.note(templater.prettyformat(tree), '\n')
2030 ui.note(templater.prettyformat(tree), '\n')
2031 newtree = templater.expandaliases(tree, aliases)
2031 newtree = templater.expandaliases(tree, aliases)
2032 if newtree != tree:
2032 if newtree != tree:
2033 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2033 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2034
2034
2035 mapfile = None
2035 mapfile = None
2036 if revs is None:
2036 if revs is None:
2037 k = 'debugtemplate'
2037 k = 'debugtemplate'
2038 t = formatter.maketemplater(ui, k, tmpl)
2038 t = formatter.maketemplater(ui, k, tmpl)
2039 ui.write(templater.stringify(t(k, **props)))
2039 ui.write(templater.stringify(t(k, **props)))
2040 else:
2040 else:
2041 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2041 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2042 mapfile, buffered=False)
2042 mapfile, buffered=False)
2043 for r in revs:
2043 for r in revs:
2044 displayer.show(repo[r], **props)
2044 displayer.show(repo[r], **props)
2045 displayer.close()
2045 displayer.close()
2046
2046
2047 @command('debugupgraderepo', [
2047 @command('debugupgraderepo', [
2048 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2048 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2049 ('', 'run', False, _('performs an upgrade')),
2049 ('', 'run', False, _('performs an upgrade')),
2050 ])
2050 ])
2051 def debugupgraderepo(ui, repo, run=False, optimize=None):
2051 def debugupgraderepo(ui, repo, run=False, optimize=None):
2052 """upgrade a repository to use different features
2052 """upgrade a repository to use different features
2053
2053
2054 If no arguments are specified, the repository is evaluated for upgrade
2054 If no arguments are specified, the repository is evaluated for upgrade
2055 and a list of problems and potential optimizations is printed.
2055 and a list of problems and potential optimizations is printed.
2056
2056
2057 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2057 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2058 can be influenced via additional arguments. More details will be provided
2058 can be influenced via additional arguments. More details will be provided
2059 by the command output when run without ``--run``.
2059 by the command output when run without ``--run``.
2060
2060
2061 During the upgrade, the repository will be locked and no writes will be
2061 During the upgrade, the repository will be locked and no writes will be
2062 allowed.
2062 allowed.
2063
2063
2064 At the end of the upgrade, the repository may not be readable while new
2064 At the end of the upgrade, the repository may not be readable while new
2065 repository data is swapped in. This window will be as long as it takes to
2065 repository data is swapped in. This window will be as long as it takes to
2066 rename some directories inside the ``.hg`` directory. On most machines, this
2066 rename some directories inside the ``.hg`` directory. On most machines, this
2067 should complete almost instantaneously and the chances of a consumer being
2067 should complete almost instantaneously and the chances of a consumer being
2068 unable to access the repository should be low.
2068 unable to access the repository should be low.
2069 """
2069 """
2070 return repair.upgraderepo(ui, repo, run=run, optimize=optimize)
2070 return repair.upgraderepo(ui, repo, run=run, optimize=optimize)
2071
2071
2072 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2072 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2073 inferrepo=True)
2073 inferrepo=True)
2074 def debugwalk(ui, repo, *pats, **opts):
2074 def debugwalk(ui, repo, *pats, **opts):
2075 """show how files match on given patterns"""
2075 """show how files match on given patterns"""
2076 m = scmutil.match(repo[None], pats, opts)
2076 m = scmutil.match(repo[None], pats, opts)
2077 items = list(repo.walk(m))
2077 items = list(repo.walk(m))
2078 if not items:
2078 if not items:
2079 return
2079 return
2080 f = lambda fn: fn
2080 f = lambda fn: fn
2081 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2081 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2082 f = lambda fn: util.normpath(fn)
2082 f = lambda fn: util.normpath(fn)
2083 fmt = 'f %%-%ds %%-%ds %%s' % (
2083 fmt = 'f %%-%ds %%-%ds %%s' % (
2084 max([len(abs) for abs in items]),
2084 max([len(abs) for abs in items]),
2085 max([len(m.rel(abs)) for abs in items]))
2085 max([len(m.rel(abs)) for abs in items]))
2086 for abs in items:
2086 for abs in items:
2087 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2087 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2088 ui.write("%s\n" % line.rstrip())
2088 ui.write("%s\n" % line.rstrip())
2089
2089
2090 @command('debugwireargs',
2090 @command('debugwireargs',
2091 [('', 'three', '', 'three'),
2091 [('', 'three', '', 'three'),
2092 ('', 'four', '', 'four'),
2092 ('', 'four', '', 'four'),
2093 ('', 'five', '', 'five'),
2093 ('', 'five', '', 'five'),
2094 ] + commands.remoteopts,
2094 ] + commands.remoteopts,
2095 _('REPO [OPTIONS]... [ONE [TWO]]'),
2095 _('REPO [OPTIONS]... [ONE [TWO]]'),
2096 norepo=True)
2096 norepo=True)
2097 def debugwireargs(ui, repopath, *vals, **opts):
2097 def debugwireargs(ui, repopath, *vals, **opts):
2098 repo = hg.peer(ui, opts, repopath)
2098 repo = hg.peer(ui, opts, repopath)
2099 for opt in commands.remoteopts:
2099 for opt in commands.remoteopts:
2100 del opts[opt[1]]
2100 del opts[opt[1]]
2101 args = {}
2101 args = {}
2102 for k, v in opts.iteritems():
2102 for k, v in opts.iteritems():
2103 if v:
2103 if v:
2104 args[k] = v
2104 args[k] = v
2105 # run twice to check that we don't mess up the stream for the next command
2105 # run twice to check that we don't mess up the stream for the next command
2106 res1 = repo.debugwireargs(*vals, **args)
2106 res1 = repo.debugwireargs(*vals, **args)
2107 res2 = repo.debugwireargs(*vals, **args)
2107 res2 = repo.debugwireargs(*vals, **args)
2108 ui.write("%s\n" % res1)
2108 ui.write("%s\n" % res1)
2109 if res1 != res2:
2109 if res1 != res2:
2110 ui.warn("%s\n" % res2)
2110 ui.warn("%s\n" % res2)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now