move opener from util to scmutil
Adrian Buehlmann
r13970:d1391335 default

The requested changes are too big and content was truncated; the diff below is incomplete.

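The change itself is mechanical: the opener factory moves from mercurial.util to mercurial.scmutil, so callers adjust their import and the attribute they reference. A minimal before/after sketch of the calling pattern, assuming (as in this era of the API) that the second constructor argument is the audit flag; the file name is hypothetical, but both constructor forms appear verbatim in the diff below:

    from mercurial import scmutil

    # before this commit:  opener = util.opener('.', False)
    opener = scmutil.opener('.', False)   # base directory '.', path auditing off
    fp = opener('example.txt', 'w')       # hypothetical path, opened relative to the base
    fp.write('hello\n')
    fp.close()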
@@ -1,37 +1,37 @@
 #!/usr/bin/env python
 # Undump a dump from dumprevlog
 # $ hg init
 # $ undumprevlog < repo.dump

 import sys
-from mercurial import revlog, node, util, transaction
+from mercurial import revlog, node, scmutil, util, transaction

 for fp in (sys.stdin, sys.stdout, sys.stderr):
     util.set_binary(fp)

-opener = util.opener('.', False)
+opener = scmutil.opener('.', False)
 tr = transaction.transaction(sys.stderr.write, opener, "undump.journal")
 while 1:
     l = sys.stdin.readline()
     if not l:
         break
     if l.startswith("file:"):
         f = l[6:-1]
         r = revlog.revlog(opener, f)
         print f
     elif l.startswith("node:"):
         n = node.bin(l[6:-1])
     elif l.startswith("linkrev:"):
         lr = int(l[9:-1])
     elif l.startswith("parents:"):
         p = l[9:-1].split()
         p1 = node.bin(p[0])
         p2 = node.bin(p[1])
     elif l.startswith("length:"):
         length = int(l[8:-1])
         sys.stdin.readline() # start marker
         d = sys.stdin.read(length)
         sys.stdin.readline() # end marker
         r.addrevision(d, tr, lr, p1, p2)

 tr.close()
@@ -1,1172 +1,1172 @@
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os
5 import os
6 import re
6 import re
7 import sys
7 import sys
8 import cPickle as pickle
8 import cPickle as pickle
9 import tempfile
9 import tempfile
10 import urllib
10 import urllib
11 import urllib2
11 import urllib2
12
12
13 -from mercurial import strutil, util, encoding
13 +from mercurial import strutil, scmutil, util, encoding
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 # Subversion stuff. Works best with very recent Python SVN bindings
16 # Subversion stuff. Works best with very recent Python SVN bindings
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 # these bindings.
18 # these bindings.
19
19
20 from cStringIO import StringIO
20 from cStringIO import StringIO
21
21
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 from common import commandline, converter_source, converter_sink, mapfile
23 from common import commandline, converter_source, converter_sink, mapfile
24
24
25 try:
25 try:
26 from svn.core import SubversionException, Pool
26 from svn.core import SubversionException, Pool
27 import svn
27 import svn
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.ra
30 import svn.ra
31 import svn.delta
31 import svn.delta
32 import transport
32 import transport
33 import warnings
33 import warnings
34 warnings.filterwarnings('ignore',
34 warnings.filterwarnings('ignore',
35 module='svn.core',
35 module='svn.core',
36 category=DeprecationWarning)
36 category=DeprecationWarning)
37
37
38 except ImportError:
38 except ImportError:
39 svn = None
39 svn = None
40
40
41 class SvnPathNotFound(Exception):
41 class SvnPathNotFound(Exception):
42 pass
42 pass
43
43
44 def revsplit(rev):
44 def revsplit(rev):
45 """Parse a revision string and return (uuid, path, revnum)."""
45 """Parse a revision string and return (uuid, path, revnum)."""
46 url, revnum = rev.rsplit('@', 1)
46 url, revnum = rev.rsplit('@', 1)
47 parts = url.split('/', 1)
47 parts = url.split('/', 1)
48 mod = ''
48 mod = ''
49 if len(parts) > 1:
49 if len(parts) > 1:
50 mod = '/' + parts[1]
50 mod = '/' + parts[1]
51 return parts[0][4:], mod, int(revnum)
51 return parts[0][4:], mod, int(revnum)
52
52
53 def geturl(path):
53 def geturl(path):
54 try:
54 try:
55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
56 except SubversionException:
56 except SubversionException:
57 pass
57 pass
58 if os.path.isdir(path):
58 if os.path.isdir(path):
59 path = os.path.normpath(os.path.abspath(path))
59 path = os.path.normpath(os.path.abspath(path))
60 if os.name == 'nt':
60 if os.name == 'nt':
61 path = '/' + util.normpath(path)
61 path = '/' + util.normpath(path)
62 # Module URL is later compared with the repository URL returned
62 # Module URL is later compared with the repository URL returned
63 # by svn API, which is UTF-8.
63 # by svn API, which is UTF-8.
64 path = encoding.tolocal(path)
64 path = encoding.tolocal(path)
65 return 'file://%s' % urllib.quote(path)
65 return 'file://%s' % urllib.quote(path)
66 return path
66 return path
67
67
68 def optrev(number):
68 def optrev(number):
69 optrev = svn.core.svn_opt_revision_t()
69 optrev = svn.core.svn_opt_revision_t()
70 optrev.kind = svn.core.svn_opt_revision_number
70 optrev.kind = svn.core.svn_opt_revision_number
71 optrev.value.number = number
71 optrev.value.number = number
72 return optrev
72 return optrev
73
73
74 class changedpath(object):
74 class changedpath(object):
75 def __init__(self, p):
75 def __init__(self, p):
76 self.copyfrom_path = p.copyfrom_path
76 self.copyfrom_path = p.copyfrom_path
77 self.copyfrom_rev = p.copyfrom_rev
77 self.copyfrom_rev = p.copyfrom_rev
78 self.action = p.action
78 self.action = p.action
79
79
80 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
80 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
81 strict_node_history=False):
81 strict_node_history=False):
82 protocol = -1
82 protocol = -1
83 def receiver(orig_paths, revnum, author, date, message, pool):
83 def receiver(orig_paths, revnum, author, date, message, pool):
84 if orig_paths is not None:
84 if orig_paths is not None:
85 for k, v in orig_paths.iteritems():
85 for k, v in orig_paths.iteritems():
86 orig_paths[k] = changedpath(v)
86 orig_paths[k] = changedpath(v)
87 pickle.dump((orig_paths, revnum, author, date, message),
87 pickle.dump((orig_paths, revnum, author, date, message),
88 fp, protocol)
88 fp, protocol)
89
89
90 try:
90 try:
91 # Use an ra of our own so that our parent can consume
91 # Use an ra of our own so that our parent can consume
92 # our results without confusing the server.
92 # our results without confusing the server.
93 t = transport.SvnRaTransport(url=url)
93 t = transport.SvnRaTransport(url=url)
94 svn.ra.get_log(t.ra, paths, start, end, limit,
94 svn.ra.get_log(t.ra, paths, start, end, limit,
95 discover_changed_paths,
95 discover_changed_paths,
96 strict_node_history,
96 strict_node_history,
97 receiver)
97 receiver)
98 except SubversionException, (inst, num):
98 except SubversionException, (inst, num):
99 pickle.dump(num, fp, protocol)
99 pickle.dump(num, fp, protocol)
100 except IOError:
100 except IOError:
101 # Caller may interrupt the iteration
101 # Caller may interrupt the iteration
102 pickle.dump(None, fp, protocol)
102 pickle.dump(None, fp, protocol)
103 else:
103 else:
104 pickle.dump(None, fp, protocol)
104 pickle.dump(None, fp, protocol)
105 fp.close()
105 fp.close()
106 # With large history, cleanup process goes crazy and suddenly
106 # With large history, cleanup process goes crazy and suddenly
107 # consumes *huge* amount of memory. The output file being closed,
107 # consumes *huge* amount of memory. The output file being closed,
108 # there is no need for clean termination.
108 # there is no need for clean termination.
109 os._exit(0)
109 os._exit(0)
110
110
111 def debugsvnlog(ui, **opts):
111 def debugsvnlog(ui, **opts):
112 """Fetch SVN log in a subprocess and channel them back to parent to
112 """Fetch SVN log in a subprocess and channel them back to parent to
113 avoid memory collection issues.
113 avoid memory collection issues.
114 """
114 """
115 util.set_binary(sys.stdin)
115 util.set_binary(sys.stdin)
116 util.set_binary(sys.stdout)
116 util.set_binary(sys.stdout)
117 args = decodeargs(sys.stdin.read())
117 args = decodeargs(sys.stdin.read())
118 get_log_child(sys.stdout, *args)
118 get_log_child(sys.stdout, *args)
119
119
120 class logstream(object):
120 class logstream(object):
121 """Interruptible revision log iterator."""
121 """Interruptible revision log iterator."""
122 def __init__(self, stdout):
122 def __init__(self, stdout):
123 self._stdout = stdout
123 self._stdout = stdout
124
124
125 def __iter__(self):
125 def __iter__(self):
126 while True:
126 while True:
127 try:
127 try:
128 entry = pickle.load(self._stdout)
128 entry = pickle.load(self._stdout)
129 except EOFError:
129 except EOFError:
130 raise util.Abort(_('Mercurial failed to run itself, check'
130 raise util.Abort(_('Mercurial failed to run itself, check'
131 ' hg executable is in PATH'))
131 ' hg executable is in PATH'))
132 try:
132 try:
133 orig_paths, revnum, author, date, message = entry
133 orig_paths, revnum, author, date, message = entry
134 except:
134 except:
135 if entry is None:
135 if entry is None:
136 break
136 break
137 raise SubversionException("child raised exception", entry)
137 raise SubversionException("child raised exception", entry)
138 yield entry
138 yield entry
139
139
140 def close(self):
140 def close(self):
141 if self._stdout:
141 if self._stdout:
142 self._stdout.close()
142 self._stdout.close()
143 self._stdout = None
143 self._stdout = None
144
144
145
145
146 # Check to see if the given path is a local Subversion repo. Verify this by
146 # Check to see if the given path is a local Subversion repo. Verify this by
147 # looking for several svn-specific files and directories in the given
147 # looking for several svn-specific files and directories in the given
148 # directory.
148 # directory.
149 def filecheck(ui, path, proto):
149 def filecheck(ui, path, proto):
150 for x in ('locks', 'hooks', 'format', 'db'):
150 for x in ('locks', 'hooks', 'format', 'db'):
151 if not os.path.exists(os.path.join(path, x)):
151 if not os.path.exists(os.path.join(path, x)):
152 return False
152 return False
153 return True
153 return True
154
154
155 # Check to see if a given path is the root of an svn repo over http. We verify
155 # Check to see if a given path is the root of an svn repo over http. We verify
156 # this by requesting a version-controlled URL we know can't exist and looking
156 # this by requesting a version-controlled URL we know can't exist and looking
157 # for the svn-specific "not found" XML.
157 # for the svn-specific "not found" XML.
158 def httpcheck(ui, path, proto):
158 def httpcheck(ui, path, proto):
159 try:
159 try:
160 opener = urllib2.build_opener()
160 opener = urllib2.build_opener()
161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
162 data = rsp.read()
162 data = rsp.read()
163 except urllib2.HTTPError, inst:
163 except urllib2.HTTPError, inst:
164 if inst.code != 404:
164 if inst.code != 404:
165 # Except for 404 we cannot know for sure this is not an svn repo
165 # Except for 404 we cannot know for sure this is not an svn repo
166 ui.warn(_('svn: cannot probe remote repository, assume it could '
166 ui.warn(_('svn: cannot probe remote repository, assume it could '
167 'be a subversion repository. Use --source-type if you '
167 'be a subversion repository. Use --source-type if you '
168 'know better.\n'))
168 'know better.\n'))
169 return True
169 return True
170 data = inst.fp.read()
170 data = inst.fp.read()
171 except:
171 except:
172 # Could be urllib2.URLError if the URL is invalid or anything else.
172 # Could be urllib2.URLError if the URL is invalid or anything else.
173 return False
173 return False
174 return '<m:human-readable errcode="160013">' in data
174 return '<m:human-readable errcode="160013">' in data
175
175
176 protomap = {'http': httpcheck,
176 protomap = {'http': httpcheck,
177 'https': httpcheck,
177 'https': httpcheck,
178 'file': filecheck,
178 'file': filecheck,
179 }
179 }
180 def issvnurl(ui, url):
180 def issvnurl(ui, url):
181 try:
181 try:
182 proto, path = url.split('://', 1)
182 proto, path = url.split('://', 1)
183 if proto == 'file':
183 if proto == 'file':
184 path = urllib.url2pathname(path)
184 path = urllib.url2pathname(path)
185 except ValueError:
185 except ValueError:
186 proto = 'file'
186 proto = 'file'
187 path = os.path.abspath(url)
187 path = os.path.abspath(url)
188 if proto == 'file':
188 if proto == 'file':
189 path = path.replace(os.sep, '/')
189 path = path.replace(os.sep, '/')
190 check = protomap.get(proto, lambda *args: False)
190 check = protomap.get(proto, lambda *args: False)
191 while '/' in path:
191 while '/' in path:
192 if check(ui, path, proto):
192 if check(ui, path, proto):
193 return True
193 return True
194 path = path.rsplit('/', 1)[0]
194 path = path.rsplit('/', 1)[0]
195 return False
195 return False
196
196
197 # SVN conversion code stolen from bzr-svn and tailor
197 # SVN conversion code stolen from bzr-svn and tailor
198 #
198 #
199 # Subversion looks like a versioned filesystem, branches structures
199 # Subversion looks like a versioned filesystem, branches structures
200 # are defined by conventions and not enforced by the tool. First,
200 # are defined by conventions and not enforced by the tool. First,
201 # we define the potential branches (modules) as "trunk" and "branches"
201 # we define the potential branches (modules) as "trunk" and "branches"
202 # children directories. Revisions are then identified by their
202 # children directories. Revisions are then identified by their
203 # module and revision number (and a repository identifier).
203 # module and revision number (and a repository identifier).
204 #
204 #
205 # The revision graph is really a tree (or a forest). By default, a
205 # The revision graph is really a tree (or a forest). By default, a
206 # revision parent is the previous revision in the same module. If the
206 # revision parent is the previous revision in the same module. If the
207 # module directory is copied/moved from another module then the
207 # module directory is copied/moved from another module then the
208 # revision is the module root and its parent the source revision in
208 # revision is the module root and its parent the source revision in
209 # the parent module. A revision has at most one parent.
209 # the parent module. A revision has at most one parent.
210 #
210 #
211 class svn_source(converter_source):
211 class svn_source(converter_source):
212 def __init__(self, ui, url, rev=None):
212 def __init__(self, ui, url, rev=None):
213 super(svn_source, self).__init__(ui, url, rev=rev)
213 super(svn_source, self).__init__(ui, url, rev=rev)
214
214
215 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
215 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
216 (os.path.exists(url) and
216 (os.path.exists(url) and
217 os.path.exists(os.path.join(url, '.svn'))) or
217 os.path.exists(os.path.join(url, '.svn'))) or
218 issvnurl(ui, url)):
218 issvnurl(ui, url)):
219 raise NoRepo(_("%s does not look like a Subversion repository")
219 raise NoRepo(_("%s does not look like a Subversion repository")
220 % url)
220 % url)
221 if svn is None:
221 if svn is None:
222 raise MissingTool(_('Could not load Subversion python bindings'))
222 raise MissingTool(_('Could not load Subversion python bindings'))
223
223
224 try:
224 try:
225 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
225 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
226 if version < (1, 4):
226 if version < (1, 4):
227 raise MissingTool(_('Subversion python bindings %d.%d found, '
227 raise MissingTool(_('Subversion python bindings %d.%d found, '
228 '1.4 or later required') % version)
228 '1.4 or later required') % version)
229 except AttributeError:
229 except AttributeError:
230 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
230 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
231 'or later required'))
231 'or later required'))
232
232
233 self.lastrevs = {}
233 self.lastrevs = {}
234
234
235 latest = None
235 latest = None
236 try:
236 try:
237 # Support file://path@rev syntax. Useful e.g. to convert
237 # Support file://path@rev syntax. Useful e.g. to convert
238 # deleted branches.
238 # deleted branches.
239 at = url.rfind('@')
239 at = url.rfind('@')
240 if at >= 0:
240 if at >= 0:
241 latest = int(url[at + 1:])
241 latest = int(url[at + 1:])
242 url = url[:at]
242 url = url[:at]
243 except ValueError:
243 except ValueError:
244 pass
244 pass
245 self.url = geturl(url)
245 self.url = geturl(url)
246 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
246 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
247 try:
247 try:
248 self.transport = transport.SvnRaTransport(url=self.url)
248 self.transport = transport.SvnRaTransport(url=self.url)
249 self.ra = self.transport.ra
249 self.ra = self.transport.ra
250 self.ctx = self.transport.client
250 self.ctx = self.transport.client
251 self.baseurl = svn.ra.get_repos_root(self.ra)
251 self.baseurl = svn.ra.get_repos_root(self.ra)
252 # Module is either empty or a repository path starting with
252 # Module is either empty or a repository path starting with
253 # a slash and not ending with a slash.
253 # a slash and not ending with a slash.
254 self.module = urllib.unquote(self.url[len(self.baseurl):])
254 self.module = urllib.unquote(self.url[len(self.baseurl):])
255 self.prevmodule = None
255 self.prevmodule = None
256 self.rootmodule = self.module
256 self.rootmodule = self.module
257 self.commits = {}
257 self.commits = {}
258 self.paths = {}
258 self.paths = {}
259 self.uuid = svn.ra.get_uuid(self.ra)
259 self.uuid = svn.ra.get_uuid(self.ra)
260 except SubversionException:
260 except SubversionException:
261 ui.traceback()
261 ui.traceback()
262 raise NoRepo(_("%s does not look like a Subversion repository")
262 raise NoRepo(_("%s does not look like a Subversion repository")
263 % self.url)
263 % self.url)
264
264
265 if rev:
265 if rev:
266 try:
266 try:
267 latest = int(rev)
267 latest = int(rev)
268 except ValueError:
268 except ValueError:
269 raise util.Abort(_('svn: revision %s is not an integer') % rev)
269 raise util.Abort(_('svn: revision %s is not an integer') % rev)
270
270
271 self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
271 self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
272 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
272 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
273 try:
273 try:
274 self.startrev = int(self.startrev)
274 self.startrev = int(self.startrev)
275 if self.startrev < 0:
275 if self.startrev < 0:
276 self.startrev = 0
276 self.startrev = 0
277 except ValueError:
277 except ValueError:
278 raise util.Abort(_('svn: start revision %s is not an integer')
278 raise util.Abort(_('svn: start revision %s is not an integer')
279 % self.startrev)
279 % self.startrev)
280
280
281 self.head = self.latest(self.module, latest)
281 self.head = self.latest(self.module, latest)
282 if not self.head:
282 if not self.head:
283 raise util.Abort(_('no revision found in module %s')
283 raise util.Abort(_('no revision found in module %s')
284 % self.module)
284 % self.module)
285 self.last_changed = self.revnum(self.head)
285 self.last_changed = self.revnum(self.head)
286
286
287 self._changescache = None
287 self._changescache = None
288
288
289 if os.path.exists(os.path.join(url, '.svn/entries')):
289 if os.path.exists(os.path.join(url, '.svn/entries')):
290 self.wc = url
290 self.wc = url
291 else:
291 else:
292 self.wc = None
292 self.wc = None
293 self.convertfp = None
293 self.convertfp = None
294
294
295 def setrevmap(self, revmap):
295 def setrevmap(self, revmap):
296 lastrevs = {}
296 lastrevs = {}
297 for revid in revmap.iterkeys():
297 for revid in revmap.iterkeys():
298 uuid, module, revnum = revsplit(revid)
298 uuid, module, revnum = revsplit(revid)
299 lastrevnum = lastrevs.setdefault(module, revnum)
299 lastrevnum = lastrevs.setdefault(module, revnum)
300 if revnum > lastrevnum:
300 if revnum > lastrevnum:
301 lastrevs[module] = revnum
301 lastrevs[module] = revnum
302 self.lastrevs = lastrevs
302 self.lastrevs = lastrevs
303
303
304 def exists(self, path, optrev):
304 def exists(self, path, optrev):
305 try:
305 try:
306 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
306 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
307 optrev, False, self.ctx)
307 optrev, False, self.ctx)
308 return True
308 return True
309 except SubversionException:
309 except SubversionException:
310 return False
310 return False
311
311
312 def getheads(self):
312 def getheads(self):
313
313
314 def isdir(path, revnum):
314 def isdir(path, revnum):
315 kind = self._checkpath(path, revnum)
315 kind = self._checkpath(path, revnum)
316 return kind == svn.core.svn_node_dir
316 return kind == svn.core.svn_node_dir
317
317
318 def getcfgpath(name, rev):
318 def getcfgpath(name, rev):
319 cfgpath = self.ui.config('convert', 'svn.' + name)
319 cfgpath = self.ui.config('convert', 'svn.' + name)
320 if cfgpath is not None and cfgpath.strip() == '':
320 if cfgpath is not None and cfgpath.strip() == '':
321 return None
321 return None
322 path = (cfgpath or name).strip('/')
322 path = (cfgpath or name).strip('/')
323 if not self.exists(path, rev):
323 if not self.exists(path, rev):
324 if self.module.endswith(path) and name == 'trunk':
324 if self.module.endswith(path) and name == 'trunk':
325 # we are converting from inside this directory
325 # we are converting from inside this directory
326 return None
326 return None
327 if cfgpath:
327 if cfgpath:
328 raise util.Abort(_('expected %s to be at %r, but not found')
328 raise util.Abort(_('expected %s to be at %r, but not found')
329 % (name, path))
329 % (name, path))
330 return None
330 return None
331 self.ui.note(_('found %s at %r\n') % (name, path))
331 self.ui.note(_('found %s at %r\n') % (name, path))
332 return path
332 return path
333
333
334 rev = optrev(self.last_changed)
334 rev = optrev(self.last_changed)
335 oldmodule = ''
335 oldmodule = ''
336 trunk = getcfgpath('trunk', rev)
336 trunk = getcfgpath('trunk', rev)
337 self.tags = getcfgpath('tags', rev)
337 self.tags = getcfgpath('tags', rev)
338 branches = getcfgpath('branches', rev)
338 branches = getcfgpath('branches', rev)
339
339
340 # If the project has a trunk or branches, we will extract heads
340 # If the project has a trunk or branches, we will extract heads
341 # from them. We keep the project root otherwise.
341 # from them. We keep the project root otherwise.
342 if trunk:
342 if trunk:
343 oldmodule = self.module or ''
343 oldmodule = self.module or ''
344 self.module += '/' + trunk
344 self.module += '/' + trunk
345 self.head = self.latest(self.module, self.last_changed)
345 self.head = self.latest(self.module, self.last_changed)
346 if not self.head:
346 if not self.head:
347 raise util.Abort(_('no revision found in module %s')
347 raise util.Abort(_('no revision found in module %s')
348 % self.module)
348 % self.module)
349
349
350 # First head in the list is the module's head
350 # First head in the list is the module's head
351 self.heads = [self.head]
351 self.heads = [self.head]
352 if self.tags is not None:
352 if self.tags is not None:
353 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
353 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
354
354
355 # Check if branches bring a few more heads to the list
355 # Check if branches bring a few more heads to the list
356 if branches:
356 if branches:
357 rpath = self.url.strip('/')
357 rpath = self.url.strip('/')
358 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
358 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
359 rev, False, self.ctx)
359 rev, False, self.ctx)
360 for branch in branchnames.keys():
360 for branch in branchnames.keys():
361 module = '%s/%s/%s' % (oldmodule, branches, branch)
361 module = '%s/%s/%s' % (oldmodule, branches, branch)
362 if not isdir(module, self.last_changed):
362 if not isdir(module, self.last_changed):
363 continue
363 continue
364 brevid = self.latest(module, self.last_changed)
364 brevid = self.latest(module, self.last_changed)
365 if not brevid:
365 if not brevid:
366 self.ui.note(_('ignoring empty branch %s\n') % branch)
366 self.ui.note(_('ignoring empty branch %s\n') % branch)
367 continue
367 continue
368 self.ui.note(_('found branch %s at %d\n') %
368 self.ui.note(_('found branch %s at %d\n') %
369 (branch, self.revnum(brevid)))
369 (branch, self.revnum(brevid)))
370 self.heads.append(brevid)
370 self.heads.append(brevid)
371
371
372 if self.startrev and self.heads:
372 if self.startrev and self.heads:
373 if len(self.heads) > 1:
373 if len(self.heads) > 1:
374 raise util.Abort(_('svn: start revision is not supported '
374 raise util.Abort(_('svn: start revision is not supported '
375 'with more than one branch'))
375 'with more than one branch'))
376 revnum = self.revnum(self.heads[0])
376 revnum = self.revnum(self.heads[0])
377 if revnum < self.startrev:
377 if revnum < self.startrev:
378 raise util.Abort(
378 raise util.Abort(
379 _('svn: no revision found after start revision %d')
379 _('svn: no revision found after start revision %d')
380 % self.startrev)
380 % self.startrev)
381
381
382 return self.heads
382 return self.heads
383
383
384 def getchanges(self, rev):
384 def getchanges(self, rev):
385 if self._changescache and self._changescache[0] == rev:
385 if self._changescache and self._changescache[0] == rev:
386 return self._changescache[1]
386 return self._changescache[1]
387 self._changescache = None
387 self._changescache = None
388 (paths, parents) = self.paths[rev]
388 (paths, parents) = self.paths[rev]
389 if parents:
389 if parents:
390 files, self.removed, copies = self.expandpaths(rev, paths, parents)
390 files, self.removed, copies = self.expandpaths(rev, paths, parents)
391 else:
391 else:
392 # Perform a full checkout on roots
392 # Perform a full checkout on roots
393 uuid, module, revnum = revsplit(rev)
393 uuid, module, revnum = revsplit(rev)
394 entries = svn.client.ls(self.baseurl + urllib.quote(module),
394 entries = svn.client.ls(self.baseurl + urllib.quote(module),
395 optrev(revnum), True, self.ctx)
395 optrev(revnum), True, self.ctx)
396 files = [n for n, e in entries.iteritems()
396 files = [n for n, e in entries.iteritems()
397 if e.kind == svn.core.svn_node_file]
397 if e.kind == svn.core.svn_node_file]
398 copies = {}
398 copies = {}
399 self.removed = set()
399 self.removed = set()
400
400
401 files.sort()
401 files.sort()
402 files = zip(files, [rev] * len(files))
402 files = zip(files, [rev] * len(files))
403
403
404 # caller caches the result, so free it here to release memory
404 # caller caches the result, so free it here to release memory
405 del self.paths[rev]
405 del self.paths[rev]
406 return (files, copies)
406 return (files, copies)
407
407
408 def getchangedfiles(self, rev, i):
408 def getchangedfiles(self, rev, i):
409 changes = self.getchanges(rev)
409 changes = self.getchanges(rev)
410 self._changescache = (rev, changes)
410 self._changescache = (rev, changes)
411 return [f[0] for f in changes[0]]
411 return [f[0] for f in changes[0]]
412
412
413 def getcommit(self, rev):
413 def getcommit(self, rev):
414 if rev not in self.commits:
414 if rev not in self.commits:
415 uuid, module, revnum = revsplit(rev)
415 uuid, module, revnum = revsplit(rev)
416 self.module = module
416 self.module = module
417 self.reparent(module)
417 self.reparent(module)
418 # We assume that:
418 # We assume that:
419 # - requests for revisions after "stop" come from the
419 # - requests for revisions after "stop" come from the
420 # revision graph backward traversal. Cache all of them
420 # revision graph backward traversal. Cache all of them
421 # down to stop, they will be used eventually.
421 # down to stop, they will be used eventually.
422 # - requests for revisions before "stop" come to get
422 # - requests for revisions before "stop" come to get
423 # isolated branches parents. Just fetch what is needed.
423 # isolated branches parents. Just fetch what is needed.
424 stop = self.lastrevs.get(module, 0)
424 stop = self.lastrevs.get(module, 0)
425 if revnum < stop:
425 if revnum < stop:
426 stop = revnum + 1
426 stop = revnum + 1
427 self._fetch_revisions(revnum, stop)
427 self._fetch_revisions(revnum, stop)
428 commit = self.commits[rev]
428 commit = self.commits[rev]
429 # caller caches the result, so free it here to release memory
429 # caller caches the result, so free it here to release memory
430 del self.commits[rev]
430 del self.commits[rev]
431 return commit
431 return commit
432
432
433 def gettags(self):
433 def gettags(self):
434 tags = {}
434 tags = {}
435 if self.tags is None:
435 if self.tags is None:
436 return tags
436 return tags
437
437
438 # svn tags are just a convention, project branches left in a
438 # svn tags are just a convention, project branches left in a
439 # 'tags' directory. There is no other relationship than
439 # 'tags' directory. There is no other relationship than
440 # ancestry, which is expensive to discover and makes them hard
440 # ancestry, which is expensive to discover and makes them hard
441 # to update incrementally. Worse, past revisions may be
441 # to update incrementally. Worse, past revisions may be
442 # referenced by tags far away in the future, requiring a deep
442 # referenced by tags far away in the future, requiring a deep
443 # history traversal on every calculation. Current code
443 # history traversal on every calculation. Current code
444 # performs a single backward traversal, tracking moves within
444 # performs a single backward traversal, tracking moves within
445 # the tags directory (tag renaming) and recording a new tag
445 # the tags directory (tag renaming) and recording a new tag
446 # everytime a project is copied from outside the tags
446 # everytime a project is copied from outside the tags
447 # directory. It also lists deleted tags, this behaviour may
447 # directory. It also lists deleted tags, this behaviour may
448 # change in the future.
448 # change in the future.
449 pendings = []
449 pendings = []
450 tagspath = self.tags
450 tagspath = self.tags
451 start = svn.ra.get_latest_revnum(self.ra)
451 start = svn.ra.get_latest_revnum(self.ra)
452 stream = self._getlog([self.tags], start, self.startrev)
452 stream = self._getlog([self.tags], start, self.startrev)
453 try:
453 try:
454 for entry in stream:
454 for entry in stream:
455 origpaths, revnum, author, date, message = entry
455 origpaths, revnum, author, date, message = entry
456 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
456 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
457 in origpaths.iteritems() if e.copyfrom_path]
457 in origpaths.iteritems() if e.copyfrom_path]
458 # Apply moves/copies from more specific to general
458 # Apply moves/copies from more specific to general
459 copies.sort(reverse=True)
459 copies.sort(reverse=True)
460
460
461 srctagspath = tagspath
461 srctagspath = tagspath
462 if copies and copies[-1][2] == tagspath:
462 if copies and copies[-1][2] == tagspath:
463 # Track tags directory moves
463 # Track tags directory moves
464 srctagspath = copies.pop()[0]
464 srctagspath = copies.pop()[0]
465
465
466 for source, sourcerev, dest in copies:
466 for source, sourcerev, dest in copies:
467 if not dest.startswith(tagspath + '/'):
467 if not dest.startswith(tagspath + '/'):
468 continue
468 continue
469 for tag in pendings:
469 for tag in pendings:
470 if tag[0].startswith(dest):
470 if tag[0].startswith(dest):
471 tagpath = source + tag[0][len(dest):]
471 tagpath = source + tag[0][len(dest):]
472 tag[:2] = [tagpath, sourcerev]
472 tag[:2] = [tagpath, sourcerev]
473 break
473 break
474 else:
474 else:
475 pendings.append([source, sourcerev, dest])
475 pendings.append([source, sourcerev, dest])
476
476
477 # Filter out tags with children coming from different
477 # Filter out tags with children coming from different
478 # parts of the repository like:
478 # parts of the repository like:
479 # /tags/tag.1 (from /trunk:10)
479 # /tags/tag.1 (from /trunk:10)
480 # /tags/tag.1/foo (from /branches/foo:12)
480 # /tags/tag.1/foo (from /branches/foo:12)
481 # Here/tags/tag.1 discarded as well as its children.
481 # Here/tags/tag.1 discarded as well as its children.
482 # It happens with tools like cvs2svn. Such tags cannot
482 # It happens with tools like cvs2svn. Such tags cannot
483 # be represented in mercurial.
483 # be represented in mercurial.
484 addeds = dict((p, e.copyfrom_path) for p, e
484 addeds = dict((p, e.copyfrom_path) for p, e
485 in origpaths.iteritems()
485 in origpaths.iteritems()
486 if e.action == 'A' and e.copyfrom_path)
486 if e.action == 'A' and e.copyfrom_path)
487 badroots = set()
487 badroots = set()
488 for destroot in addeds:
488 for destroot in addeds:
489 for source, sourcerev, dest in pendings:
489 for source, sourcerev, dest in pendings:
490 if (not dest.startswith(destroot + '/')
490 if (not dest.startswith(destroot + '/')
491 or source.startswith(addeds[destroot] + '/')):
491 or source.startswith(addeds[destroot] + '/')):
492 continue
492 continue
493 badroots.add(destroot)
493 badroots.add(destroot)
494 break
494 break
495
495
496 for badroot in badroots:
496 for badroot in badroots:
497 pendings = [p for p in pendings if p[2] != badroot
497 pendings = [p for p in pendings if p[2] != badroot
498 and not p[2].startswith(badroot + '/')]
498 and not p[2].startswith(badroot + '/')]
499
499
500 # Tell tag renamings from tag creations
500 # Tell tag renamings from tag creations
501 remainings = []
501 remainings = []
502 for source, sourcerev, dest in pendings:
502 for source, sourcerev, dest in pendings:
503 tagname = dest.split('/')[-1]
503 tagname = dest.split('/')[-1]
504 if source.startswith(srctagspath):
504 if source.startswith(srctagspath):
505 remainings.append([source, sourcerev, tagname])
505 remainings.append([source, sourcerev, tagname])
506 continue
506 continue
507 if tagname in tags:
507 if tagname in tags:
508 # Keep the latest tag value
508 # Keep the latest tag value
509 continue
509 continue
510 # From revision may be fake, get one with changes
510 # From revision may be fake, get one with changes
511 try:
511 try:
512 tagid = self.latest(source, sourcerev)
512 tagid = self.latest(source, sourcerev)
513 if tagid and tagname not in tags:
513 if tagid and tagname not in tags:
514 tags[tagname] = tagid
514 tags[tagname] = tagid
515 except SvnPathNotFound:
515 except SvnPathNotFound:
516 # It happens when we are following directories
516 # It happens when we are following directories
517 # we assumed were copied with their parents
517 # we assumed were copied with their parents
518 # but were really created in the tag
518 # but were really created in the tag
519 # directory.
519 # directory.
520 pass
520 pass
521 pendings = remainings
521 pendings = remainings
522 tagspath = srctagspath
522 tagspath = srctagspath
523 finally:
523 finally:
524 stream.close()
524 stream.close()
525 return tags
525 return tags
526
526
527 def converted(self, rev, destrev):
527 def converted(self, rev, destrev):
528 if not self.wc:
528 if not self.wc:
529 return
529 return
530 if self.convertfp is None:
530 if self.convertfp is None:
531 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
531 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
532 'a')
532 'a')
533 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
533 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
534 self.convertfp.flush()
534 self.convertfp.flush()
535
535
536 def revid(self, revnum, module=None):
536 def revid(self, revnum, module=None):
537 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
537 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
538
538
539 def revnum(self, rev):
539 def revnum(self, rev):
540 return int(rev.split('@')[-1])
540 return int(rev.split('@')[-1])
541
541
542 def latest(self, path, stop=0):
542 def latest(self, path, stop=0):
543 """Find the latest revid affecting path, up to stop. It may return
543 """Find the latest revid affecting path, up to stop. It may return
544 a revision in a different module, since a branch may be moved without
544 a revision in a different module, since a branch may be moved without
545 a change being reported. Return None if computed module does not
545 a change being reported. Return None if computed module does not
546 belong to rootmodule subtree.
546 belong to rootmodule subtree.
547 """
547 """
548 if not path.startswith(self.rootmodule):
548 if not path.startswith(self.rootmodule):
549 # Requests on foreign branches may be forbidden at server level
549 # Requests on foreign branches may be forbidden at server level
550 self.ui.debug('ignoring foreign branch %r\n' % path)
550 self.ui.debug('ignoring foreign branch %r\n' % path)
551 return None
551 return None
552
552
553 if not stop:
553 if not stop:
554 stop = svn.ra.get_latest_revnum(self.ra)
554 stop = svn.ra.get_latest_revnum(self.ra)
555 try:
555 try:
556 prevmodule = self.reparent('')
556 prevmodule = self.reparent('')
557 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
557 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
558 self.reparent(prevmodule)
558 self.reparent(prevmodule)
559 except SubversionException:
559 except SubversionException:
560 dirent = None
560 dirent = None
561 if not dirent:
561 if not dirent:
562 raise SvnPathNotFound(_('%s not found up to revision %d')
562 raise SvnPathNotFound(_('%s not found up to revision %d')
563 % (path, stop))
563 % (path, stop))
564
564
565 # stat() gives us the previous revision on this line of
565 # stat() gives us the previous revision on this line of
566 # development, but it might be in *another module*. Fetch the
566 # development, but it might be in *another module*. Fetch the
567 # log and detect renames down to the latest revision.
567 # log and detect renames down to the latest revision.
568 stream = self._getlog([path], stop, dirent.created_rev)
568 stream = self._getlog([path], stop, dirent.created_rev)
569 try:
569 try:
570 for entry in stream:
570 for entry in stream:
571 paths, revnum, author, date, message = entry
571 paths, revnum, author, date, message = entry
572 if revnum <= dirent.created_rev:
572 if revnum <= dirent.created_rev:
573 break
573 break
574
574
575 for p in paths:
575 for p in paths:
576 if not path.startswith(p) or not paths[p].copyfrom_path:
576 if not path.startswith(p) or not paths[p].copyfrom_path:
577 continue
577 continue
578 newpath = paths[p].copyfrom_path + path[len(p):]
578 newpath = paths[p].copyfrom_path + path[len(p):]
579 self.ui.debug("branch renamed from %s to %s at %d\n" %
579 self.ui.debug("branch renamed from %s to %s at %d\n" %
580 (path, newpath, revnum))
580 (path, newpath, revnum))
581 path = newpath
581 path = newpath
582 break
582 break
583 finally:
583 finally:
584 stream.close()
584 stream.close()
585
585
586 if not path.startswith(self.rootmodule):
586 if not path.startswith(self.rootmodule):
587 self.ui.debug('ignoring foreign branch %r\n' % path)
587 self.ui.debug('ignoring foreign branch %r\n' % path)
588 return None
588 return None
589 return self.revid(dirent.created_rev, path)
589 return self.revid(dirent.created_rev, path)
590
590
591 def reparent(self, module):
591 def reparent(self, module):
592 """Reparent the svn transport and return the previous parent."""
592 """Reparent the svn transport and return the previous parent."""
593 if self.prevmodule == module:
593 if self.prevmodule == module:
594 return module
594 return module
595 svnurl = self.baseurl + urllib.quote(module)
595 svnurl = self.baseurl + urllib.quote(module)
596 prevmodule = self.prevmodule
596 prevmodule = self.prevmodule
597 if prevmodule is None:
597 if prevmodule is None:
598 prevmodule = ''
598 prevmodule = ''
599 self.ui.debug("reparent to %s\n" % svnurl)
599 self.ui.debug("reparent to %s\n" % svnurl)
600 svn.ra.reparent(self.ra, svnurl)
600 svn.ra.reparent(self.ra, svnurl)
601 self.prevmodule = module
601 self.prevmodule = module
602 return prevmodule
602 return prevmodule
603
603
604 def expandpaths(self, rev, paths, parents):
604 def expandpaths(self, rev, paths, parents):
605 changed, removed = set(), set()
605 changed, removed = set(), set()
606 copies = {}
606 copies = {}
607
607
608 new_module, revnum = revsplit(rev)[1:]
608 new_module, revnum = revsplit(rev)[1:]
609 if new_module != self.module:
609 if new_module != self.module:
610 self.module = new_module
610 self.module = new_module
611 self.reparent(self.module)
611 self.reparent(self.module)
612
612
613 for i, (path, ent) in enumerate(paths):
613 for i, (path, ent) in enumerate(paths):
614 self.ui.progress(_('scanning paths'), i, item=path,
614 self.ui.progress(_('scanning paths'), i, item=path,
615 total=len(paths))
615 total=len(paths))
616 entrypath = self.getrelpath(path)
616 entrypath = self.getrelpath(path)
617
617
618 kind = self._checkpath(entrypath, revnum)
618 kind = self._checkpath(entrypath, revnum)
619 if kind == svn.core.svn_node_file:
619 if kind == svn.core.svn_node_file:
620 changed.add(self.recode(entrypath))
620 changed.add(self.recode(entrypath))
621 if not ent.copyfrom_path or not parents:
621 if not ent.copyfrom_path or not parents:
622 continue
622 continue
623 # Copy sources not in parent revisions cannot be
623 # Copy sources not in parent revisions cannot be
624 # represented, ignore their origin for now
624 # represented, ignore their origin for now
625 pmodule, prevnum = revsplit(parents[0])[1:]
625 pmodule, prevnum = revsplit(parents[0])[1:]
626 if ent.copyfrom_rev < prevnum:
626 if ent.copyfrom_rev < prevnum:
627 continue
627 continue
628 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
628 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
629 if not copyfrom_path:
629 if not copyfrom_path:
630 continue
630 continue
631 self.ui.debug("copied to %s from %s@%s\n" %
631 self.ui.debug("copied to %s from %s@%s\n" %
632 (entrypath, copyfrom_path, ent.copyfrom_rev))
632 (entrypath, copyfrom_path, ent.copyfrom_rev))
633 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
633 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
634 elif kind == 0: # gone, but had better be a deleted *file*
634 elif kind == 0: # gone, but had better be a deleted *file*
635 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
635 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
636 pmodule, prevnum = revsplit(parents[0])[1:]
636 pmodule, prevnum = revsplit(parents[0])[1:]
637 parentpath = pmodule + "/" + entrypath
637 parentpath = pmodule + "/" + entrypath
638 fromkind = self._checkpath(entrypath, prevnum, pmodule)
638 fromkind = self._checkpath(entrypath, prevnum, pmodule)
639
639
640 if fromkind == svn.core.svn_node_file:
640 if fromkind == svn.core.svn_node_file:
641 removed.add(self.recode(entrypath))
641 removed.add(self.recode(entrypath))
642 elif fromkind == svn.core.svn_node_dir:
642 elif fromkind == svn.core.svn_node_dir:
643 oroot = parentpath.strip('/')
643 oroot = parentpath.strip('/')
644 nroot = path.strip('/')
644 nroot = path.strip('/')
645 children = self._iterfiles(oroot, prevnum)
645 children = self._iterfiles(oroot, prevnum)
646 for childpath in children:
646 for childpath in children:
647 childpath = childpath.replace(oroot, nroot)
647 childpath = childpath.replace(oroot, nroot)
648 childpath = self.getrelpath("/" + childpath, pmodule)
648 childpath = self.getrelpath("/" + childpath, pmodule)
649 if childpath:
649 if childpath:
650 removed.add(self.recode(childpath))
650 removed.add(self.recode(childpath))
651 else:
651 else:
652 self.ui.debug('unknown path in revision %d: %s\n' % \
652 self.ui.debug('unknown path in revision %d: %s\n' % \
653 (revnum, path))
653 (revnum, path))
654 elif kind == svn.core.svn_node_dir:
654 elif kind == svn.core.svn_node_dir:
655 if ent.action == 'M':
655 if ent.action == 'M':
656 # If the directory just had a prop change,
656 # If the directory just had a prop change,
657 # then we shouldn't need to look for its children.
657 # then we shouldn't need to look for its children.
658 continue
658 continue
659 if ent.action == 'R' and parents:
659 if ent.action == 'R' and parents:
660 # If a directory is replacing a file, mark the previous
660 # If a directory is replacing a file, mark the previous
661 # file as deleted
661 # file as deleted
662 pmodule, prevnum = revsplit(parents[0])[1:]
662 pmodule, prevnum = revsplit(parents[0])[1:]
663 pkind = self._checkpath(entrypath, prevnum, pmodule)
663 pkind = self._checkpath(entrypath, prevnum, pmodule)
664 if pkind == svn.core.svn_node_file:
664 if pkind == svn.core.svn_node_file:
665 removed.add(self.recode(entrypath))
665 removed.add(self.recode(entrypath))
666 elif pkind == svn.core.svn_node_dir:
666 elif pkind == svn.core.svn_node_dir:
667 # We do not know what files were kept or removed,
667 # We do not know what files were kept or removed,
668 # mark them all as changed.
668 # mark them all as changed.
669 for childpath in self._iterfiles(pmodule, prevnum):
669 for childpath in self._iterfiles(pmodule, prevnum):
670 childpath = self.getrelpath("/" + childpath)
670 childpath = self.getrelpath("/" + childpath)
671 if childpath:
671 if childpath:
672 changed.add(self.recode(childpath))
672 changed.add(self.recode(childpath))
673
673
674 for childpath in self._iterfiles(path, revnum):
674 for childpath in self._iterfiles(path, revnum):
675 childpath = self.getrelpath("/" + childpath)
675 childpath = self.getrelpath("/" + childpath)
676 if childpath:
676 if childpath:
677 changed.add(self.recode(childpath))
677 changed.add(self.recode(childpath))
678
678
679 # Handle directory copies
679 # Handle directory copies
680 if not ent.copyfrom_path or not parents:
680 if not ent.copyfrom_path or not parents:
681 continue
681 continue
682 # Copy sources not in parent revisions cannot be
682 # Copy sources not in parent revisions cannot be
683 # represented, ignore their origin for now
683 # represented, ignore their origin for now
684 pmodule, prevnum = revsplit(parents[0])[1:]
684 pmodule, prevnum = revsplit(parents[0])[1:]
685 if ent.copyfrom_rev < prevnum:
685 if ent.copyfrom_rev < prevnum:
686 continue
686 continue
687 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
687 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
688 if not copyfrompath:
688 if not copyfrompath:
689 continue
689 continue
690 self.ui.debug("mark %s came from %s:%d\n"
690 self.ui.debug("mark %s came from %s:%d\n"
691 % (path, copyfrompath, ent.copyfrom_rev))
691 % (path, copyfrompath, ent.copyfrom_rev))
692 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
692 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
693 for childpath in children:
693 for childpath in children:
694 childpath = self.getrelpath("/" + childpath, pmodule)
694 childpath = self.getrelpath("/" + childpath, pmodule)
695 if not childpath:
695 if not childpath:
696 continue
696 continue
697 copytopath = path + childpath[len(copyfrompath):]
697 copytopath = path + childpath[len(copyfrompath):]
698 copytopath = self.getrelpath(copytopath)
698 copytopath = self.getrelpath(copytopath)
699 copies[self.recode(copytopath)] = self.recode(childpath)
699 copies[self.recode(copytopath)] = self.recode(childpath)
700
700
701 self.ui.progress(_('scanning paths'), None)
701 self.ui.progress(_('scanning paths'), None)
702 changed.update(removed)
702 changed.update(removed)
703 return (list(changed), removed, copies)
703 return (list(changed), removed, copies)
704
704
705 def _fetch_revisions(self, from_revnum, to_revnum):
705 def _fetch_revisions(self, from_revnum, to_revnum):
706 if from_revnum < to_revnum:
706 if from_revnum < to_revnum:
707 from_revnum, to_revnum = to_revnum, from_revnum
707 from_revnum, to_revnum = to_revnum, from_revnum
708
708
709 self.child_cset = None
709 self.child_cset = None
710
710
711 def parselogentry(orig_paths, revnum, author, date, message):
711 def parselogentry(orig_paths, revnum, author, date, message):
712 """Return the parsed commit object or None, and True if
712 """Return the parsed commit object or None, and True if
713 the revision is a branch root.
713 the revision is a branch root.
714 """
714 """
715 self.ui.debug("parsing revision %d (%d changes)\n" %
715 self.ui.debug("parsing revision %d (%d changes)\n" %
716 (revnum, len(orig_paths)))
716 (revnum, len(orig_paths)))
717
717
718 branched = False
718 branched = False
719 rev = self.revid(revnum)
719 rev = self.revid(revnum)
720 # branch log might return entries for a parent we already have
720 # branch log might return entries for a parent we already have
721
721
722 if rev in self.commits or revnum < to_revnum:
722 if rev in self.commits or revnum < to_revnum:
723 return None, branched
723 return None, branched
724
724
725 parents = []
725 parents = []
726 # check whether this revision is the start of a branch or part
726 # check whether this revision is the start of a branch or part
727 # of a branch renaming
727 # of a branch renaming
728 orig_paths = sorted(orig_paths.iteritems())
728 orig_paths = sorted(orig_paths.iteritems())
729 root_paths = [(p, e) for p, e in orig_paths
729 root_paths = [(p, e) for p, e in orig_paths
730 if self.module.startswith(p)]
730 if self.module.startswith(p)]
731 if root_paths:
731 if root_paths:
732 path, ent = root_paths[-1]
732 path, ent = root_paths[-1]
733 if ent.copyfrom_path:
733 if ent.copyfrom_path:
734 branched = True
734 branched = True
735 newpath = ent.copyfrom_path + self.module[len(path):]
735 newpath = ent.copyfrom_path + self.module[len(path):]
736 # ent.copyfrom_rev may not be the actual last revision
736 # ent.copyfrom_rev may not be the actual last revision
737 previd = self.latest(newpath, ent.copyfrom_rev)
737 previd = self.latest(newpath, ent.copyfrom_rev)
738 if previd is not None:
738 if previd is not None:
739 prevmodule, prevnum = revsplit(previd)[1:]
739 prevmodule, prevnum = revsplit(previd)[1:]
740 if prevnum >= self.startrev:
740 if prevnum >= self.startrev:
741 parents = [previd]
741 parents = [previd]
742 self.ui.note(
742 self.ui.note(
743 _('found parent of branch %s at %d: %s\n') %
743 _('found parent of branch %s at %d: %s\n') %
744 (self.module, prevnum, prevmodule))
744 (self.module, prevnum, prevmodule))
745 else:
745 else:
746 self.ui.debug("no copyfrom path, don't know what to do.\n")
746 self.ui.debug("no copyfrom path, don't know what to do.\n")
747
747
748 paths = []
748 paths = []
749 # filter out unrelated paths
749 # filter out unrelated paths
750 for path, ent in orig_paths:
750 for path, ent in orig_paths:
751 if self.getrelpath(path) is None:
751 if self.getrelpath(path) is None:
752 continue
752 continue
753 paths.append((path, ent))
753 paths.append((path, ent))
754
754
755 # Example SVN datetime. Includes microseconds.
755 # Example SVN datetime. Includes microseconds.
756 # ISO-8601 conformant
756 # ISO-8601 conformant
757 # '2007-01-04T17:35:00.902377Z'
757 # '2007-01-04T17:35:00.902377Z'
758 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
758 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
759
759
760 log = message and self.recode(message) or ''
760 log = message and self.recode(message) or ''
761 author = author and self.recode(author) or ''
761 author = author and self.recode(author) or ''
762 try:
762 try:
763 branch = self.module.split("/")[-1]
763 branch = self.module.split("/")[-1]
764 if branch == self.trunkname:
764 if branch == self.trunkname:
765 branch = None
765 branch = None
766 except IndexError:
766 except IndexError:
767 branch = None
767 branch = None
768
768
769 cset = commit(author=author,
769 cset = commit(author=author,
770 date=util.datestr(date),
770 date=util.datestr(date),
771 desc=log,
771 desc=log,
772 parents=parents,
772 parents=parents,
773 branch=branch,
773 branch=branch,
774 rev=rev)
774 rev=rev)
775
775
776 self.commits[rev] = cset
776 self.commits[rev] = cset
777 # The parents list is *shared* among self.paths and the
777 # The parents list is *shared* among self.paths and the
778 # commit object. Both will be updated below.
778 # commit object. Both will be updated below.
779 self.paths[rev] = (paths, cset.parents)
779 self.paths[rev] = (paths, cset.parents)
780 if self.child_cset and not self.child_cset.parents:
780 if self.child_cset and not self.child_cset.parents:
781 self.child_cset.parents[:] = [rev]
781 self.child_cset.parents[:] = [rev]
782 self.child_cset = cset
782 self.child_cset = cset
783 return cset, branched
783 return cset, branched
784
784
785 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
785 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
786 (self.module, from_revnum, to_revnum))
786 (self.module, from_revnum, to_revnum))
787
787
788 try:
788 try:
789 firstcset = None
789 firstcset = None
790 lastonbranch = False
790 lastonbranch = False
791 stream = self._getlog([self.module], from_revnum, to_revnum)
791 stream = self._getlog([self.module], from_revnum, to_revnum)
792 try:
792 try:
793 for entry in stream:
793 for entry in stream:
794 paths, revnum, author, date, message = entry
794 paths, revnum, author, date, message = entry
795 if revnum < self.startrev:
795 if revnum < self.startrev:
796 lastonbranch = True
796 lastonbranch = True
797 break
797 break
798 if not paths:
798 if not paths:
799 self.ui.debug('revision %d has no entries\n' % revnum)
799 self.ui.debug('revision %d has no entries\n' % revnum)
800 # If we ever leave the loop on an empty
800 # If we ever leave the loop on an empty
801 # revision, do not try to get a parent branch
801 # revision, do not try to get a parent branch
802 lastonbranch = lastonbranch or revnum == 0
802 lastonbranch = lastonbranch or revnum == 0
803 continue
803 continue
804 cset, lastonbranch = parselogentry(paths, revnum, author,
804 cset, lastonbranch = parselogentry(paths, revnum, author,
805 date, message)
805 date, message)
806 if cset:
806 if cset:
807 firstcset = cset
807 firstcset = cset
808 if lastonbranch:
808 if lastonbranch:
809 break
809 break
810 finally:
810 finally:
811 stream.close()
811 stream.close()
812
812
813 if not lastonbranch and firstcset and not firstcset.parents:
813 if not lastonbranch and firstcset and not firstcset.parents:
814 # The first revision of the sequence (the last fetched one)
814 # The first revision of the sequence (the last fetched one)
815 # has invalid parents if not a branch root. Find the parent
815 # has invalid parents if not a branch root. Find the parent
816 # revision now, if any.
816 # revision now, if any.
817 try:
817 try:
818 firstrevnum = self.revnum(firstcset.rev)
818 firstrevnum = self.revnum(firstcset.rev)
819 if firstrevnum > 1:
819 if firstrevnum > 1:
820 latest = self.latest(self.module, firstrevnum - 1)
820 latest = self.latest(self.module, firstrevnum - 1)
821 if latest:
821 if latest:
822 firstcset.parents.append(latest)
822 firstcset.parents.append(latest)
823 except SvnPathNotFound:
823 except SvnPathNotFound:
824 pass
824 pass
825 except SubversionException, (inst, num):
825 except SubversionException, (inst, num):
826 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
826 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
827 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
827 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
828 raise
828 raise
829
829
830 def getfile(self, file, rev):
830 def getfile(self, file, rev):
831 # TODO: ra.get_file transmits the whole file instead of diffs.
831 # TODO: ra.get_file transmits the whole file instead of diffs.
832 if file in self.removed:
832 if file in self.removed:
833 raise IOError()
833 raise IOError()
834 mode = ''
834 mode = ''
835 try:
835 try:
836 new_module, revnum = revsplit(rev)[1:]
836 new_module, revnum = revsplit(rev)[1:]
837 if self.module != new_module:
837 if self.module != new_module:
838 self.module = new_module
838 self.module = new_module
839 self.reparent(self.module)
839 self.reparent(self.module)
840 io = StringIO()
840 io = StringIO()
841 info = svn.ra.get_file(self.ra, file, revnum, io)
841 info = svn.ra.get_file(self.ra, file, revnum, io)
842 data = io.getvalue()
842 data = io.getvalue()
843 # ra.get_files() seems to keep a reference on the input buffer
843 # ra.get_files() seems to keep a reference on the input buffer
844 # preventing collection. Release it explicitly.
844 # preventing collection. Release it explicitly.
845 io.close()
845 io.close()
846 if isinstance(info, list):
846 if isinstance(info, list):
847 info = info[-1]
847 info = info[-1]
848 mode = ("svn:executable" in info) and 'x' or ''
848 mode = ("svn:executable" in info) and 'x' or ''
849 mode = ("svn:special" in info) and 'l' or mode
849 mode = ("svn:special" in info) and 'l' or mode
850 except SubversionException, e:
850 except SubversionException, e:
851 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
851 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
852 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
852 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
853 if e.apr_err in notfound: # File not found
853 if e.apr_err in notfound: # File not found
854 raise IOError()
854 raise IOError()
855 raise
855 raise
856 if mode == 'l':
856 if mode == 'l':
857 link_prefix = "link "
857 link_prefix = "link "
858 if data.startswith(link_prefix):
858 if data.startswith(link_prefix):
859 data = data[len(link_prefix):]
859 data = data[len(link_prefix):]
860 return data, mode
860 return data, mode
861
861
862 def _iterfiles(self, path, revnum):
862 def _iterfiles(self, path, revnum):
863 """Enumerate all files in path at revnum, recursively."""
863 """Enumerate all files in path at revnum, recursively."""
864 path = path.strip('/')
864 path = path.strip('/')
865 pool = Pool()
865 pool = Pool()
866 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
866 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
867 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
867 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
868 if path:
868 if path:
869 path += '/'
869 path += '/'
870 return ((path + p) for p, e in entries.iteritems()
870 return ((path + p) for p, e in entries.iteritems()
871 if e.kind == svn.core.svn_node_file)
871 if e.kind == svn.core.svn_node_file)
872
872
873 def getrelpath(self, path, module=None):
873 def getrelpath(self, path, module=None):
874 if module is None:
874 if module is None:
875 module = self.module
875 module = self.module
876 # Given the repository url of this wc, say
876 # Given the repository url of this wc, say
877 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
877 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
878 # extract the "entry" portion (a relative path) from what
878 # extract the "entry" portion (a relative path) from what
879 # svn log --xml says, ie
879 # svn log --xml says, ie
880 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
880 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
881 # that is to say "tests/PloneTestCase.py"
881 # that is to say "tests/PloneTestCase.py"
882 if path.startswith(module):
882 if path.startswith(module):
883 relative = path.rstrip('/')[len(module):]
883 relative = path.rstrip('/')[len(module):]
884 if relative.startswith('/'):
884 if relative.startswith('/'):
885 return relative[1:]
885 return relative[1:]
886 elif relative == '':
886 elif relative == '':
887 return relative
887 return relative
888
888
889 # The path is outside our tracked tree...
889 # The path is outside our tracked tree...
890 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
890 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
891 return None
891 return None
892
892
893 def _checkpath(self, path, revnum, module=None):
893 def _checkpath(self, path, revnum, module=None):
894 if module is not None:
894 if module is not None:
895 prevmodule = self.reparent('')
895 prevmodule = self.reparent('')
896 path = module + '/' + path
896 path = module + '/' + path
897 try:
897 try:
898 # ra.check_path does not like leading slashes very much; it leads
898 # ra.check_path does not like leading slashes very much; it leads
899 # to PROPFIND subversion errors
899 # to PROPFIND subversion errors
900 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
900 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
901 finally:
901 finally:
902 if module is not None:
902 if module is not None:
903 self.reparent(prevmodule)
903 self.reparent(prevmodule)
904
904
905 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
905 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
906 strict_node_history=False):
906 strict_node_history=False):
907 # Normalize path names; svn >= 1.5 only wants paths relative to
907 # Normalize path names; svn >= 1.5 only wants paths relative to
908 # the supplied URL
908 # the supplied URL
909 relpaths = []
909 relpaths = []
910 for p in paths:
910 for p in paths:
911 if not p.startswith('/'):
911 if not p.startswith('/'):
912 p = self.module + '/' + p
912 p = self.module + '/' + p
913 relpaths.append(p.strip('/'))
913 relpaths.append(p.strip('/'))
914 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
914 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
915 strict_node_history]
915 strict_node_history]
916 arg = encodeargs(args)
916 arg = encodeargs(args)
917 hgexe = util.hgexecutable()
917 hgexe = util.hgexecutable()
918 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
918 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
919 stdin, stdout = util.popen2(util.quotecommand(cmd))
919 stdin, stdout = util.popen2(util.quotecommand(cmd))
920 stdin.write(arg)
920 stdin.write(arg)
921 try:
921 try:
922 stdin.close()
922 stdin.close()
923 except IOError:
923 except IOError:
924 raise util.Abort(_('Mercurial failed to run itself, check'
924 raise util.Abort(_('Mercurial failed to run itself, check'
925 ' hg executable is in PATH'))
925 ' hg executable is in PATH'))
926 return logstream(stdout)
926 return logstream(stdout)
927
927
928 pre_revprop_change = '''#!/bin/sh
928 pre_revprop_change = '''#!/bin/sh
929
929
930 REPOS="$1"
930 REPOS="$1"
931 REV="$2"
931 REV="$2"
932 USER="$3"
932 USER="$3"
933 PROPNAME="$4"
933 PROPNAME="$4"
934 ACTION="$5"
934 ACTION="$5"
935
935
936 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
936 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
937 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
937 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
938 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
938 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
939
939
940 echo "Changing prohibited revision property" >&2
940 echo "Changing prohibited revision property" >&2
941 exit 1
941 exit 1
942 '''
942 '''
943
943
944 class svn_sink(converter_sink, commandline):
944 class svn_sink(converter_sink, commandline):
945 commit_re = re.compile(r'Committed revision (\d+).', re.M)
945 commit_re = re.compile(r'Committed revision (\d+).', re.M)
946 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
946 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
947
947
948 def prerun(self):
948 def prerun(self):
949 if self.wc:
949 if self.wc:
950 os.chdir(self.wc)
950 os.chdir(self.wc)
951
951
952 def postrun(self):
952 def postrun(self):
953 if self.wc:
953 if self.wc:
954 os.chdir(self.cwd)
954 os.chdir(self.cwd)
955
955
956 def join(self, name):
956 def join(self, name):
957 return os.path.join(self.wc, '.svn', name)
957 return os.path.join(self.wc, '.svn', name)
958
958
959 def revmapfile(self):
959 def revmapfile(self):
960 return self.join('hg-shamap')
960 return self.join('hg-shamap')
961
961
962 def authorfile(self):
962 def authorfile(self):
963 return self.join('hg-authormap')
963 return self.join('hg-authormap')
964
964
965 def __init__(self, ui, path):
965 def __init__(self, ui, path):
966
966
967 converter_sink.__init__(self, ui, path)
967 converter_sink.__init__(self, ui, path)
968 commandline.__init__(self, ui, 'svn')
968 commandline.__init__(self, ui, 'svn')
969 self.delete = []
969 self.delete = []
970 self.setexec = []
970 self.setexec = []
971 self.delexec = []
971 self.delexec = []
972 self.copies = []
972 self.copies = []
973 self.wc = None
973 self.wc = None
974 self.cwd = os.getcwd()
974 self.cwd = os.getcwd()
975
975
976 path = os.path.realpath(path)
976 path = os.path.realpath(path)
977
977
978 created = False
978 created = False
979 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
979 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
980 self.wc = path
980 self.wc = path
981 self.run0('update')
981 self.run0('update')
982 else:
982 else:
983 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
983 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
984
984
985 if os.path.isdir(os.path.dirname(path)):
985 if os.path.isdir(os.path.dirname(path)):
986 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
986 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
987 ui.status(_('initializing svn repository %r\n') %
987 ui.status(_('initializing svn repository %r\n') %
988 os.path.basename(path))
988 os.path.basename(path))
989 commandline(ui, 'svnadmin').run0('create', path)
989 commandline(ui, 'svnadmin').run0('create', path)
990 created = path
990 created = path
991 path = util.normpath(path)
991 path = util.normpath(path)
992 if not path.startswith('/'):
992 if not path.startswith('/'):
993 path = '/' + path
993 path = '/' + path
994 path = 'file://' + path
994 path = 'file://' + path
995
995
996 ui.status(_('initializing svn working copy %r\n')
996 ui.status(_('initializing svn working copy %r\n')
997 % os.path.basename(wcpath))
997 % os.path.basename(wcpath))
998 self.run0('checkout', path, wcpath)
998 self.run0('checkout', path, wcpath)
999
999
1000 self.wc = wcpath
1000 self.wc = wcpath
1001 self.opener = util.opener(self.wc)
1001 self.opener = scmutil.opener(self.wc)
1002 self.wopener = util.opener(self.wc)
1002 self.wopener = scmutil.opener(self.wc)
1003 self.childmap = mapfile(ui, self.join('hg-childmap'))
1003 self.childmap = mapfile(ui, self.join('hg-childmap'))
1004 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1004 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1005
1005
1006 if created:
1006 if created:
1007 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1007 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1008 fp = open(hook, 'w')
1008 fp = open(hook, 'w')
1009 fp.write(pre_revprop_change)
1009 fp.write(pre_revprop_change)
1010 fp.close()
1010 fp.close()
1011 util.set_flags(hook, False, True)
1011 util.set_flags(hook, False, True)
1012
1012
1013 output = self.run0('info')
1013 output = self.run0('info')
1014 self.uuid = self.uuid_re.search(output).group(1).strip()
1014 self.uuid = self.uuid_re.search(output).group(1).strip()
1015
1015
1016 def wjoin(self, *names):
1016 def wjoin(self, *names):
1017 return os.path.join(self.wc, *names)
1017 return os.path.join(self.wc, *names)
1018
1018
1019 def putfile(self, filename, flags, data):
1019 def putfile(self, filename, flags, data):
1020 if 'l' in flags:
1020 if 'l' in flags:
1021 self.wopener.symlink(data, filename)
1021 self.wopener.symlink(data, filename)
1022 else:
1022 else:
1023 try:
1023 try:
1024 if os.path.islink(self.wjoin(filename)):
1024 if os.path.islink(self.wjoin(filename)):
1025 os.unlink(filename)
1025 os.unlink(filename)
1026 except OSError:
1026 except OSError:
1027 pass
1027 pass
1028 self.wopener(filename, 'w').write(data)
1028 self.wopener(filename, 'w').write(data)
1029
1029
1030 if self.is_exec:
1030 if self.is_exec:
1031 was_exec = self.is_exec(self.wjoin(filename))
1031 was_exec = self.is_exec(self.wjoin(filename))
1032 else:
1032 else:
1033 # On filesystems not supporting the execute bit, there is no way
1033 # On filesystems not supporting the execute bit, there is no way
1034 # to know if it is set except by asking Subversion. Setting it
1034 # to know if it is set except by asking Subversion. Setting it
1035 # systematically is just as expensive and much simpler.
1035 # systematically is just as expensive and much simpler.
1036 was_exec = 'x' not in flags
1036 was_exec = 'x' not in flags
1037
1037
1038 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1038 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1039 if was_exec:
1039 if was_exec:
1040 if 'x' not in flags:
1040 if 'x' not in flags:
1041 self.delexec.append(filename)
1041 self.delexec.append(filename)
1042 else:
1042 else:
1043 if 'x' in flags:
1043 if 'x' in flags:
1044 self.setexec.append(filename)
1044 self.setexec.append(filename)
1045
1045
1046 def _copyfile(self, source, dest):
1046 def _copyfile(self, source, dest):
1047 # SVN's copy command pukes if the destination file exists, but
1047 # SVN's copy command pukes if the destination file exists, but
1048 # our copyfile method expects to record a copy that has
1048 # our copyfile method expects to record a copy that has
1049 # already occurred. Cross the semantic gap.
1049 # already occurred. Cross the semantic gap.
1050 wdest = self.wjoin(dest)
1050 wdest = self.wjoin(dest)
1051 exists = os.path.lexists(wdest)
1051 exists = os.path.lexists(wdest)
1052 if exists:
1052 if exists:
1053 fd, tempname = tempfile.mkstemp(
1053 fd, tempname = tempfile.mkstemp(
1054 prefix='hg-copy-', dir=os.path.dirname(wdest))
1054 prefix='hg-copy-', dir=os.path.dirname(wdest))
1055 os.close(fd)
1055 os.close(fd)
1056 os.unlink(tempname)
1056 os.unlink(tempname)
1057 os.rename(wdest, tempname)
1057 os.rename(wdest, tempname)
1058 try:
1058 try:
1059 self.run0('copy', source, dest)
1059 self.run0('copy', source, dest)
1060 finally:
1060 finally:
1061 if exists:
1061 if exists:
1062 try:
1062 try:
1063 os.unlink(wdest)
1063 os.unlink(wdest)
1064 except OSError:
1064 except OSError:
1065 pass
1065 pass
1066 os.rename(tempname, wdest)
1066 os.rename(tempname, wdest)
1067
1067
1068 def dirs_of(self, files):
1068 def dirs_of(self, files):
1069 dirs = set()
1069 dirs = set()
1070 for f in files:
1070 for f in files:
1071 if os.path.isdir(self.wjoin(f)):
1071 if os.path.isdir(self.wjoin(f)):
1072 dirs.add(f)
1072 dirs.add(f)
1073 for i in strutil.rfindall(f, '/'):
1073 for i in strutil.rfindall(f, '/'):
1074 dirs.add(f[:i])
1074 dirs.add(f[:i])
1075 return dirs
1075 return dirs
1076
1076
1077 def add_dirs(self, files):
1077 def add_dirs(self, files):
1078 add_dirs = [d for d in sorted(self.dirs_of(files))
1078 add_dirs = [d for d in sorted(self.dirs_of(files))
1079 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1079 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1080 if add_dirs:
1080 if add_dirs:
1081 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1081 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1082 return add_dirs
1082 return add_dirs
1083
1083
1084 def add_files(self, files):
1084 def add_files(self, files):
1085 if files:
1085 if files:
1086 self.xargs(files, 'add', quiet=True)
1086 self.xargs(files, 'add', quiet=True)
1087 return files
1087 return files
1088
1088
1089 def tidy_dirs(self, names):
1089 def tidy_dirs(self, names):
1090 deleted = []
1090 deleted = []
1091 for d in sorted(self.dirs_of(names), reverse=True):
1091 for d in sorted(self.dirs_of(names), reverse=True):
1092 wd = self.wjoin(d)
1092 wd = self.wjoin(d)
1093 if os.listdir(wd) == ['.svn']:
1093 if os.listdir(wd) == ['.svn']:
1094 self.run0('delete', d)
1094 self.run0('delete', d)
1095 deleted.append(d)
1095 deleted.append(d)
1096 return deleted
1096 return deleted
1097
1097
1098 def addchild(self, parent, child):
1098 def addchild(self, parent, child):
1099 self.childmap[parent] = child
1099 self.childmap[parent] = child
1100
1100
1101 def revid(self, rev):
1101 def revid(self, rev):
1102 return u"svn:%s@%s" % (self.uuid, rev)
1102 return u"svn:%s@%s" % (self.uuid, rev)
1103
1103
1104 def putcommit(self, files, copies, parents, commit, source, revmap):
1104 def putcommit(self, files, copies, parents, commit, source, revmap):
1105 # Apply changes to working copy
1105 # Apply changes to working copy
1106 for f, v in files:
1106 for f, v in files:
1107 try:
1107 try:
1108 data, mode = source.getfile(f, v)
1108 data, mode = source.getfile(f, v)
1109 except IOError:
1109 except IOError:
1110 self.delete.append(f)
1110 self.delete.append(f)
1111 else:
1111 else:
1112 self.putfile(f, mode, data)
1112 self.putfile(f, mode, data)
1113 if f in copies:
1113 if f in copies:
1114 self.copies.append([copies[f], f])
1114 self.copies.append([copies[f], f])
1115 files = [f[0] for f in files]
1115 files = [f[0] for f in files]
1116
1116
1117 for parent in parents:
1117 for parent in parents:
1118 try:
1118 try:
1119 return self.revid(self.childmap[parent])
1119 return self.revid(self.childmap[parent])
1120 except KeyError:
1120 except KeyError:
1121 pass
1121 pass
1122 entries = set(self.delete)
1122 entries = set(self.delete)
1123 files = frozenset(files)
1123 files = frozenset(files)
1124 entries.update(self.add_dirs(files.difference(entries)))
1124 entries.update(self.add_dirs(files.difference(entries)))
1125 if self.copies:
1125 if self.copies:
1126 for s, d in self.copies:
1126 for s, d in self.copies:
1127 self._copyfile(s, d)
1127 self._copyfile(s, d)
1128 self.copies = []
1128 self.copies = []
1129 if self.delete:
1129 if self.delete:
1130 self.xargs(self.delete, 'delete')
1130 self.xargs(self.delete, 'delete')
1131 self.delete = []
1131 self.delete = []
1132 entries.update(self.add_files(files.difference(entries)))
1132 entries.update(self.add_files(files.difference(entries)))
1133 entries.update(self.tidy_dirs(entries))
1133 entries.update(self.tidy_dirs(entries))
1134 if self.delexec:
1134 if self.delexec:
1135 self.xargs(self.delexec, 'propdel', 'svn:executable')
1135 self.xargs(self.delexec, 'propdel', 'svn:executable')
1136 self.delexec = []
1136 self.delexec = []
1137 if self.setexec:
1137 if self.setexec:
1138 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1138 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1139 self.setexec = []
1139 self.setexec = []
1140
1140
1141 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1141 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1142 fp = os.fdopen(fd, 'w')
1142 fp = os.fdopen(fd, 'w')
1143 fp.write(commit.desc)
1143 fp.write(commit.desc)
1144 fp.close()
1144 fp.close()
1145 try:
1145 try:
1146 output = self.run0('commit',
1146 output = self.run0('commit',
1147 username=util.shortuser(commit.author),
1147 username=util.shortuser(commit.author),
1148 file=messagefile,
1148 file=messagefile,
1149 encoding='utf-8')
1149 encoding='utf-8')
1150 try:
1150 try:
1151 rev = self.commit_re.search(output).group(1)
1151 rev = self.commit_re.search(output).group(1)
1152 except AttributeError:
1152 except AttributeError:
1153 if not files:
1153 if not files:
1154 return parents[0]
1154 return parents[0]
1155 self.ui.warn(_('unexpected svn output:\n'))
1155 self.ui.warn(_('unexpected svn output:\n'))
1156 self.ui.warn(output)
1156 self.ui.warn(output)
1157 raise util.Abort(_('unable to cope with svn output'))
1157 raise util.Abort(_('unable to cope with svn output'))
1158 if commit.rev:
1158 if commit.rev:
1159 self.run('propset', 'hg:convert-rev', commit.rev,
1159 self.run('propset', 'hg:convert-rev', commit.rev,
1160 revprop=True, revision=rev)
1160 revprop=True, revision=rev)
1161 if commit.branch and commit.branch != 'default':
1161 if commit.branch and commit.branch != 'default':
1162 self.run('propset', 'hg:convert-branch', commit.branch,
1162 self.run('propset', 'hg:convert-branch', commit.branch,
1163 revprop=True, revision=rev)
1163 revprop=True, revision=rev)
1164 for parent in parents:
1164 for parent in parents:
1165 self.addchild(parent, rev)
1165 self.addchild(parent, rev)
1166 return self.revid(rev)
1166 return self.revid(rev)
1167 finally:
1167 finally:
1168 os.unlink(messagefile)
1168 os.unlink(messagefile)
1169
1169
1170 def puttags(self, tags):
1170 def puttags(self, tags):
1171 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1171 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1172 return None, None
1172 return None, None
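The hunks above swap util.opener for scmutil.opener while leaving the constructor arguments untouched, both for the converter's working-copy opener and for the wopener used when writing files. A minimal sketch of how such an opener is typically used, assuming only the constructor and call signatures visible in the hunks above (the temporary base directory and file name are purely illustrative)::

    import tempfile
    from mercurial import scmutil

    base = tempfile.mkdtemp()        # any directory to root the opener at
    opener = scmutil.opener(base)    # relative paths resolve under base

    fp = opener('notes.txt', 'w')    # creates base/notes.txt for writing
    fp.write('hello\n')
    fp.close()

    fp = opener('notes.txt')         # default mode is 'r'
    print fp.read()
    fp.close()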
@@ -1,325 +1,325 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or a revision with the working directory. The external
11 to compare revisions, or a revision with the working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 The extdiff extension also allows you to configure new diff commands, so
16 The extdiff extension also allows you to configure new diff commands, so
17 you do not always need to type :hg:`extdiff -p kdiff3`. ::
17 you do not always need to type :hg:`extdiff -p kdiff3`. ::
18
18
19 [extdiff]
19 [extdiff]
20 # add new command that runs GNU diff(1) in 'context diff' mode
20 # add new command that runs GNU diff(1) in 'context diff' mode
21 cdiff = gdiff -Nprc5
21 cdiff = gdiff -Nprc5
22 ## or the old way:
22 ## or the old way:
23 #cmd.cdiff = gdiff
23 #cmd.cdiff = gdiff
24 #opts.cdiff = -Nprc5
24 #opts.cdiff = -Nprc5
25
25
26 # add new command called vdiff, runs kdiff3
26 # add new command called vdiff, runs kdiff3
27 vdiff = kdiff3
27 vdiff = kdiff3
28
28
29 # add new command called meld, runs meld (no need to name twice)
29 # add new command called meld, runs meld (no need to name twice)
30 meld =
30 meld =
31
31
32 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
32 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
33 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-
33 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-
34 # English users, be sure to put "let g:DirDiffDynamicDiffText = 1" in
34 # English users, be sure to put "let g:DirDiffDynamicDiffText = 1" in
35 # your .vimrc
35 # your .vimrc
36 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
36 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
37
37
38 Tool arguments can include variables that are expanded at runtime::
38 Tool arguments can include variables that are expanded at runtime::
39
39
40 $parent1, $plabel1 - filename, descriptive label of first parent
40 $parent1, $plabel1 - filename, descriptive label of first parent
41 $child, $clabel - filename, descriptive label of child revision
41 $child, $clabel - filename, descriptive label of child revision
42 $parent2, $plabel2 - filename, descriptive label of second parent
42 $parent2, $plabel2 - filename, descriptive label of second parent
43 $parent is an alias for $parent1.
43 $parent is an alias for $parent1.
44
44
45 The extdiff extension will look in your [diff-tools] and [merge-tools]
45 The extdiff extension will look in your [diff-tools] and [merge-tools]
46 sections for diff tool arguments when none are specified in [extdiff].
46 sections for diff tool arguments when none are specified in [extdiff].
47
47
48 ::
48 ::
49
49
50 [extdiff]
50 [extdiff]
51 kdiff3 =
51 kdiff3 =
52
52
53 [diff-tools]
53 [diff-tools]
54 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
54 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
55
55
56 You can use -I/-X and a list of file or directory names as with the
56 You can use -I/-X and a list of file or directory names as with the
57 normal :hg:`diff` command. The extdiff extension makes snapshots of only
57 normal :hg:`diff` command. The extdiff extension makes snapshots of only
58 needed files, so running the external diff program will actually be
58 needed files, so running the external diff program will actually be
59 pretty fast (at least faster than having to compare the entire tree).
59 pretty fast (at least faster than having to compare the entire tree).
60 '''
60 '''
61
61
62 from mercurial.i18n import _
62 from mercurial.i18n import _
63 from mercurial.node import short, nullid
63 from mercurial.node import short, nullid
64 from mercurial import cmdutil, util, commands, encoding
64 from mercurial import cmdutil, scmutil, util, commands, encoding
65 import os, shlex, shutil, tempfile, re
65 import os, shlex, shutil, tempfile, re
66
66
67 def snapshot(ui, repo, files, node, tmproot):
67 def snapshot(ui, repo, files, node, tmproot):
68 '''snapshot files as of some revision
68 '''snapshot files as of some revision
69 if not using snapshot, -I/-X does not work and recursive diff
69 if not using snapshot, -I/-X does not work and recursive diff
70 in tools like kdiff3 and meld displays too many files.'''
70 in tools like kdiff3 and meld displays too many files.'''
71 dirname = os.path.basename(repo.root)
71 dirname = os.path.basename(repo.root)
72 if dirname == "":
72 if dirname == "":
73 dirname = "root"
73 dirname = "root"
74 if node is not None:
74 if node is not None:
75 dirname = '%s.%s' % (dirname, short(node))
75 dirname = '%s.%s' % (dirname, short(node))
76 base = os.path.join(tmproot, dirname)
76 base = os.path.join(tmproot, dirname)
77 os.mkdir(base)
77 os.mkdir(base)
78 if node is not None:
78 if node is not None:
79 ui.note(_('making snapshot of %d files from rev %s\n') %
79 ui.note(_('making snapshot of %d files from rev %s\n') %
80 (len(files), short(node)))
80 (len(files), short(node)))
81 else:
81 else:
82 ui.note(_('making snapshot of %d files from working directory\n') %
82 ui.note(_('making snapshot of %d files from working directory\n') %
83 (len(files)))
83 (len(files)))
84 wopener = util.opener(base)
84 wopener = scmutil.opener(base)
85 fns_and_mtime = []
85 fns_and_mtime = []
86 ctx = repo[node]
86 ctx = repo[node]
87 for fn in files:
87 for fn in files:
88 wfn = util.pconvert(fn)
88 wfn = util.pconvert(fn)
89 if wfn not in ctx:
89 if wfn not in ctx:
90 # File doesn't exist; could be a bogus modify
90 # File doesn't exist; could be a bogus modify
91 continue
91 continue
92 ui.note(' %s\n' % wfn)
92 ui.note(' %s\n' % wfn)
93 dest = os.path.join(base, wfn)
93 dest = os.path.join(base, wfn)
94 fctx = ctx[wfn]
94 fctx = ctx[wfn]
95 data = repo.wwritedata(wfn, fctx.data())
95 data = repo.wwritedata(wfn, fctx.data())
96 if 'l' in fctx.flags():
96 if 'l' in fctx.flags():
97 wopener.symlink(data, wfn)
97 wopener.symlink(data, wfn)
98 else:
98 else:
99 wopener(wfn, 'w').write(data)
99 wopener(wfn, 'w').write(data)
100 if 'x' in fctx.flags():
100 if 'x' in fctx.flags():
101 util.set_flags(dest, False, True)
101 util.set_flags(dest, False, True)
102 if node is None:
102 if node is None:
103 fns_and_mtime.append((dest, repo.wjoin(fn), os.path.getmtime(dest)))
103 fns_and_mtime.append((dest, repo.wjoin(fn), os.path.getmtime(dest)))
104 return dirname, fns_and_mtime
104 return dirname, fns_and_mtime
105
105
106 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
106 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
107 '''Do the actual diff:
107 '''Do the actual diff:
108
108
109 - copy to a temp structure if diffing 2 internal revisions
109 - copy to a temp structure if diffing 2 internal revisions
110 - copy to a temp structure if diffing working revision with
110 - copy to a temp structure if diffing working revision with
111 another one and more than 1 file is changed
111 another one and more than 1 file is changed
112 - just invoke the diff for a single file in the working dir
112 - just invoke the diff for a single file in the working dir
113 '''
113 '''
114
114
115 revs = opts.get('rev')
115 revs = opts.get('rev')
116 change = opts.get('change')
116 change = opts.get('change')
117 args = ' '.join(diffopts)
117 args = ' '.join(diffopts)
118 do3way = '$parent2' in args
118 do3way = '$parent2' in args
119
119
120 if revs and change:
120 if revs and change:
121 msg = _('cannot specify --rev and --change at the same time')
121 msg = _('cannot specify --rev and --change at the same time')
122 raise util.Abort(msg)
122 raise util.Abort(msg)
123 elif change:
123 elif change:
124 node2 = cmdutil.revsingle(repo, change, None).node()
124 node2 = cmdutil.revsingle(repo, change, None).node()
125 node1a, node1b = repo.changelog.parents(node2)
125 node1a, node1b = repo.changelog.parents(node2)
126 else:
126 else:
127 node1a, node2 = cmdutil.revpair(repo, revs)
127 node1a, node2 = cmdutil.revpair(repo, revs)
128 if not revs:
128 if not revs:
129 node1b = repo.dirstate.p2()
129 node1b = repo.dirstate.p2()
130 else:
130 else:
131 node1b = nullid
131 node1b = nullid
132
132
133 # Disable 3-way merge if there is only one parent
133 # Disable 3-way merge if there is only one parent
134 if do3way:
134 if do3way:
135 if node1b == nullid:
135 if node1b == nullid:
136 do3way = False
136 do3way = False
137
137
138 matcher = cmdutil.match(repo, pats, opts)
138 matcher = cmdutil.match(repo, pats, opts)
139 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
139 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
140 if do3way:
140 if do3way:
141 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
141 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
142 else:
142 else:
143 mod_b, add_b, rem_b = set(), set(), set()
143 mod_b, add_b, rem_b = set(), set(), set()
144 modadd = mod_a | add_a | mod_b | add_b
144 modadd = mod_a | add_a | mod_b | add_b
145 common = modadd | rem_a | rem_b
145 common = modadd | rem_a | rem_b
146 if not common:
146 if not common:
147 return 0
147 return 0
148
148
149 tmproot = tempfile.mkdtemp(prefix='extdiff.')
149 tmproot = tempfile.mkdtemp(prefix='extdiff.')
150 try:
150 try:
151 # Always make a copy of node1a (and node1b, if applicable)
151 # Always make a copy of node1a (and node1b, if applicable)
152 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
152 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
153 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
153 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
154 rev1a = '@%d' % repo[node1a].rev()
154 rev1a = '@%d' % repo[node1a].rev()
155 if do3way:
155 if do3way:
156 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
156 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
157 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
157 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
158 rev1b = '@%d' % repo[node1b].rev()
158 rev1b = '@%d' % repo[node1b].rev()
159 else:
159 else:
160 dir1b = None
160 dir1b = None
161 rev1b = ''
161 rev1b = ''
162
162
163 fns_and_mtime = []
163 fns_and_mtime = []
164
164
165 # If node2 is not the wc or there is >1 change, copy it
165 # If node2 is not the wc or there is >1 change, copy it
166 dir2root = ''
166 dir2root = ''
167 rev2 = ''
167 rev2 = ''
168 if node2:
168 if node2:
169 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
169 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
170 rev2 = '@%d' % repo[node2].rev()
170 rev2 = '@%d' % repo[node2].rev()
171 elif len(common) > 1:
171 elif len(common) > 1:
172 # we only actually need to get the files to copy back to
172 # we only actually need to get the files to copy back to
173 # the working dir in this case (because the other cases
173 # the working dir in this case (because the other cases
174 # are: diffing 2 revisions or a single file -- in which case
174 # are: diffing 2 revisions or a single file -- in which case
175 # the file is already directly passed to the diff tool).
175 # the file is already directly passed to the diff tool).
176 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
176 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
177 else:
177 else:
178 # This lets the diff tool open the changed file directly
178 # This lets the diff tool open the changed file directly
179 dir2 = ''
179 dir2 = ''
180 dir2root = repo.root
180 dir2root = repo.root
181
181
182 label1a = rev1a
182 label1a = rev1a
183 label1b = rev1b
183 label1b = rev1b
184 label2 = rev2
184 label2 = rev2
185
185
186 # If only one change, diff the files instead of the directories
186 # If only one change, diff the files instead of the directories
187 # Handle bogus modifies correctly by checking if the files exist
187 # Handle bogus modifies correctly by checking if the files exist
188 if len(common) == 1:
188 if len(common) == 1:
189 common_file = util.localpath(common.pop())
189 common_file = util.localpath(common.pop())
190 dir1a = os.path.join(tmproot, dir1a, common_file)
190 dir1a = os.path.join(tmproot, dir1a, common_file)
191 label1a = common_file + rev1a
191 label1a = common_file + rev1a
192 if not os.path.isfile(dir1a):
192 if not os.path.isfile(dir1a):
193 dir1a = os.devnull
193 dir1a = os.devnull
194 if do3way:
194 if do3way:
195 dir1b = os.path.join(tmproot, dir1b, common_file)
195 dir1b = os.path.join(tmproot, dir1b, common_file)
196 label1b = common_file + rev1b
196 label1b = common_file + rev1b
197 if not os.path.isfile(dir1b):
197 if not os.path.isfile(dir1b):
198 dir1b = os.devnull
198 dir1b = os.devnull
199 dir2 = os.path.join(dir2root, dir2, common_file)
199 dir2 = os.path.join(dir2root, dir2, common_file)
200 label2 = common_file + rev2
200 label2 = common_file + rev2
201
201
202 # Function to quote file/dir names in the argument string.
202 # Function to quote file/dir names in the argument string.
203 # When not operating in 3-way mode, an empty string is
203 # When not operating in 3-way mode, an empty string is
204 # returned for parent2
204 # returned for parent2
205 replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
205 replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
206 plabel1=label1a, plabel2=label1b,
206 plabel1=label1a, plabel2=label1b,
207 clabel=label2, child=dir2)
207 clabel=label2, child=dir2)
208 def quote(match):
208 def quote(match):
209 key = match.group()[1:]
209 key = match.group()[1:]
210 if not do3way and key == 'parent2':
210 if not do3way and key == 'parent2':
211 return ''
211 return ''
212 return util.shellquote(replace[key])
212 return util.shellquote(replace[key])
213
213
214 # Match parent2 first, so 'parent1?' will match both parent1 and parent
214 # Match parent2 first, so 'parent1?' will match both parent1 and parent
215 regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel)'
215 regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel)'
216 if not do3way and not re.search(regex, args):
216 if not do3way and not re.search(regex, args):
217 args += ' $parent1 $child'
217 args += ' $parent1 $child'
218 args = re.sub(regex, quote, args)
218 args = re.sub(regex, quote, args)
219 cmdline = util.shellquote(diffcmd) + ' ' + args
219 cmdline = util.shellquote(diffcmd) + ' ' + args
220
220
221 ui.debug('running %r in %s\n' % (cmdline, tmproot))
221 ui.debug('running %r in %s\n' % (cmdline, tmproot))
222 util.system(cmdline, cwd=tmproot)
222 util.system(cmdline, cwd=tmproot)
223
223
224 for copy_fn, working_fn, mtime in fns_and_mtime:
224 for copy_fn, working_fn, mtime in fns_and_mtime:
225 if os.path.getmtime(copy_fn) != mtime:
225 if os.path.getmtime(copy_fn) != mtime:
226 ui.debug('file changed while diffing. '
226 ui.debug('file changed while diffing. '
227 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
227 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
228 util.copyfile(copy_fn, working_fn)
228 util.copyfile(copy_fn, working_fn)
229
229
230 return 1
230 return 1
231 finally:
231 finally:
232 ui.note(_('cleaning up temp directory\n'))
232 ui.note(_('cleaning up temp directory\n'))
233 shutil.rmtree(tmproot)
233 shutil.rmtree(tmproot)
234
234
235 def extdiff(ui, repo, *pats, **opts):
235 def extdiff(ui, repo, *pats, **opts):
236 '''use external program to diff repository (or selected files)
236 '''use external program to diff repository (or selected files)
237
237
238 Show differences between revisions for the specified files, using
238 Show differences between revisions for the specified files, using
239 an external program. The default program used is diff, with
239 an external program. The default program used is diff, with
240 default options "-Npru".
240 default options "-Npru".
241
241
242 To select a different program, use the -p/--program option. The
242 To select a different program, use the -p/--program option. The
243 program will be passed the names of two directories to compare. To
243 program will be passed the names of two directories to compare. To
244 pass additional options to the program, use -o/--option. These
244 pass additional options to the program, use -o/--option. These
245 will be passed before the names of the directories to compare.
245 will be passed before the names of the directories to compare.
246
246
247 When two revision arguments are given, then changes are shown
247 When two revision arguments are given, then changes are shown
248 between those revisions. If only one revision is specified then
248 between those revisions. If only one revision is specified then
249 that revision is compared to the working directory, and, when no
249 that revision is compared to the working directory, and, when no
250 revisions are specified, the working directory files are compared
250 revisions are specified, the working directory files are compared
251 to its parent.'''
251 to its parent.'''
252 program = opts.get('program')
252 program = opts.get('program')
253 option = opts.get('option')
253 option = opts.get('option')
254 if not program:
254 if not program:
255 program = 'diff'
255 program = 'diff'
256 option = option or ['-Npru']
256 option = option or ['-Npru']
257 return dodiff(ui, repo, program, option, pats, opts)
257 return dodiff(ui, repo, program, option, pats, opts)
258
258
259 cmdtable = {
259 cmdtable = {
260 "extdiff":
260 "extdiff":
261 (extdiff,
261 (extdiff,
262 [('p', 'program', '',
262 [('p', 'program', '',
263 _('comparison program to run'), _('CMD')),
263 _('comparison program to run'), _('CMD')),
264 ('o', 'option', [],
264 ('o', 'option', [],
265 _('pass option to comparison program'), _('OPT')),
265 _('pass option to comparison program'), _('OPT')),
266 ('r', 'rev', [],
266 ('r', 'rev', [],
267 _('revision'), _('REV')),
267 _('revision'), _('REV')),
268 ('c', 'change', '',
268 ('c', 'change', '',
269 _('change made by revision'), _('REV')),
269 _('change made by revision'), _('REV')),
270 ] + commands.walkopts,
270 ] + commands.walkopts,
271 _('hg extdiff [OPT]... [FILE]...')),
271 _('hg extdiff [OPT]... [FILE]...')),
272 }
272 }
273
273
274 def uisetup(ui):
274 def uisetup(ui):
275 for cmd, path in ui.configitems('extdiff'):
275 for cmd, path in ui.configitems('extdiff'):
276 if cmd.startswith('cmd.'):
276 if cmd.startswith('cmd.'):
277 cmd = cmd[4:]
277 cmd = cmd[4:]
278 if not path:
278 if not path:
279 path = cmd
279 path = cmd
280 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
280 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
281 diffopts = diffopts and [diffopts] or []
281 diffopts = diffopts and [diffopts] or []
282 elif cmd.startswith('opts.'):
282 elif cmd.startswith('opts.'):
283 continue
283 continue
284 else:
284 else:
285 # command = path opts
285 # command = path opts
286 if path:
286 if path:
287 diffopts = shlex.split(path)
287 diffopts = shlex.split(path)
288 path = diffopts.pop(0)
288 path = diffopts.pop(0)
289 else:
289 else:
290 path, diffopts = cmd, []
290 path, diffopts = cmd, []
291 # look for diff arguments in [diff-tools] then [merge-tools]
291 # look for diff arguments in [diff-tools] then [merge-tools]
292 if diffopts == []:
292 if diffopts == []:
293 args = ui.config('diff-tools', cmd+'.diffargs') or \
293 args = ui.config('diff-tools', cmd+'.diffargs') or \
294 ui.config('merge-tools', cmd+'.diffargs')
294 ui.config('merge-tools', cmd+'.diffargs')
295 if args:
295 if args:
296 diffopts = shlex.split(args)
296 diffopts = shlex.split(args)
297 def save(cmd, path, diffopts):
297 def save(cmd, path, diffopts):
298 '''use closure to save diff command to use'''
298 '''use closure to save diff command to use'''
299 def mydiff(ui, repo, *pats, **opts):
299 def mydiff(ui, repo, *pats, **opts):
300 return dodiff(ui, repo, path, diffopts + opts['option'],
300 return dodiff(ui, repo, path, diffopts + opts['option'],
301 pats, opts)
301 pats, opts)
302 doc = _('''\
302 doc = _('''\
303 use %(path)s to diff repository (or selected files)
303 use %(path)s to diff repository (or selected files)
304
304
305 Show differences between revisions for the specified files, using
305 Show differences between revisions for the specified files, using
306 the %(path)s program.
306 the %(path)s program.
307
307
308 When two revision arguments are given, then changes are shown
308 When two revision arguments are given, then changes are shown
309 between those revisions. If only one revision is specified then
309 between those revisions. If only one revision is specified then
310 that revision is compared to the working directory, and, when no
310 that revision is compared to the working directory, and, when no
311 revisions are specified, the working directory files are compared
311 revisions are specified, the working directory files are compared
312 to its parent.\
312 to its parent.\
313 ''') % dict(path=util.uirepr(path))
313 ''') % dict(path=util.uirepr(path))
314
314
315 # We must translate the docstring right away since it is
315 # We must translate the docstring right away since it is
316 # used as a format string. The string will unfortunately
316 # used as a format string. The string will unfortunately
317 # be translated again in commands.helpcmd and this will
317 # be translated again in commands.helpcmd and this will
318 # fail when the docstring contains non-ASCII characters.
318 # fail when the docstring contains non-ASCII characters.
319 # Decoding the string to a Unicode string here (using the
319 # Decoding the string to a Unicode string here (using the
320 # right encoding) prevents that.
320 # right encoding) prevents that.
321 mydiff.__doc__ = doc.decode(encoding.encoding)
321 mydiff.__doc__ = doc.decode(encoding.encoding)
322 return mydiff
322 return mydiff
323 cmdtable[cmd] = (save(cmd, path, diffopts),
323 cmdtable[cmd] = (save(cmd, path, diffopts),
324 cmdtable['extdiff'][1][1:],
324 cmdtable['extdiff'][1][1:],
325 _('hg %s [OPTION]... [FILE]...') % cmd)
325 _('hg %s [OPTION]... [FILE]...') % cmd)
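In dodiff() above, the tool's argument string is expanded by building a replacement table and handing a quoting callback to re.sub, with parent2 listed before parent1? in the pattern so that $parent2 is not misread as $parent followed by a literal 2. A standalone sketch of that substitution step, assuming pipes.quote as a stand-in for util.shellquote (the snapshot paths and labels are made up, and the 2-way check is simplified to an empty-value test)::

    import re
    import pipes

    replace = dict(parent='/tmp/snap.1a', parent1='/tmp/snap.1a',
                   parent2='', child='/tmp/snap.2',
                   plabel1='@10', plabel2='', clabel='@11')

    def quote(match):
        key = match.group()[1:]      # drop the leading '$'
        if not replace[key]:         # e.g. no second parent in 2-way mode
            return ''
        return pipes.quote(replace[key])

    regex = r'\$(parent2|parent1?|child|plabel1|plabel2|clabel)'
    args = "--L1 '$plabel1' --L2 '$clabel' $parent $child"
    print re.sub(regex, quote, args)
    # --L1 '@10' --L2 '@11' /tmp/snap.1a /tmp/snap.2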
@@ -1,3273 +1,3273 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 file creations or deletions. This behaviour can be configured with::
31 file creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 You will by default be managing a patch queue named "patches". You can
41 You will by default be managing a patch queue named "patches". You can
42 create other, independent patch queues with the :hg:`qqueue` command.
42 create other, independent patch queues with the :hg:`qqueue` command.
43 '''
43 '''
44
44
45 from mercurial.i18n import _
45 from mercurial.i18n import _
46 from mercurial.node import bin, hex, short, nullid, nullrev
46 from mercurial.node import bin, hex, short, nullid, nullrev
47 from mercurial.lock import release
47 from mercurial.lock import release
48 from mercurial import commands, cmdutil, hg, patch, util
48 from mercurial import commands, cmdutil, hg, patch, scmutil, util
49 from mercurial import repair, extensions, url, error
49 from mercurial import repair, extensions, url, error
50 import os, sys, re, errno, shutil
50 import os, sys, re, errno, shutil
51
51
52 commands.norepo += " qclone"
52 commands.norepo += " qclone"
53
53
54 # Patch names look like unix file names.
54 # Patch names look like unix file names.
55 # They must be joinable with the queue directory and result in the patch path.
55 # They must be joinable with the queue directory and result in the patch path.
56 normname = util.normpath
56 normname = util.normpath
57
57
58 class statusentry(object):
58 class statusentry(object):
59 def __init__(self, node, name):
59 def __init__(self, node, name):
60 self.node, self.name = node, name
60 self.node, self.name = node, name
61 def __repr__(self):
61 def __repr__(self):
62 return hex(self.node) + ':' + self.name
62 return hex(self.node) + ':' + self.name
63
63
64 class patchheader(object):
64 class patchheader(object):
65 def __init__(self, pf, plainmode=False):
65 def __init__(self, pf, plainmode=False):
66 def eatdiff(lines):
66 def eatdiff(lines):
67 while lines:
67 while lines:
68 l = lines[-1]
68 l = lines[-1]
69 if (l.startswith("diff -") or
69 if (l.startswith("diff -") or
70 l.startswith("Index:") or
70 l.startswith("Index:") or
71 l.startswith("===========")):
71 l.startswith("===========")):
72 del lines[-1]
72 del lines[-1]
73 else:
73 else:
74 break
74 break
75 def eatempty(lines):
75 def eatempty(lines):
76 while lines:
76 while lines:
77 if not lines[-1].strip():
77 if not lines[-1].strip():
78 del lines[-1]
78 del lines[-1]
79 else:
79 else:
80 break
80 break
81
81
82 message = []
82 message = []
83 comments = []
83 comments = []
84 user = None
84 user = None
85 date = None
85 date = None
86 parent = None
86 parent = None
87 format = None
87 format = None
88 subject = None
88 subject = None
89 branch = None
89 branch = None
90 nodeid = None
90 nodeid = None
91 diffstart = 0
91 diffstart = 0
92
92
93 for line in file(pf):
93 for line in file(pf):
94 line = line.rstrip()
94 line = line.rstrip()
95 if (line.startswith('diff --git')
95 if (line.startswith('diff --git')
96 or (diffstart and line.startswith('+++ '))):
96 or (diffstart and line.startswith('+++ '))):
97 diffstart = 2
97 diffstart = 2
98 break
98 break
99 diffstart = 0 # reset
99 diffstart = 0 # reset
100 if line.startswith("--- "):
100 if line.startswith("--- "):
101 diffstart = 1
101 diffstart = 1
102 continue
102 continue
103 elif format == "hgpatch":
103 elif format == "hgpatch":
104 # parse values when importing the result of an hg export
104 # parse values when importing the result of an hg export
105 if line.startswith("# User "):
105 if line.startswith("# User "):
106 user = line[7:]
106 user = line[7:]
107 elif line.startswith("# Date "):
107 elif line.startswith("# Date "):
108 date = line[7:]
108 date = line[7:]
109 elif line.startswith("# Parent "):
109 elif line.startswith("# Parent "):
110 parent = line[9:]
110 parent = line[9:]
111 elif line.startswith("# Branch "):
111 elif line.startswith("# Branch "):
112 branch = line[9:]
112 branch = line[9:]
113 elif line.startswith("# Node ID "):
113 elif line.startswith("# Node ID "):
114 nodeid = line[10:]
114 nodeid = line[10:]
115 elif not line.startswith("# ") and line:
115 elif not line.startswith("# ") and line:
116 message.append(line)
116 message.append(line)
117 format = None
117 format = None
118 elif line == '# HG changeset patch':
118 elif line == '# HG changeset patch':
119 message = []
119 message = []
120 format = "hgpatch"
120 format = "hgpatch"
121 elif (format != "tagdone" and (line.startswith("Subject: ") or
121 elif (format != "tagdone" and (line.startswith("Subject: ") or
122 line.startswith("subject: "))):
122 line.startswith("subject: "))):
123 subject = line[9:]
123 subject = line[9:]
124 format = "tag"
124 format = "tag"
125 elif (format != "tagdone" and (line.startswith("From: ") or
125 elif (format != "tagdone" and (line.startswith("From: ") or
126 line.startswith("from: "))):
126 line.startswith("from: "))):
127 user = line[6:]
127 user = line[6:]
128 format = "tag"
128 format = "tag"
129 elif (format != "tagdone" and (line.startswith("Date: ") or
129 elif (format != "tagdone" and (line.startswith("Date: ") or
130 line.startswith("date: "))):
130 line.startswith("date: "))):
131 date = line[6:]
131 date = line[6:]
132 format = "tag"
132 format = "tag"
133 elif format == "tag" and line == "":
133 elif format == "tag" and line == "":
134 # when looking for tags (subject: from: etc) they
134 # when looking for tags (subject: from: etc) they
135 # end once you find a blank line in the source
135 # end once you find a blank line in the source
136 format = "tagdone"
136 format = "tagdone"
137 elif message or line:
137 elif message or line:
138 message.append(line)
138 message.append(line)
139 comments.append(line)
139 comments.append(line)
140
140
141 eatdiff(message)
141 eatdiff(message)
142 eatdiff(comments)
142 eatdiff(comments)
143 # Remember the exact starting line of the patch diffs before consuming
143 # Remember the exact starting line of the patch diffs before consuming
144 # empty lines, for external use by TortoiseHg and others
144 # empty lines, for external use by TortoiseHg and others
145 self.diffstartline = len(comments)
145 self.diffstartline = len(comments)
146 eatempty(message)
146 eatempty(message)
147 eatempty(comments)
147 eatempty(comments)
148
148
149 # make sure message isn't empty
149 # make sure message isn't empty
150 if format and format.startswith("tag") and subject:
150 if format and format.startswith("tag") and subject:
151 message.insert(0, "")
151 message.insert(0, "")
152 message.insert(0, subject)
152 message.insert(0, subject)
153
153
154 self.message = message
154 self.message = message
155 self.comments = comments
155 self.comments = comments
156 self.user = user
156 self.user = user
157 self.date = date
157 self.date = date
158 self.parent = parent
158 self.parent = parent
159 # nodeid and branch are for external use by TortoiseHg and others
159 # nodeid and branch are for external use by TortoiseHg and others
160 self.nodeid = nodeid
160 self.nodeid = nodeid
161 self.branch = branch
161 self.branch = branch
162 self.haspatch = diffstart > 1
162 self.haspatch = diffstart > 1
163 self.plainmode = plainmode
163 self.plainmode = plainmode
164
164
165 def setuser(self, user):
165 def setuser(self, user):
166 if not self.updateheader(['From: ', '# User '], user):
166 if not self.updateheader(['From: ', '# User '], user):
167 try:
167 try:
168 patchheaderat = self.comments.index('# HG changeset patch')
168 patchheaderat = self.comments.index('# HG changeset patch')
169 self.comments.insert(patchheaderat + 1, '# User ' + user)
169 self.comments.insert(patchheaderat + 1, '# User ' + user)
170 except ValueError:
170 except ValueError:
171 if self.plainmode or self._hasheader(['Date: ']):
171 if self.plainmode or self._hasheader(['Date: ']):
172 self.comments = ['From: ' + user] + self.comments
172 self.comments = ['From: ' + user] + self.comments
173 else:
173 else:
174 tmp = ['# HG changeset patch', '# User ' + user, '']
174 tmp = ['# HG changeset patch', '# User ' + user, '']
175 self.comments = tmp + self.comments
175 self.comments = tmp + self.comments
176 self.user = user
176 self.user = user
177
177
178 def setdate(self, date):
178 def setdate(self, date):
179 if not self.updateheader(['Date: ', '# Date '], date):
179 if not self.updateheader(['Date: ', '# Date '], date):
180 try:
180 try:
181 patchheaderat = self.comments.index('# HG changeset patch')
181 patchheaderat = self.comments.index('# HG changeset patch')
182 self.comments.insert(patchheaderat + 1, '# Date ' + date)
182 self.comments.insert(patchheaderat + 1, '# Date ' + date)
183 except ValueError:
183 except ValueError:
184 if self.plainmode or self._hasheader(['From: ']):
184 if self.plainmode or self._hasheader(['From: ']):
185 self.comments = ['Date: ' + date] + self.comments
185 self.comments = ['Date: ' + date] + self.comments
186 else:
186 else:
187 tmp = ['# HG changeset patch', '# Date ' + date, '']
187 tmp = ['# HG changeset patch', '# Date ' + date, '']
188 self.comments = tmp + self.comments
188 self.comments = tmp + self.comments
189 self.date = date
189 self.date = date
190
190
191 def setparent(self, parent):
191 def setparent(self, parent):
192 if not self.updateheader(['# Parent '], parent):
192 if not self.updateheader(['# Parent '], parent):
193 try:
193 try:
194 patchheaderat = self.comments.index('# HG changeset patch')
194 patchheaderat = self.comments.index('# HG changeset patch')
195 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
195 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
196 except ValueError:
196 except ValueError:
197 pass
197 pass
198 self.parent = parent
198 self.parent = parent
199
199
200 def setmessage(self, message):
200 def setmessage(self, message):
201 if self.comments:
201 if self.comments:
202 self._delmsg()
202 self._delmsg()
203 self.message = [message]
203 self.message = [message]
204 self.comments += self.message
204 self.comments += self.message
205
205
206 def updateheader(self, prefixes, new):
206 def updateheader(self, prefixes, new):
207 '''Update all references to a field in the patch header.
207 '''Update all references to a field in the patch header.
208 Return whether the field is present.'''
208 Return whether the field is present.'''
209 res = False
209 res = False
210 for prefix in prefixes:
210 for prefix in prefixes:
211 for i in xrange(len(self.comments)):
211 for i in xrange(len(self.comments)):
212 if self.comments[i].startswith(prefix):
212 if self.comments[i].startswith(prefix):
213 self.comments[i] = prefix + new
213 self.comments[i] = prefix + new
214 res = True
214 res = True
215 break
215 break
216 return res
216 return res
217
217
218 def _hasheader(self, prefixes):
218 def _hasheader(self, prefixes):
219 '''Check if a header starts with any of the given prefixes.'''
219 '''Check if a header starts with any of the given prefixes.'''
220 for prefix in prefixes:
220 for prefix in prefixes:
221 for comment in self.comments:
221 for comment in self.comments:
222 if comment.startswith(prefix):
222 if comment.startswith(prefix):
223 return True
223 return True
224 return False
224 return False
225
225
226 def __str__(self):
226 def __str__(self):
227 if not self.comments:
227 if not self.comments:
228 return ''
228 return ''
229 return '\n'.join(self.comments) + '\n\n'
229 return '\n'.join(self.comments) + '\n\n'
230
230
231 def _delmsg(self):
231 def _delmsg(self):
232 '''Remove the existing message from the comments, keeping the
232 '''Remove the existing message from the comments, keeping the
233 header fields. If comments contains a 'subject: ' line, the message
233 header fields. If comments contains a 'subject: ' line, the message
234 is assumed to begin with that subject followed by a blank line.'''
234 is assumed to begin with that subject followed by a blank line.'''
235 if self.message:
235 if self.message:
236 subj = 'subject: ' + self.message[0].lower()
236 subj = 'subject: ' + self.message[0].lower()
237 for i in xrange(len(self.comments)):
237 for i in xrange(len(self.comments)):
238 if subj == self.comments[i].lower():
238 if subj == self.comments[i].lower():
239 del self.comments[i]
239 del self.comments[i]
240 self.message = self.message[2:]
240 self.message = self.message[2:]
241 break
241 break
242 ci = 0
242 ci = 0
243 for mi in self.message:
243 for mi in self.message:
244 while mi != self.comments[ci]:
244 while mi != self.comments[ci]:
245 ci += 1
245 ci += 1
246 del self.comments[ci]
246 del self.comments[ci]
247
247
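# --- Editor's note (illustrative sketch, not part of this changeset) --------
# The header layout the patchheader methods above maintain.  In the default
# (non-plain) mode an mq patch file typically begins with:
#
#     # HG changeset patch
#     # Parent 0123456789abcdef...          <- inserted/updated by setparent()
#     # User Alice <alice@example.invalid>  <- inserted/updated by setuser()
#     # Date 1300000000 0                   <- inserted/updated by setdate()
#
#     commit message body
#
#     diff --git a/foo.c b/foo.c
#     ...
#
# In plain mode (mq.plain=True) the same information is carried by mail-style
# "From: " and "Date: " lines instead, as the fallback branches above show.
# The user, date and node values here are invented for illustration only.
# ----------------------------------------------------------------------------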
248 class queue(object):
248 class queue(object):
249 def __init__(self, ui, path, patchdir=None):
249 def __init__(self, ui, path, patchdir=None):
250 self.basepath = path
250 self.basepath = path
251 try:
251 try:
252 fh = open(os.path.join(path, 'patches.queue'))
252 fh = open(os.path.join(path, 'patches.queue'))
253 cur = fh.read().rstrip()
253 cur = fh.read().rstrip()
254 fh.close()
254 fh.close()
255 if not cur:
255 if not cur:
256 curpath = os.path.join(path, 'patches')
256 curpath = os.path.join(path, 'patches')
257 else:
257 else:
258 curpath = os.path.join(path, 'patches-' + cur)
258 curpath = os.path.join(path, 'patches-' + cur)
259 except IOError:
259 except IOError:
260 curpath = os.path.join(path, 'patches')
260 curpath = os.path.join(path, 'patches')
261 self.path = patchdir or curpath
261 self.path = patchdir or curpath
262 self.opener = util.opener(self.path)
262 self.opener = scmutil.opener(self.path)
263 self.ui = ui
263 self.ui = ui
264 self.applied_dirty = 0
264 self.applied_dirty = 0
265 self.series_dirty = 0
265 self.series_dirty = 0
266 self.added = []
266 self.added = []
267 self.series_path = "series"
267 self.series_path = "series"
268 self.status_path = "status"
268 self.status_path = "status"
269 self.guards_path = "guards"
269 self.guards_path = "guards"
270 self.active_guards = None
270 self.active_guards = None
271 self.guards_dirty = False
271 self.guards_dirty = False
272 # Handle mq.git as a bool with extended values
272 # Handle mq.git as a bool with extended values
273 try:
273 try:
274 gitmode = ui.configbool('mq', 'git', None)
274 gitmode = ui.configbool('mq', 'git', None)
275 if gitmode is None:
275 if gitmode is None:
276 raise error.ConfigError()
276 raise error.ConfigError()
277 self.gitmode = gitmode and 'yes' or 'no'
277 self.gitmode = gitmode and 'yes' or 'no'
278 except error.ConfigError:
278 except error.ConfigError:
279 self.gitmode = ui.config('mq', 'git', 'auto').lower()
279 self.gitmode = ui.config('mq', 'git', 'auto').lower()
280 self.plainmode = ui.configbool('mq', 'plain', False)
280 self.plainmode = ui.configbool('mq', 'plain', False)
281
281
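# --- Editor's note (illustrative sketch, not part of this changeset) --------
# This changeset replaces util.opener with scmutil.opener above; the object
# it returns is used the same way throughout this class: calling it with a
# path relative to the base directory opens that file.  A minimal sketch,
# using a hypothetical patches directory and only the call forms visible in
# this file:
#
#     from mercurial import scmutil
#     opener = scmutil.opener('/path/to/repo/.hg/patches')
#     series = opener('series').read().splitlines()   # read relative to base
#     fp = opener('status', 'w')                       # (over)write a file
#     fp.write('0123456789abcdef...:example.patch\n')  # hypothetical entry
#     fp.close()
# ----------------------------------------------------------------------------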
282 @util.propertycache
282 @util.propertycache
283 def applied(self):
283 def applied(self):
284 if os.path.exists(self.join(self.status_path)):
284 if os.path.exists(self.join(self.status_path)):
285 def parselines(lines):
285 def parselines(lines):
286 for l in lines:
286 for l in lines:
287 entry = l.split(':', 1)
287 entry = l.split(':', 1)
288 if len(entry) > 1:
288 if len(entry) > 1:
289 n, name = entry
289 n, name = entry
290 yield statusentry(bin(n), name)
290 yield statusentry(bin(n), name)
291 elif l.strip():
291 elif l.strip():
292 self.ui.warn(_('malformatted mq status line: %s\n') % entry)
292 self.ui.warn(_('malformatted mq status line: %s\n') % entry)
293 # else we ignore empty lines
293 # else we ignore empty lines
294 lines = self.opener(self.status_path).read().splitlines()
294 lines = self.opener(self.status_path).read().splitlines()
295 return list(parselines(lines))
295 return list(parselines(lines))
296 return []
296 return []
297
297
298 @util.propertycache
298 @util.propertycache
299 def full_series(self):
299 def full_series(self):
300 if os.path.exists(self.join(self.series_path)):
300 if os.path.exists(self.join(self.series_path)):
301 return self.opener(self.series_path).read().splitlines()
301 return self.opener(self.series_path).read().splitlines()
302 return []
302 return []
303
303
304 @util.propertycache
304 @util.propertycache
305 def series(self):
305 def series(self):
306 self.parse_series()
306 self.parse_series()
307 return self.series
307 return self.series
308
308
309 @util.propertycache
309 @util.propertycache
310 def series_guards(self):
310 def series_guards(self):
311 self.parse_series()
311 self.parse_series()
312 return self.series_guards
312 return self.series_guards
313
313
314 def invalidate(self):
314 def invalidate(self):
315 for a in 'applied full_series series series_guards'.split():
315 for a in 'applied full_series series series_guards'.split():
316 if a in self.__dict__:
316 if a in self.__dict__:
317 delattr(self, a)
317 delattr(self, a)
318 self.applied_dirty = 0
318 self.applied_dirty = 0
319 self.series_dirty = 0
319 self.series_dirty = 0
320 self.guards_dirty = False
320 self.guards_dirty = False
321 self.active_guards = None
321 self.active_guards = None
322
322
323 def diffopts(self, opts={}, patchfn=None):
323 def diffopts(self, opts={}, patchfn=None):
324 diffopts = patch.diffopts(self.ui, opts)
324 diffopts = patch.diffopts(self.ui, opts)
325 if self.gitmode == 'auto':
325 if self.gitmode == 'auto':
326 diffopts.upgrade = True
326 diffopts.upgrade = True
327 elif self.gitmode == 'keep':
327 elif self.gitmode == 'keep':
328 pass
328 pass
329 elif self.gitmode in ('yes', 'no'):
329 elif self.gitmode in ('yes', 'no'):
330 diffopts.git = self.gitmode == 'yes'
330 diffopts.git = self.gitmode == 'yes'
331 else:
331 else:
332 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
332 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
333 ', got %s') % self.gitmode)
333 ', got %s') % self.gitmode)
334 if patchfn:
334 if patchfn:
335 diffopts = self.patchopts(diffopts, patchfn)
335 diffopts = self.patchopts(diffopts, patchfn)
336 return diffopts
336 return diffopts
337
337
338 def patchopts(self, diffopts, *patches):
338 def patchopts(self, diffopts, *patches):
339 """Return a copy of input diff options with git set to true if
339 """Return a copy of input diff options with git set to true if
340 referenced patch is a git patch and should be preserved as such.
340 referenced patch is a git patch and should be preserved as such.
341 """
341 """
342 diffopts = diffopts.copy()
342 diffopts = diffopts.copy()
343 if not diffopts.git and self.gitmode == 'keep':
343 if not diffopts.git and self.gitmode == 'keep':
344 for patchfn in patches:
344 for patchfn in patches:
345 patchf = self.opener(patchfn, 'r')
345 patchf = self.opener(patchfn, 'r')
346 # if the patch was a git patch, refresh it as a git patch
346 # if the patch was a git patch, refresh it as a git patch
347 for line in patchf:
347 for line in patchf:
348 if line.startswith('diff --git'):
348 if line.startswith('diff --git'):
349 diffopts.git = True
349 diffopts.git = True
350 break
350 break
351 patchf.close()
351 patchf.close()
352 return diffopts
352 return diffopts
353
353
354 def join(self, *p):
354 def join(self, *p):
355 return os.path.join(self.path, *p)
355 return os.path.join(self.path, *p)
356
356
357 def find_series(self, patch):
357 def find_series(self, patch):
358 def matchpatch(l):
358 def matchpatch(l):
359 l = l.split('#', 1)[0]
359 l = l.split('#', 1)[0]
360 return l.strip() == patch
360 return l.strip() == patch
361 for index, l in enumerate(self.full_series):
361 for index, l in enumerate(self.full_series):
362 if matchpatch(l):
362 if matchpatch(l):
363 return index
363 return index
364 return None
364 return None
365
365
366 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
366 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
367
367
368 def parse_series(self):
368 def parse_series(self):
369 self.series = []
369 self.series = []
370 self.series_guards = []
370 self.series_guards = []
371 for l in self.full_series:
371 for l in self.full_series:
372 h = l.find('#')
372 h = l.find('#')
373 if h == -1:
373 if h == -1:
374 patch = l
374 patch = l
375 comment = ''
375 comment = ''
376 elif h == 0:
376 elif h == 0:
377 continue
377 continue
378 else:
378 else:
379 patch = l[:h]
379 patch = l[:h]
380 comment = l[h:]
380 comment = l[h:]
381 patch = patch.strip()
381 patch = patch.strip()
382 if patch:
382 if patch:
383 if patch in self.series:
383 if patch in self.series:
384 raise util.Abort(_('%s appears more than once in %s') %
384 raise util.Abort(_('%s appears more than once in %s') %
385 (patch, self.join(self.series_path)))
385 (patch, self.join(self.series_path)))
386 self.series.append(patch)
386 self.series.append(patch)
387 self.series_guards.append(self.guard_re.findall(comment))
387 self.series_guards.append(self.guard_re.findall(comment))
388
388
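# --- Editor's note (illustrative sketch, not part of this changeset) --------
# The series file syntax that parse_series() above accepts.  Each non-comment
# line names one patch; trailing "#+guard"/"#-guard" markers are collected
# into series_guards by guard_re.  Hypothetical example contents:
#
#     # full-line comments are skipped
#     fix-crash.patch
#     new-feature.patch #+experimental
#     build-tweak.patch #+experimental #-stable
#
# would yield series = ['fix-crash.patch', 'new-feature.patch',
# 'build-tweak.patch'] and series_guards = [[], ['+experimental'],
# ['+experimental', '-stable']].
# ----------------------------------------------------------------------------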
389 def check_guard(self, guard):
389 def check_guard(self, guard):
390 if not guard:
390 if not guard:
391 return _('guard cannot be an empty string')
391 return _('guard cannot be an empty string')
392 bad_chars = '# \t\r\n\f'
392 bad_chars = '# \t\r\n\f'
393 first = guard[0]
393 first = guard[0]
394 if first in '-+':
394 if first in '-+':
395 return (_('guard %r starts with invalid character: %r') %
395 return (_('guard %r starts with invalid character: %r') %
396 (guard, first))
396 (guard, first))
397 for c in bad_chars:
397 for c in bad_chars:
398 if c in guard:
398 if c in guard:
399 return _('invalid character in guard %r: %r') % (guard, c)
399 return _('invalid character in guard %r: %r') % (guard, c)
400
400
401 def set_active(self, guards):
401 def set_active(self, guards):
402 for guard in guards:
402 for guard in guards:
403 bad = self.check_guard(guard)
403 bad = self.check_guard(guard)
404 if bad:
404 if bad:
405 raise util.Abort(bad)
405 raise util.Abort(bad)
406 guards = sorted(set(guards))
406 guards = sorted(set(guards))
407 self.ui.debug('active guards: %s\n' % ' '.join(guards))
407 self.ui.debug('active guards: %s\n' % ' '.join(guards))
408 self.active_guards = guards
408 self.active_guards = guards
409 self.guards_dirty = True
409 self.guards_dirty = True
410
410
411 def active(self):
411 def active(self):
412 if self.active_guards is None:
412 if self.active_guards is None:
413 self.active_guards = []
413 self.active_guards = []
414 try:
414 try:
415 guards = self.opener(self.guards_path).read().split()
415 guards = self.opener(self.guards_path).read().split()
416 except IOError, err:
416 except IOError, err:
417 if err.errno != errno.ENOENT:
417 if err.errno != errno.ENOENT:
418 raise
418 raise
419 guards = []
419 guards = []
420 for i, guard in enumerate(guards):
420 for i, guard in enumerate(guards):
421 bad = self.check_guard(guard)
421 bad = self.check_guard(guard)
422 if bad:
422 if bad:
423 self.ui.warn('%s:%d: %s\n' %
423 self.ui.warn('%s:%d: %s\n' %
424 (self.join(self.guards_path), i + 1, bad))
424 (self.join(self.guards_path), i + 1, bad))
425 else:
425 else:
426 self.active_guards.append(guard)
426 self.active_guards.append(guard)
427 return self.active_guards
427 return self.active_guards
428
428
429 def set_guards(self, idx, guards):
429 def set_guards(self, idx, guards):
430 for g in guards:
430 for g in guards:
431 if len(g) < 2:
431 if len(g) < 2:
432 raise util.Abort(_('guard %r too short') % g)
432 raise util.Abort(_('guard %r too short') % g)
433 if g[0] not in '-+':
433 if g[0] not in '-+':
434 raise util.Abort(_('guard %r starts with invalid char') % g)
434 raise util.Abort(_('guard %r starts with invalid char') % g)
435 bad = self.check_guard(g[1:])
435 bad = self.check_guard(g[1:])
436 if bad:
436 if bad:
437 raise util.Abort(bad)
437 raise util.Abort(bad)
438 drop = self.guard_re.sub('', self.full_series[idx])
438 drop = self.guard_re.sub('', self.full_series[idx])
439 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
439 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
440 self.parse_series()
440 self.parse_series()
441 self.series_dirty = True
441 self.series_dirty = True
442
442
443 def pushable(self, idx):
443 def pushable(self, idx):
444 if isinstance(idx, str):
444 if isinstance(idx, str):
445 idx = self.series.index(idx)
445 idx = self.series.index(idx)
446 patchguards = self.series_guards[idx]
446 patchguards = self.series_guards[idx]
447 if not patchguards:
447 if not patchguards:
448 return True, None
448 return True, None
449 guards = self.active()
449 guards = self.active()
450 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
450 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
451 if exactneg:
451 if exactneg:
452 return False, exactneg[0]
452 return False, exactneg[0]
453 pos = [g for g in patchguards if g[0] == '+']
453 pos = [g for g in patchguards if g[0] == '+']
454 exactpos = [g for g in pos if g[1:] in guards]
454 exactpos = [g for g in pos if g[1:] in guards]
455 if pos:
455 if pos:
456 if exactpos:
456 if exactpos:
457 return True, exactpos[0]
457 return True, exactpos[0]
458 return False, pos
458 return False, pos
459 return True, ''
459 return True, ''
460
460
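# --- Editor's note (illustrative sketch, not part of this changeset) --------
# How pushable() above combines a patch's guards with the active guard set
# (read from the guards file by active()).  A minimal standalone sketch of
# the same selection rule, with hypothetical guard names:
#
#     def _pushable(patchguards, active):
#         if not patchguards:                       # unguarded: always pushable
#             return True
#         if any(g[0] == '-' and g[1:] in active    # a matching negative guard
#                for g in patchguards):             # always blocks the patch
#             return False
#         pos = [g for g in patchguards if g[0] == '+']
#         if pos:                                   # positive guards need at
#             return any(g[1:] in active for g in pos)   # least one match
#         return True
#
#     _pushable(['+experimental'], active=['experimental'])   # -> True
#     _pushable(['+experimental'], active=[])                 # -> False
#     _pushable(['-stable'], active=['stable'])                # -> False
# ----------------------------------------------------------------------------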
461 def explain_pushable(self, idx, all_patches=False):
461 def explain_pushable(self, idx, all_patches=False):
462 write = all_patches and self.ui.write or self.ui.warn
462 write = all_patches and self.ui.write or self.ui.warn
463 if all_patches or self.ui.verbose:
463 if all_patches or self.ui.verbose:
464 if isinstance(idx, str):
464 if isinstance(idx, str):
465 idx = self.series.index(idx)
465 idx = self.series.index(idx)
466 pushable, why = self.pushable(idx)
466 pushable, why = self.pushable(idx)
467 if all_patches and pushable:
467 if all_patches and pushable:
468 if why is None:
468 if why is None:
469 write(_('allowing %s - no guards in effect\n') %
469 write(_('allowing %s - no guards in effect\n') %
470 self.series[idx])
470 self.series[idx])
471 else:
471 else:
472 if not why:
472 if not why:
473 write(_('allowing %s - no matching negative guards\n') %
473 write(_('allowing %s - no matching negative guards\n') %
474 self.series[idx])
474 self.series[idx])
475 else:
475 else:
476 write(_('allowing %s - guarded by %r\n') %
476 write(_('allowing %s - guarded by %r\n') %
477 (self.series[idx], why))
477 (self.series[idx], why))
478 if not pushable:
478 if not pushable:
479 if why:
479 if why:
480 write(_('skipping %s - guarded by %r\n') %
480 write(_('skipping %s - guarded by %r\n') %
481 (self.series[idx], why))
481 (self.series[idx], why))
482 else:
482 else:
483 write(_('skipping %s - no matching guards\n') %
483 write(_('skipping %s - no matching guards\n') %
484 self.series[idx])
484 self.series[idx])
485
485
486 def save_dirty(self):
486 def save_dirty(self):
487 def write_list(items, path):
487 def write_list(items, path):
488 fp = self.opener(path, 'w')
488 fp = self.opener(path, 'w')
489 for i in items:
489 for i in items:
490 fp.write("%s\n" % i)
490 fp.write("%s\n" % i)
491 fp.close()
491 fp.close()
492 if self.applied_dirty:
492 if self.applied_dirty:
493 write_list(map(str, self.applied), self.status_path)
493 write_list(map(str, self.applied), self.status_path)
494 if self.series_dirty:
494 if self.series_dirty:
495 write_list(self.full_series, self.series_path)
495 write_list(self.full_series, self.series_path)
496 if self.guards_dirty:
496 if self.guards_dirty:
497 write_list(self.active_guards, self.guards_path)
497 write_list(self.active_guards, self.guards_path)
498 if self.added:
498 if self.added:
499 qrepo = self.qrepo()
499 qrepo = self.qrepo()
500 if qrepo:
500 if qrepo:
501 qrepo[None].add(f for f in self.added if f not in qrepo[None])
501 qrepo[None].add(f for f in self.added if f not in qrepo[None])
502 self.added = []
502 self.added = []
503
503
504 def removeundo(self, repo):
504 def removeundo(self, repo):
505 undo = repo.sjoin('undo')
505 undo = repo.sjoin('undo')
506 if not os.path.exists(undo):
506 if not os.path.exists(undo):
507 return
507 return
508 try:
508 try:
509 os.unlink(undo)
509 os.unlink(undo)
510 except OSError, inst:
510 except OSError, inst:
511 self.ui.warn(_('error removing undo: %s\n') % str(inst))
511 self.ui.warn(_('error removing undo: %s\n') % str(inst))
512
512
513 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
513 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
514 fp=None, changes=None, opts={}):
514 fp=None, changes=None, opts={}):
515 stat = opts.get('stat')
515 stat = opts.get('stat')
516 m = cmdutil.match(repo, files, opts)
516 m = cmdutil.match(repo, files, opts)
517 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
517 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
518 changes, stat, fp)
518 changes, stat, fp)
519
519
520 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
520 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
521 # first try just applying the patch
521 # first try just applying the patch
522 (err, n) = self.apply(repo, [patch], update_status=False,
522 (err, n) = self.apply(repo, [patch], update_status=False,
523 strict=True, merge=rev)
523 strict=True, merge=rev)
524
524
525 if err == 0:
525 if err == 0:
526 return (err, n)
526 return (err, n)
527
527
528 if n is None:
528 if n is None:
529 raise util.Abort(_("apply failed for patch %s") % patch)
529 raise util.Abort(_("apply failed for patch %s") % patch)
530
530
531 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
531 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
532
532
533 # apply failed, strip away that rev and merge.
533 # apply failed, strip away that rev and merge.
534 hg.clean(repo, head)
534 hg.clean(repo, head)
535 self.strip(repo, [n], update=False, backup='strip')
535 self.strip(repo, [n], update=False, backup='strip')
536
536
537 ctx = repo[rev]
537 ctx = repo[rev]
538 ret = hg.merge(repo, rev)
538 ret = hg.merge(repo, rev)
539 if ret:
539 if ret:
540 raise util.Abort(_("update returned %d") % ret)
540 raise util.Abort(_("update returned %d") % ret)
541 n = repo.commit(ctx.description(), ctx.user(), force=True)
541 n = repo.commit(ctx.description(), ctx.user(), force=True)
542 if n is None:
542 if n is None:
543 raise util.Abort(_("repo commit failed"))
543 raise util.Abort(_("repo commit failed"))
544 try:
544 try:
545 ph = patchheader(mergeq.join(patch), self.plainmode)
545 ph = patchheader(mergeq.join(patch), self.plainmode)
546 except:
546 except:
547 raise util.Abort(_("unable to read %s") % patch)
547 raise util.Abort(_("unable to read %s") % patch)
548
548
549 diffopts = self.patchopts(diffopts, patch)
549 diffopts = self.patchopts(diffopts, patch)
550 patchf = self.opener(patch, "w")
550 patchf = self.opener(patch, "w")
551 comments = str(ph)
551 comments = str(ph)
552 if comments:
552 if comments:
553 patchf.write(comments)
553 patchf.write(comments)
554 self.printdiff(repo, diffopts, head, n, fp=patchf)
554 self.printdiff(repo, diffopts, head, n, fp=patchf)
555 patchf.close()
555 patchf.close()
556 self.removeundo(repo)
556 self.removeundo(repo)
557 return (0, n)
557 return (0, n)
558
558
559 def qparents(self, repo, rev=None):
559 def qparents(self, repo, rev=None):
560 if rev is None:
560 if rev is None:
561 (p1, p2) = repo.dirstate.parents()
561 (p1, p2) = repo.dirstate.parents()
562 if p2 == nullid:
562 if p2 == nullid:
563 return p1
563 return p1
564 if not self.applied:
564 if not self.applied:
565 return None
565 return None
566 return self.applied[-1].node
566 return self.applied[-1].node
567 p1, p2 = repo.changelog.parents(rev)
567 p1, p2 = repo.changelog.parents(rev)
568 if p2 != nullid and p2 in [x.node for x in self.applied]:
568 if p2 != nullid and p2 in [x.node for x in self.applied]:
569 return p2
569 return p2
570 return p1
570 return p1
571
571
572 def mergepatch(self, repo, mergeq, series, diffopts):
572 def mergepatch(self, repo, mergeq, series, diffopts):
573 if not self.applied:
573 if not self.applied:
574 # each of the patches merged in will have two parents. This
574 # each of the patches merged in will have two parents. This
575 # can confuse the qrefresh, qdiff, and strip code because it
575 # can confuse the qrefresh, qdiff, and strip code because it
576 # needs to know which parent is actually in the patch queue.
576 # needs to know which parent is actually in the patch queue.
577 # so, we insert a merge marker with only one parent. This way
577 # so, we insert a merge marker with only one parent. This way
578 # the first patch in the queue is never a merge patch
578 # the first patch in the queue is never a merge patch
579 #
579 #
580 pname = ".hg.patches.merge.marker"
580 pname = ".hg.patches.merge.marker"
581 n = repo.commit('[mq]: merge marker', force=True)
581 n = repo.commit('[mq]: merge marker', force=True)
582 self.removeundo(repo)
582 self.removeundo(repo)
583 self.applied.append(statusentry(n, pname))
583 self.applied.append(statusentry(n, pname))
584 self.applied_dirty = 1
584 self.applied_dirty = 1
585
585
586 head = self.qparents(repo)
586 head = self.qparents(repo)
587
587
588 for patch in series:
588 for patch in series:
589 patch = mergeq.lookup(patch, strict=True)
589 patch = mergeq.lookup(patch, strict=True)
590 if not patch:
590 if not patch:
591 self.ui.warn(_("patch %s does not exist\n") % patch)
591 self.ui.warn(_("patch %s does not exist\n") % patch)
592 return (1, None)
592 return (1, None)
593 pushable, reason = self.pushable(patch)
593 pushable, reason = self.pushable(patch)
594 if not pushable:
594 if not pushable:
595 self.explain_pushable(patch, all_patches=True)
595 self.explain_pushable(patch, all_patches=True)
596 continue
596 continue
597 info = mergeq.isapplied(patch)
597 info = mergeq.isapplied(patch)
598 if not info:
598 if not info:
599 self.ui.warn(_("patch %s is not applied\n") % patch)
599 self.ui.warn(_("patch %s is not applied\n") % patch)
600 return (1, None)
600 return (1, None)
601 rev = info[1]
601 rev = info[1]
602 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
602 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
603 if head:
603 if head:
604 self.applied.append(statusentry(head, patch))
604 self.applied.append(statusentry(head, patch))
605 self.applied_dirty = 1
605 self.applied_dirty = 1
606 if err:
606 if err:
607 return (err, head)
607 return (err, head)
608 self.save_dirty()
608 self.save_dirty()
609 return (0, head)
609 return (0, head)
610
610
611 def patch(self, repo, patchfile):
611 def patch(self, repo, patchfile):
612 '''Apply patchfile to the working directory.
612 '''Apply patchfile to the working directory.
613 patchfile: name of patch file'''
613 patchfile: name of patch file'''
614 files = {}
614 files = {}
615 try:
615 try:
616 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
616 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
617 files=files, eolmode=None)
617 files=files, eolmode=None)
618 except Exception, inst:
618 except Exception, inst:
619 self.ui.note(str(inst) + '\n')
619 self.ui.note(str(inst) + '\n')
620 if not self.ui.verbose:
620 if not self.ui.verbose:
621 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
621 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
622 return (False, files, False)
622 return (False, files, False)
623
623
624 return (True, files, fuzz)
624 return (True, files, fuzz)
625
625
626 def apply(self, repo, series, list=False, update_status=True,
626 def apply(self, repo, series, list=False, update_status=True,
627 strict=False, patchdir=None, merge=None, all_files=None):
627 strict=False, patchdir=None, merge=None, all_files=None):
628 wlock = lock = tr = None
628 wlock = lock = tr = None
629 try:
629 try:
630 wlock = repo.wlock()
630 wlock = repo.wlock()
631 lock = repo.lock()
631 lock = repo.lock()
632 tr = repo.transaction("qpush")
632 tr = repo.transaction("qpush")
633 try:
633 try:
634 ret = self._apply(repo, series, list, update_status,
634 ret = self._apply(repo, series, list, update_status,
635 strict, patchdir, merge, all_files=all_files)
635 strict, patchdir, merge, all_files=all_files)
636 tr.close()
636 tr.close()
637 self.save_dirty()
637 self.save_dirty()
638 return ret
638 return ret
639 except:
639 except:
640 try:
640 try:
641 tr.abort()
641 tr.abort()
642 finally:
642 finally:
643 repo.invalidate()
643 repo.invalidate()
644 repo.dirstate.invalidate()
644 repo.dirstate.invalidate()
645 raise
645 raise
646 finally:
646 finally:
647 release(tr, lock, wlock)
647 release(tr, lock, wlock)
648 self.removeundo(repo)
648 self.removeundo(repo)
649
649
650 def _apply(self, repo, series, list=False, update_status=True,
650 def _apply(self, repo, series, list=False, update_status=True,
651 strict=False, patchdir=None, merge=None, all_files=None):
651 strict=False, patchdir=None, merge=None, all_files=None):
652 '''returns (error, hash)
652 '''returns (error, hash)
653 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
653 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
654 # TODO unify with commands.py
654 # TODO unify with commands.py
655 if not patchdir:
655 if not patchdir:
656 patchdir = self.path
656 patchdir = self.path
657 err = 0
657 err = 0
658 n = None
658 n = None
659 for patchname in series:
659 for patchname in series:
660 pushable, reason = self.pushable(patchname)
660 pushable, reason = self.pushable(patchname)
661 if not pushable:
661 if not pushable:
662 self.explain_pushable(patchname, all_patches=True)
662 self.explain_pushable(patchname, all_patches=True)
663 continue
663 continue
664 self.ui.status(_("applying %s\n") % patchname)
664 self.ui.status(_("applying %s\n") % patchname)
665 pf = os.path.join(patchdir, patchname)
665 pf = os.path.join(patchdir, patchname)
666
666
667 try:
667 try:
668 ph = patchheader(self.join(patchname), self.plainmode)
668 ph = patchheader(self.join(patchname), self.plainmode)
669 except:
669 except:
670 self.ui.warn(_("unable to read %s\n") % patchname)
670 self.ui.warn(_("unable to read %s\n") % patchname)
671 err = 1
671 err = 1
672 break
672 break
673
673
674 message = ph.message
674 message = ph.message
675 if not message:
675 if not message:
676 # The commit message should not be translated
676 # The commit message should not be translated
677 message = "imported patch %s\n" % patchname
677 message = "imported patch %s\n" % patchname
678 else:
678 else:
679 if list:
679 if list:
680 # The commit message should not be translated
680 # The commit message should not be translated
681 message.append("\nimported patch %s" % patchname)
681 message.append("\nimported patch %s" % patchname)
682 message = '\n'.join(message)
682 message = '\n'.join(message)
683
683
684 if ph.haspatch:
684 if ph.haspatch:
685 (patcherr, files, fuzz) = self.patch(repo, pf)
685 (patcherr, files, fuzz) = self.patch(repo, pf)
686 if all_files is not None:
686 if all_files is not None:
687 all_files.update(files)
687 all_files.update(files)
688 patcherr = not patcherr
688 patcherr = not patcherr
689 else:
689 else:
690 self.ui.warn(_("patch %s is empty\n") % patchname)
690 self.ui.warn(_("patch %s is empty\n") % patchname)
691 patcherr, files, fuzz = 0, [], 0
691 patcherr, files, fuzz = 0, [], 0
692
692
693 if merge and files:
693 if merge and files:
694 # Mark as removed/merged and update dirstate parent info
694 # Mark as removed/merged and update dirstate parent info
695 removed = []
695 removed = []
696 merged = []
696 merged = []
697 for f in files:
697 for f in files:
698 if os.path.lexists(repo.wjoin(f)):
698 if os.path.lexists(repo.wjoin(f)):
699 merged.append(f)
699 merged.append(f)
700 else:
700 else:
701 removed.append(f)
701 removed.append(f)
702 for f in removed:
702 for f in removed:
703 repo.dirstate.remove(f)
703 repo.dirstate.remove(f)
704 for f in merged:
704 for f in merged:
705 repo.dirstate.merge(f)
705 repo.dirstate.merge(f)
706 p1, p2 = repo.dirstate.parents()
706 p1, p2 = repo.dirstate.parents()
707 repo.dirstate.setparents(p1, merge)
707 repo.dirstate.setparents(p1, merge)
708
708
709 files = cmdutil.updatedir(self.ui, repo, files)
709 files = cmdutil.updatedir(self.ui, repo, files)
710 match = cmdutil.matchfiles(repo, files or [])
710 match = cmdutil.matchfiles(repo, files or [])
711 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
711 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
712
712
713 if n is None:
713 if n is None:
714 raise util.Abort(_("repository commit failed"))
714 raise util.Abort(_("repository commit failed"))
715
715
716 if update_status:
716 if update_status:
717 self.applied.append(statusentry(n, patchname))
717 self.applied.append(statusentry(n, patchname))
718
718
719 if patcherr:
719 if patcherr:
720 self.ui.warn(_("patch failed, rejects left in working dir\n"))
720 self.ui.warn(_("patch failed, rejects left in working dir\n"))
721 err = 2
721 err = 2
722 break
722 break
723
723
724 if fuzz and strict:
724 if fuzz and strict:
725 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
725 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
726 err = 3
726 err = 3
727 break
727 break
728 return (err, n)
728 return (err, n)
729
729
730 def _cleanup(self, patches, numrevs, keep=False):
730 def _cleanup(self, patches, numrevs, keep=False):
731 if not keep:
731 if not keep:
732 r = self.qrepo()
732 r = self.qrepo()
733 if r:
733 if r:
734 r[None].remove(patches, True)
734 r[None].remove(patches, True)
735 else:
735 else:
736 for p in patches:
736 for p in patches:
737 os.unlink(self.join(p))
737 os.unlink(self.join(p))
738
738
739 if numrevs:
739 if numrevs:
740 del self.applied[:numrevs]
740 del self.applied[:numrevs]
741 self.applied_dirty = 1
741 self.applied_dirty = 1
742
742
743 for i in sorted([self.find_series(p) for p in patches], reverse=True):
743 for i in sorted([self.find_series(p) for p in patches], reverse=True):
744 del self.full_series[i]
744 del self.full_series[i]
745 self.parse_series()
745 self.parse_series()
746 self.series_dirty = 1
746 self.series_dirty = 1
747
747
748 def _revpatches(self, repo, revs):
748 def _revpatches(self, repo, revs):
749 firstrev = repo[self.applied[0].node].rev()
749 firstrev = repo[self.applied[0].node].rev()
750 patches = []
750 patches = []
751 for i, rev in enumerate(revs):
751 for i, rev in enumerate(revs):
752
752
753 if rev < firstrev:
753 if rev < firstrev:
754 raise util.Abort(_('revision %d is not managed') % rev)
754 raise util.Abort(_('revision %d is not managed') % rev)
755
755
756 ctx = repo[rev]
756 ctx = repo[rev]
757 base = self.applied[i].node
757 base = self.applied[i].node
758 if ctx.node() != base:
758 if ctx.node() != base:
759 msg = _('cannot delete revision %d above applied patches')
759 msg = _('cannot delete revision %d above applied patches')
760 raise util.Abort(msg % rev)
760 raise util.Abort(msg % rev)
761
761
762 patch = self.applied[i].name
762 patch = self.applied[i].name
763 for fmt in ('[mq]: %s', 'imported patch %s'):
763 for fmt in ('[mq]: %s', 'imported patch %s'):
764 if ctx.description() == fmt % patch:
764 if ctx.description() == fmt % patch:
765 msg = _('patch %s finalized without changeset message\n')
765 msg = _('patch %s finalized without changeset message\n')
766 repo.ui.status(msg % patch)
766 repo.ui.status(msg % patch)
767 break
767 break
768
768
769 patches.append(patch)
769 patches.append(patch)
770 return patches
770 return patches
771
771
772 def finish(self, repo, revs):
772 def finish(self, repo, revs):
773 patches = self._revpatches(repo, sorted(revs))
773 patches = self._revpatches(repo, sorted(revs))
774 self._cleanup(patches, len(patches))
774 self._cleanup(patches, len(patches))
775
775
776 def delete(self, repo, patches, opts):
776 def delete(self, repo, patches, opts):
777 if not patches and not opts.get('rev'):
777 if not patches and not opts.get('rev'):
778 raise util.Abort(_('qdelete requires at least one revision or '
778 raise util.Abort(_('qdelete requires at least one revision or '
779 'patch name'))
779 'patch name'))
780
780
781 realpatches = []
781 realpatches = []
782 for patch in patches:
782 for patch in patches:
783 patch = self.lookup(patch, strict=True)
783 patch = self.lookup(patch, strict=True)
784 info = self.isapplied(patch)
784 info = self.isapplied(patch)
785 if info:
785 if info:
786 raise util.Abort(_("cannot delete applied patch %s") % patch)
786 raise util.Abort(_("cannot delete applied patch %s") % patch)
787 if patch not in self.series:
787 if patch not in self.series:
788 raise util.Abort(_("patch %s not in series file") % patch)
788 raise util.Abort(_("patch %s not in series file") % patch)
789 if patch not in realpatches:
789 if patch not in realpatches:
790 realpatches.append(patch)
790 realpatches.append(patch)
791
791
792 numrevs = 0
792 numrevs = 0
793 if opts.get('rev'):
793 if opts.get('rev'):
794 if not self.applied:
794 if not self.applied:
795 raise util.Abort(_('no patches applied'))
795 raise util.Abort(_('no patches applied'))
796 revs = cmdutil.revrange(repo, opts.get('rev'))
796 revs = cmdutil.revrange(repo, opts.get('rev'))
797 if len(revs) > 1 and revs[0] > revs[1]:
797 if len(revs) > 1 and revs[0] > revs[1]:
798 revs.reverse()
798 revs.reverse()
799 revpatches = self._revpatches(repo, revs)
799 revpatches = self._revpatches(repo, revs)
800 realpatches += revpatches
800 realpatches += revpatches
801 numrevs = len(revpatches)
801 numrevs = len(revpatches)
802
802
803 self._cleanup(realpatches, numrevs, opts.get('keep'))
803 self._cleanup(realpatches, numrevs, opts.get('keep'))
804
804
805 def check_toppatch(self, repo):
805 def check_toppatch(self, repo):
806 if self.applied:
806 if self.applied:
807 top = self.applied[-1].node
807 top = self.applied[-1].node
808 patch = self.applied[-1].name
808 patch = self.applied[-1].name
809 pp = repo.dirstate.parents()
809 pp = repo.dirstate.parents()
810 if top not in pp:
810 if top not in pp:
811 raise util.Abort(_("working directory revision is not qtip"))
811 raise util.Abort(_("working directory revision is not qtip"))
812 return top, patch
812 return top, patch
813 return None, None
813 return None, None
814
814
815 def check_substate(self, repo):
815 def check_substate(self, repo):
816 '''return list of subrepos at a different revision than substate.
816 '''return list of subrepos at a different revision than substate.
817 Abort if any subrepos have uncommitted changes.'''
817 Abort if any subrepos have uncommitted changes.'''
818 inclsubs = []
818 inclsubs = []
819 wctx = repo[None]
819 wctx = repo[None]
820 for s in wctx.substate:
820 for s in wctx.substate:
821 if wctx.sub(s).dirty(True):
821 if wctx.sub(s).dirty(True):
822 raise util.Abort(
822 raise util.Abort(
823 _("uncommitted changes in subrepository %s") % s)
823 _("uncommitted changes in subrepository %s") % s)
824 elif wctx.sub(s).dirty():
824 elif wctx.sub(s).dirty():
825 inclsubs.append(s)
825 inclsubs.append(s)
826 return inclsubs
826 return inclsubs
827
827
828 def check_localchanges(self, repo, force=False, refresh=True):
828 def check_localchanges(self, repo, force=False, refresh=True):
829 m, a, r, d = repo.status()[:4]
829 m, a, r, d = repo.status()[:4]
830 if (m or a or r or d) and not force:
830 if (m or a or r or d) and not force:
831 if refresh:
831 if refresh:
832 raise util.Abort(_("local changes found, refresh first"))
832 raise util.Abort(_("local changes found, refresh first"))
833 else:
833 else:
834 raise util.Abort(_("local changes found"))
834 raise util.Abort(_("local changes found"))
835 return m, a, r, d
835 return m, a, r, d
836
836
837 _reserved = ('series', 'status', 'guards')
837 _reserved = ('series', 'status', 'guards')
838 def check_reserved_name(self, name):
838 def check_reserved_name(self, name):
839 if (name in self._reserved or name.startswith('.hg')
839 if (name in self._reserved or name.startswith('.hg')
840 or name.startswith('.mq') or '#' in name or ':' in name):
840 or name.startswith('.mq') or '#' in name or ':' in name):
841 raise util.Abort(_('"%s" cannot be used as the name of a patch')
841 raise util.Abort(_('"%s" cannot be used as the name of a patch')
842 % name)
842 % name)
843
843
844 def new(self, repo, patchfn, *pats, **opts):
844 def new(self, repo, patchfn, *pats, **opts):
845 """options:
845 """options:
846 msg: a string or a no-argument function returning a string
846 msg: a string or a no-argument function returning a string
847 """
847 """
848 msg = opts.get('msg')
848 msg = opts.get('msg')
849 user = opts.get('user')
849 user = opts.get('user')
850 date = opts.get('date')
850 date = opts.get('date')
851 if date:
851 if date:
852 date = util.parsedate(date)
852 date = util.parsedate(date)
853 diffopts = self.diffopts({'git': opts.get('git')})
853 diffopts = self.diffopts({'git': opts.get('git')})
854 self.check_reserved_name(patchfn)
854 self.check_reserved_name(patchfn)
855 if os.path.exists(self.join(patchfn)):
855 if os.path.exists(self.join(patchfn)):
856 if os.path.isdir(self.join(patchfn)):
856 if os.path.isdir(self.join(patchfn)):
857 raise util.Abort(_('"%s" already exists as a directory')
857 raise util.Abort(_('"%s" already exists as a directory')
858 % patchfn)
858 % patchfn)
859 else:
859 else:
860 raise util.Abort(_('patch "%s" already exists') % patchfn)
860 raise util.Abort(_('patch "%s" already exists') % patchfn)
861
861
862 inclsubs = self.check_substate(repo)
862 inclsubs = self.check_substate(repo)
863 if inclsubs:
863 if inclsubs:
864 inclsubs.append('.hgsubstate')
864 inclsubs.append('.hgsubstate')
865 if opts.get('include') or opts.get('exclude') or pats:
865 if opts.get('include') or opts.get('exclude') or pats:
866 if inclsubs:
866 if inclsubs:
867 pats = list(pats or []) + inclsubs
867 pats = list(pats or []) + inclsubs
868 match = cmdutil.match(repo, pats, opts)
868 match = cmdutil.match(repo, pats, opts)
869 # detect missing files in pats
869 # detect missing files in pats
870 def badfn(f, msg):
870 def badfn(f, msg):
871 if f != '.hgsubstate': # .hgsubstate is auto-created
871 if f != '.hgsubstate': # .hgsubstate is auto-created
872 raise util.Abort('%s: %s' % (f, msg))
872 raise util.Abort('%s: %s' % (f, msg))
873 match.bad = badfn
873 match.bad = badfn
874 m, a, r, d = repo.status(match=match)[:4]
874 m, a, r, d = repo.status(match=match)[:4]
875 else:
875 else:
876 m, a, r, d = self.check_localchanges(repo, force=True)
876 m, a, r, d = self.check_localchanges(repo, force=True)
877 match = cmdutil.matchfiles(repo, m + a + r + inclsubs)
877 match = cmdutil.matchfiles(repo, m + a + r + inclsubs)
878 if len(repo[None].parents()) > 1:
878 if len(repo[None].parents()) > 1:
879 raise util.Abort(_('cannot manage merge changesets'))
879 raise util.Abort(_('cannot manage merge changesets'))
880 commitfiles = m + a + r
880 commitfiles = m + a + r
881 self.check_toppatch(repo)
881 self.check_toppatch(repo)
882 insert = self.full_series_end()
882 insert = self.full_series_end()
883 wlock = repo.wlock()
883 wlock = repo.wlock()
884 try:
884 try:
885 try:
885 try:
886 # if patch file write fails, abort early
886 # if patch file write fails, abort early
887 p = self.opener(patchfn, "w")
887 p = self.opener(patchfn, "w")
888 except IOError, e:
888 except IOError, e:
889 raise util.Abort(_('cannot write patch "%s": %s')
889 raise util.Abort(_('cannot write patch "%s": %s')
890 % (patchfn, e.strerror))
890 % (patchfn, e.strerror))
891 try:
891 try:
892 if self.plainmode:
892 if self.plainmode:
893 if user:
893 if user:
894 p.write("From: " + user + "\n")
894 p.write("From: " + user + "\n")
895 if not date:
895 if not date:
896 p.write("\n")
896 p.write("\n")
897 if date:
897 if date:
898 p.write("Date: %d %d\n\n" % date)
898 p.write("Date: %d %d\n\n" % date)
899 else:
899 else:
900 p.write("# HG changeset patch\n")
900 p.write("# HG changeset patch\n")
901 p.write("# Parent "
901 p.write("# Parent "
902 + hex(repo[None].p1().node()) + "\n")
902 + hex(repo[None].p1().node()) + "\n")
903 if user:
903 if user:
904 p.write("# User " + user + "\n")
904 p.write("# User " + user + "\n")
905 if date:
905 if date:
906 p.write("# Date %s %s\n\n" % date)
906 p.write("# Date %s %s\n\n" % date)
907 if hasattr(msg, '__call__'):
907 if hasattr(msg, '__call__'):
908 msg = msg()
908 msg = msg()
909 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
909 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
910 n = repo.commit(commitmsg, user, date, match=match, force=True)
910 n = repo.commit(commitmsg, user, date, match=match, force=True)
911 if n is None:
911 if n is None:
912 raise util.Abort(_("repo commit failed"))
912 raise util.Abort(_("repo commit failed"))
913 try:
913 try:
914 self.full_series[insert:insert] = [patchfn]
914 self.full_series[insert:insert] = [patchfn]
915 self.applied.append(statusentry(n, patchfn))
915 self.applied.append(statusentry(n, patchfn))
916 self.parse_series()
916 self.parse_series()
917 self.series_dirty = 1
917 self.series_dirty = 1
918 self.applied_dirty = 1
918 self.applied_dirty = 1
919 if msg:
919 if msg:
920 msg = msg + "\n\n"
920 msg = msg + "\n\n"
921 p.write(msg)
921 p.write(msg)
922 if commitfiles:
922 if commitfiles:
923 parent = self.qparents(repo, n)
923 parent = self.qparents(repo, n)
924 chunks = patch.diff(repo, node1=parent, node2=n,
924 chunks = patch.diff(repo, node1=parent, node2=n,
925 match=match, opts=diffopts)
925 match=match, opts=diffopts)
926 for chunk in chunks:
926 for chunk in chunks:
927 p.write(chunk)
927 p.write(chunk)
928 p.close()
928 p.close()
929 wlock.release()
929 wlock.release()
930 wlock = None
930 wlock = None
931 r = self.qrepo()
931 r = self.qrepo()
932 if r:
932 if r:
933 r[None].add([patchfn])
933 r[None].add([patchfn])
934 except:
934 except:
935 repo.rollback()
935 repo.rollback()
936 raise
936 raise
937 except Exception:
937 except Exception:
938 patchpath = self.join(patchfn)
938 patchpath = self.join(patchfn)
939 try:
939 try:
940 os.unlink(patchpath)
940 os.unlink(patchpath)
941 except:
941 except:
942 self.ui.warn(_('error unlinking %s\n') % patchpath)
942 self.ui.warn(_('error unlinking %s\n') % patchpath)
943 raise
943 raise
944 self.removeundo(repo)
944 self.removeundo(repo)
945 finally:
945 finally:
946 release(wlock)
946 release(wlock)
947
947
948 def strip(self, repo, revs, update=True, backup="all", force=None):
948 def strip(self, repo, revs, update=True, backup="all", force=None):
949 wlock = lock = None
949 wlock = lock = None
950 try:
950 try:
951 wlock = repo.wlock()
951 wlock = repo.wlock()
952 lock = repo.lock()
952 lock = repo.lock()
953
953
954 if update:
954 if update:
955 self.check_localchanges(repo, force=force, refresh=False)
955 self.check_localchanges(repo, force=force, refresh=False)
956 urev = self.qparents(repo, revs[0])
956 urev = self.qparents(repo, revs[0])
957 hg.clean(repo, urev)
957 hg.clean(repo, urev)
958 repo.dirstate.write()
958 repo.dirstate.write()
959
959
960 self.removeundo(repo)
960 self.removeundo(repo)
961 for rev in revs:
961 for rev in revs:
962 repair.strip(self.ui, repo, rev, backup)
962 repair.strip(self.ui, repo, rev, backup)
963 # strip may have unbundled a set of backed up revisions after
963 # strip may have unbundled a set of backed up revisions after
964 # the actual strip
964 # the actual strip
965 self.removeundo(repo)
965 self.removeundo(repo)
966 finally:
966 finally:
967 release(lock, wlock)
967 release(lock, wlock)
968
968
969 def isapplied(self, patch):
969 def isapplied(self, patch):
970 """returns (index, rev, patch)"""
970 """returns (index, rev, patch)"""
971 for i, a in enumerate(self.applied):
971 for i, a in enumerate(self.applied):
972 if a.name == patch:
972 if a.name == patch:
973 return (i, a.node, a.name)
973 return (i, a.node, a.name)
974 return None
974 return None
975
975
976 # if the exact patch name does not exist, we try a few
976 # if the exact patch name does not exist, we try a few
977 # variations. If strict is passed, we try only #1
977 # variations. If strict is passed, we try only #1
978 #
978 #
979 # 1) a number to indicate an offset in the series file
979 # 1) a number to indicate an offset in the series file
980 # 2) a unique substring of the patch name was given
980 # 2) a unique substring of the patch name was given
981 # 3) patchname[-+]num to indicate an offset in the series file
981 # 3) patchname[-+]num to indicate an offset in the series file
982 def lookup(self, patch, strict=False):
982 def lookup(self, patch, strict=False):
983 patch = patch and str(patch)
983 patch = patch and str(patch)
984
984
985 def partial_name(s):
985 def partial_name(s):
986 if s in self.series:
986 if s in self.series:
987 return s
987 return s
988 matches = [x for x in self.series if s in x]
988 matches = [x for x in self.series if s in x]
989 if len(matches) > 1:
989 if len(matches) > 1:
990 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
990 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
991 for m in matches:
991 for m in matches:
992 self.ui.warn(' %s\n' % m)
992 self.ui.warn(' %s\n' % m)
993 return None
993 return None
994 if matches:
994 if matches:
995 return matches[0]
995 return matches[0]
996 if self.series and self.applied:
996 if self.series and self.applied:
997 if s == 'qtip':
997 if s == 'qtip':
998 return self.series[self.series_end(True)-1]
998 return self.series[self.series_end(True)-1]
999 if s == 'qbase':
999 if s == 'qbase':
1000 return self.series[0]
1000 return self.series[0]
1001 return None
1001 return None
1002
1002
1003 if patch is None:
1003 if patch is None:
1004 return None
1004 return None
1005 if patch in self.series:
1005 if patch in self.series:
1006 return patch
1006 return patch
1007
1007
1008 if not os.path.isfile(self.join(patch)):
1008 if not os.path.isfile(self.join(patch)):
1009 try:
1009 try:
1010 sno = int(patch)
1010 sno = int(patch)
1011 except (ValueError, OverflowError):
1011 except (ValueError, OverflowError):
1012 pass
1012 pass
1013 else:
1013 else:
1014 if -len(self.series) <= sno < len(self.series):
1014 if -len(self.series) <= sno < len(self.series):
1015 return self.series[sno]
1015 return self.series[sno]
1016
1016
1017 if not strict:
1017 if not strict:
1018 res = partial_name(patch)
1018 res = partial_name(patch)
1019 if res:
1019 if res:
1020 return res
1020 return res
1021 minus = patch.rfind('-')
1021 minus = patch.rfind('-')
1022 if minus >= 0:
1022 if minus >= 0:
1023 res = partial_name(patch[:minus])
1023 res = partial_name(patch[:minus])
1024 if res:
1024 if res:
1025 i = self.series.index(res)
1025 i = self.series.index(res)
1026 try:
1026 try:
1027 off = int(patch[minus + 1:] or 1)
1027 off = int(patch[minus + 1:] or 1)
1028 except (ValueError, OverflowError):
1028 except (ValueError, OverflowError):
1029 pass
1029 pass
1030 else:
1030 else:
1031 if i - off >= 0:
1031 if i - off >= 0:
1032 return self.series[i - off]
1032 return self.series[i - off]
1033 plus = patch.rfind('+')
1033 plus = patch.rfind('+')
1034 if plus >= 0:
1034 if plus >= 0:
1035 res = partial_name(patch[:plus])
1035 res = partial_name(patch[:plus])
1036 if res:
1036 if res:
1037 i = self.series.index(res)
1037 i = self.series.index(res)
1038 try:
1038 try:
1039 off = int(patch[plus + 1:] or 1)
1039 off = int(patch[plus + 1:] or 1)
1040 except (ValueError, OverflowError):
1040 except (ValueError, OverflowError):
1041 pass
1041 pass
1042 else:
1042 else:
1043 if i + off < len(self.series):
1043 if i + off < len(self.series):
1044 return self.series[i + off]
1044 return self.series[i + off]
1045 raise util.Abort(_("patch %s not in series") % patch)
1045 raise util.Abort(_("patch %s not in series") % patch)
1046
1046
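# --- Editor's note (illustrative sketch, not part of this changeset) --------
# Examples of the name resolution performed by lookup() above, assuming a
# hypothetical series ['fix-crash.patch', 'new-feature.patch',
# 'build-tweak.patch'] with the first two patches applied:
#
#     lookup('new-feature.patch')    -> 'new-feature.patch'  # exact name
#     lookup('1')                    -> 'new-feature.patch'  # series index
#     lookup('crash')                -> 'fix-crash.patch'    # unique substring
#     lookup('qtip')                 -> 'new-feature.patch'  # topmost applied
#     lookup('qbase')                -> 'fix-crash.patch'    # first in series
#     lookup('new-feature.patch+1')  -> 'build-tweak.patch'  # relative offset
#     lookup('build-tweak.patch-2')  -> 'fix-crash.patch'
#
# With strict=True only exact names and numeric indexes are accepted.
# ----------------------------------------------------------------------------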
1047 def push(self, repo, patch=None, force=False, list=False,
1047 def push(self, repo, patch=None, force=False, list=False,
1048 mergeq=None, all=False, move=False, exact=False):
1048 mergeq=None, all=False, move=False, exact=False):
1049 diffopts = self.diffopts()
1049 diffopts = self.diffopts()
1050 wlock = repo.wlock()
1050 wlock = repo.wlock()
1051 try:
1051 try:
1052 heads = []
1052 heads = []
1053 for b, ls in repo.branchmap().iteritems():
1053 for b, ls in repo.branchmap().iteritems():
1054 heads += ls
1054 heads += ls
1055 if not heads:
1055 if not heads:
1056 heads = [nullid]
1056 heads = [nullid]
1057 if repo.dirstate.p1() not in heads and not exact:
1057 if repo.dirstate.p1() not in heads and not exact:
1058 self.ui.status(_("(working directory not at a head)\n"))
1058 self.ui.status(_("(working directory not at a head)\n"))
1059
1059
1060 if not self.series:
1060 if not self.series:
1061 self.ui.warn(_('no patches in series\n'))
1061 self.ui.warn(_('no patches in series\n'))
1062 return 0
1062 return 0
1063
1063
1064 patch = self.lookup(patch)
1064 patch = self.lookup(patch)
1065 # Suppose our series file is: A B C and the current 'top'
1065 # Suppose our series file is: A B C and the current 'top'
1066 # patch is B. qpush C should be performed (moving forward)
1066 # patch is B. qpush C should be performed (moving forward)
1067 # qpush B is a NOP (no change) qpush A is an error (can't
1067 # qpush B is a NOP (no change) qpush A is an error (can't
1068 # go backwards with qpush)
1068 # go backwards with qpush)
1069 if patch:
1069 if patch:
1070 info = self.isapplied(patch)
1070 info = self.isapplied(patch)
1071 if info and info[0] >= len(self.applied) - 1:
1071 if info and info[0] >= len(self.applied) - 1:
1072 self.ui.warn(
1072 self.ui.warn(
1073 _('qpush: %s is already at the top\n') % patch)
1073 _('qpush: %s is already at the top\n') % patch)
1074 return 0
1074 return 0
1075
1075
1076 pushable, reason = self.pushable(patch)
1076 pushable, reason = self.pushable(patch)
1077 if pushable:
1077 if pushable:
1078 if self.series.index(patch) < self.series_end():
1078 if self.series.index(patch) < self.series_end():
1079 raise util.Abort(
1079 raise util.Abort(
1080 _("cannot push to a previous patch: %s") % patch)
1080 _("cannot push to a previous patch: %s") % patch)
1081 else:
1081 else:
1082 if reason:
1082 if reason:
1083 reason = _('guarded by %r') % reason
1083 reason = _('guarded by %r') % reason
1084 else:
1084 else:
1085 reason = _('no matching guards')
1085 reason = _('no matching guards')
1086 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1086 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1087 return 1
1087 return 1
1088 elif all:
1088 elif all:
1089 patch = self.series[-1]
1089 patch = self.series[-1]
1090 if self.isapplied(patch):
1090 if self.isapplied(patch):
1091 self.ui.warn(_('all patches are currently applied\n'))
1091 self.ui.warn(_('all patches are currently applied\n'))
1092 return 0
1092 return 0
1093
1093
1094 # Following the above example, starting at 'top' of B:
1094 # Following the above example, starting at 'top' of B:
1095 # qpush should be performed (pushes C), but a subsequent
1095 # qpush should be performed (pushes C), but a subsequent
1096 # qpush without an argument is an error (nothing to
1096 # qpush without an argument is an error (nothing to
1097 # apply). This allows a loop of "...while hg qpush..." to
1097 # apply). This allows a loop of "...while hg qpush..." to
1098 # work as it detects an error when done
1098 # work as it detects an error when done
1099 start = self.series_end()
1099 start = self.series_end()
1100 if start == len(self.series):
1100 if start == len(self.series):
1101 self.ui.warn(_('patch series already fully applied\n'))
1101 self.ui.warn(_('patch series already fully applied\n'))
1102 return 1
1102 return 1
1103 if not force:
1103 if not force:
1104 self.check_localchanges(repo)
1104 self.check_localchanges(repo)
1105
1105
1106 if exact:
1106 if exact:
1107 if move:
1107 if move:
1108 raise util.Abort(_("cannot use --exact and --move together"))
1108 raise util.Abort(_("cannot use --exact and --move together"))
1109 if self.applied:
1109 if self.applied:
1110 raise util.Abort(_("cannot push --exact with applied patches"))
1110 raise util.Abort(_("cannot push --exact with applied patches"))
1111 root = self.series[start]
1111 root = self.series[start]
1112 target = patchheader(self.join(root), self.plainmode).parent
1112 target = patchheader(self.join(root), self.plainmode).parent
1113 if not target:
1113 if not target:
1114 raise util.Abort(_("%s does not have a parent recorded") % root)
1114 raise util.Abort(_("%s does not have a parent recorded") % root)
1115 if not repo[target] == repo['.']:
1115 if not repo[target] == repo['.']:
1116 hg.update(repo, target)
1116 hg.update(repo, target)
1117
1117
1118 if move:
1118 if move:
1119 if not patch:
1119 if not patch:
1120 raise util.Abort(_("please specify the patch to move"))
1120 raise util.Abort(_("please specify the patch to move"))
1121 for i, rpn in enumerate(self.full_series[start:]):
1121 for i, rpn in enumerate(self.full_series[start:]):
1122 # strip markers for patch guards
1122 # strip markers for patch guards
1123 if self.guard_re.split(rpn, 1)[0] == patch:
1123 if self.guard_re.split(rpn, 1)[0] == patch:
1124 break
1124 break
1125 index = start + i
1125 index = start + i
1126 assert index < len(self.full_series)
1126 assert index < len(self.full_series)
1127 fullpatch = self.full_series[index]
1127 fullpatch = self.full_series[index]
1128 del self.full_series[index]
1128 del self.full_series[index]
1129 self.full_series.insert(start, fullpatch)
1129 self.full_series.insert(start, fullpatch)
1130 self.parse_series()
1130 self.parse_series()
1131 self.series_dirty = 1
1131 self.series_dirty = 1
1132
1132
1133 self.applied_dirty = 1
1133 self.applied_dirty = 1
1134 if start > 0:
1134 if start > 0:
1135 self.check_toppatch(repo)
1135 self.check_toppatch(repo)
1136 if not patch:
1136 if not patch:
1137 patch = self.series[start]
1137 patch = self.series[start]
1138 end = start + 1
1138 end = start + 1
1139 else:
1139 else:
1140 end = self.series.index(patch, start) + 1
1140 end = self.series.index(patch, start) + 1
1141
1141
1142 s = self.series[start:end]
1142 s = self.series[start:end]
1143 all_files = set()
1143 all_files = set()
1144 try:
1144 try:
1145 if mergeq:
1145 if mergeq:
1146 ret = self.mergepatch(repo, mergeq, s, diffopts)
1146 ret = self.mergepatch(repo, mergeq, s, diffopts)
1147 else:
1147 else:
1148 ret = self.apply(repo, s, list, all_files=all_files)
1148 ret = self.apply(repo, s, list, all_files=all_files)
1149 except:
1149 except:
1150 self.ui.warn(_('cleaning up working directory...'))
1150 self.ui.warn(_('cleaning up working directory...'))
1151 node = repo.dirstate.p1()
1151 node = repo.dirstate.p1()
1152 hg.revert(repo, node, None)
1152 hg.revert(repo, node, None)
1153 # only remove unknown files that we know we touched or
1153 # only remove unknown files that we know we touched or
1154 # created while patching
1154 # created while patching
1155 for f in all_files:
1155 for f in all_files:
1156 if f not in repo.dirstate:
1156 if f not in repo.dirstate:
1157 try:
1157 try:
1158 util.unlinkpath(repo.wjoin(f))
1158 util.unlinkpath(repo.wjoin(f))
1159 except OSError, inst:
1159 except OSError, inst:
1160 if inst.errno != errno.ENOENT:
1160 if inst.errno != errno.ENOENT:
1161 raise
1161 raise
1162 self.ui.warn(_('done\n'))
1162 self.ui.warn(_('done\n'))
1163 raise
1163 raise
1164
1164
1165 if not self.applied:
1165 if not self.applied:
1166 return ret[0]
1166 return ret[0]
1167 top = self.applied[-1].name
1167 top = self.applied[-1].name
1168 if ret[0] and ret[0] > 1:
1168 if ret[0] and ret[0] > 1:
1169 msg = _("errors during apply, please fix and refresh %s\n")
1169 msg = _("errors during apply, please fix and refresh %s\n")
1170 self.ui.write(msg % top)
1170 self.ui.write(msg % top)
1171 else:
1171 else:
1172 self.ui.write(_("now at: %s\n") % top)
1172 self.ui.write(_("now at: %s\n") % top)
1173 return ret[0]
1173 return ret[0]
1174
1174
1175 finally:
1175 finally:
1176 wlock.release()
1176 wlock.release()
1177
1177
1178 def pop(self, repo, patch=None, force=False, update=True, all=False):
1178 def pop(self, repo, patch=None, force=False, update=True, all=False):
1179 wlock = repo.wlock()
1179 wlock = repo.wlock()
1180 try:
1180 try:
1181 if patch:
1181 if patch:
1182 # index, rev, patch
1182 # index, rev, patch
1183 info = self.isapplied(patch)
1183 info = self.isapplied(patch)
1184 if not info:
1184 if not info:
1185 patch = self.lookup(patch)
1185 patch = self.lookup(patch)
1186 info = self.isapplied(patch)
1186 info = self.isapplied(patch)
1187 if not info:
1187 if not info:
1188 raise util.Abort(_("patch %s is not applied") % patch)
1188 raise util.Abort(_("patch %s is not applied") % patch)
1189
1189
1190 if not self.applied:
1190 if not self.applied:
1191 # Allow qpop -a to work repeatedly,
1191 # Allow qpop -a to work repeatedly,
1192 # but not qpop without an argument
1192 # but not qpop without an argument
1193 self.ui.warn(_("no patches applied\n"))
1193 self.ui.warn(_("no patches applied\n"))
1194 return not all
1194 return not all
1195
1195
1196 if all:
1196 if all:
1197 start = 0
1197 start = 0
1198 elif patch:
1198 elif patch:
1199 start = info[0] + 1
1199 start = info[0] + 1
1200 else:
1200 else:
1201 start = len(self.applied) - 1
1201 start = len(self.applied) - 1
1202
1202
1203 if start >= len(self.applied):
1203 if start >= len(self.applied):
1204 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1204 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1205 return
1205 return
1206
1206
1207 if not update:
1207 if not update:
1208 parents = repo.dirstate.parents()
1208 parents = repo.dirstate.parents()
1209 rr = [x.node for x in self.applied]
1209 rr = [x.node for x in self.applied]
1210 for p in parents:
1210 for p in parents:
1211 if p in rr:
1211 if p in rr:
1212 self.ui.warn(_("qpop: forcing dirstate update\n"))
1212 self.ui.warn(_("qpop: forcing dirstate update\n"))
1213 update = True
1213 update = True
1214 else:
1214 else:
1215 parents = [p.node() for p in repo[None].parents()]
1215 parents = [p.node() for p in repo[None].parents()]
1216 needupdate = False
1216 needupdate = False
1217 for entry in self.applied[start:]:
1217 for entry in self.applied[start:]:
1218 if entry.node in parents:
1218 if entry.node in parents:
1219 needupdate = True
1219 needupdate = True
1220 break
1220 break
1221 update = needupdate
1221 update = needupdate
1222
1222
1223 if not force and update:
1223 if not force and update:
1224 self.check_localchanges(repo)
1224 self.check_localchanges(repo)
1225
1225
1226 self.applied_dirty = 1
1226 self.applied_dirty = 1
1227 end = len(self.applied)
1227 end = len(self.applied)
1228 rev = self.applied[start].node
1228 rev = self.applied[start].node
1229 if update:
1229 if update:
1230 top = self.check_toppatch(repo)[0]
1230 top = self.check_toppatch(repo)[0]
1231
1231
1232 try:
1232 try:
1233 heads = repo.changelog.heads(rev)
1233 heads = repo.changelog.heads(rev)
1234 except error.LookupError:
1234 except error.LookupError:
1235 node = short(rev)
1235 node = short(rev)
1236 raise util.Abort(_('trying to pop unknown node %s') % node)
1236 raise util.Abort(_('trying to pop unknown node %s') % node)
1237
1237
1238 if heads != [self.applied[-1].node]:
1238 if heads != [self.applied[-1].node]:
1239 raise util.Abort(_("popping would remove a revision not "
1239 raise util.Abort(_("popping would remove a revision not "
1240 "managed by this patch queue"))
1240 "managed by this patch queue"))
1241
1241
1242 # we know there are no local changes, so we can make a simplified
1242 # we know there are no local changes, so we can make a simplified
1243 # form of hg.update.
1243 # form of hg.update.
1244 if update:
1244 if update:
1245 qp = self.qparents(repo, rev)
1245 qp = self.qparents(repo, rev)
1246 ctx = repo[qp]
1246 ctx = repo[qp]
1247 m, a, r, d = repo.status(qp, top)[:4]
1247 m, a, r, d = repo.status(qp, top)[:4]
1248 if d:
1248 if d:
1249 raise util.Abort(_("deletions found between repo revs"))
1249 raise util.Abort(_("deletions found between repo revs"))
1250 for f in a:
1250 for f in a:
1251 try:
1251 try:
1252 util.unlinkpath(repo.wjoin(f))
1252 util.unlinkpath(repo.wjoin(f))
1253 except OSError, e:
1253 except OSError, e:
1254 if e.errno != errno.ENOENT:
1254 if e.errno != errno.ENOENT:
1255 raise
1255 raise
1256 repo.dirstate.forget(f)
1256 repo.dirstate.forget(f)
1257 for f in m + r:
1257 for f in m + r:
1258 fctx = ctx[f]
1258 fctx = ctx[f]
1259 repo.wwrite(f, fctx.data(), fctx.flags())
1259 repo.wwrite(f, fctx.data(), fctx.flags())
1260 repo.dirstate.normal(f)
1260 repo.dirstate.normal(f)
1261 repo.dirstate.setparents(qp, nullid)
1261 repo.dirstate.setparents(qp, nullid)
1262 for patch in reversed(self.applied[start:end]):
1262 for patch in reversed(self.applied[start:end]):
1263 self.ui.status(_("popping %s\n") % patch.name)
1263 self.ui.status(_("popping %s\n") % patch.name)
1264 del self.applied[start:end]
1264 del self.applied[start:end]
1265 self.strip(repo, [rev], update=False, backup='strip')
1265 self.strip(repo, [rev], update=False, backup='strip')
1266 if self.applied:
1266 if self.applied:
1267 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1267 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1268 else:
1268 else:
1269 self.ui.write(_("patch queue now empty\n"))
1269 self.ui.write(_("patch queue now empty\n"))
1270 finally:
1270 finally:
1271 wlock.release()
1271 wlock.release()
1272
1272
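# A minimal sketch of how the pop method above is usually driven from the
# command line (the patch name 'bugfix' is a hypothetical example):
#
#   hg qpop              # pop the topmost applied patch
#   hg qpop bugfix       # pop until 'bugfix' is the topmost applied patch
#   hg qpop --all        # pop every applied patch
#   hg qpop --force      # skip the local-changes check performed above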
1273 def diff(self, repo, pats, opts):
1273 def diff(self, repo, pats, opts):
1274 top, patch = self.check_toppatch(repo)
1274 top, patch = self.check_toppatch(repo)
1275 if not top:
1275 if not top:
1276 self.ui.write(_("no patches applied\n"))
1276 self.ui.write(_("no patches applied\n"))
1277 return
1277 return
1278 qp = self.qparents(repo, top)
1278 qp = self.qparents(repo, top)
1279 if opts.get('reverse'):
1279 if opts.get('reverse'):
1280 node1, node2 = None, qp
1280 node1, node2 = None, qp
1281 else:
1281 else:
1282 node1, node2 = qp, None
1282 node1, node2 = qp, None
1283 diffopts = self.diffopts(opts, patch)
1283 diffopts = self.diffopts(opts, patch)
1284 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1284 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1285
1285
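# The diff method above backs 'hg qdiff': it diffs from the parent of the
# topmost applied patch to the working directory, so the output shows the
# current patch together with any edits not yet folded in:
#
#   hg qdiff        # show the topmost patch plus pending working-dir edits
#   hg qrefresh     # fold those pending edits into the patch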
1286 def refresh(self, repo, pats=None, **opts):
1286 def refresh(self, repo, pats=None, **opts):
1287 if not self.applied:
1287 if not self.applied:
1288 self.ui.write(_("no patches applied\n"))
1288 self.ui.write(_("no patches applied\n"))
1289 return 1
1289 return 1
1290 msg = opts.get('msg', '').rstrip()
1290 msg = opts.get('msg', '').rstrip()
1291 newuser = opts.get('user')
1291 newuser = opts.get('user')
1292 newdate = opts.get('date')
1292 newdate = opts.get('date')
1293 if newdate:
1293 if newdate:
1294 newdate = '%d %d' % util.parsedate(newdate)
1294 newdate = '%d %d' % util.parsedate(newdate)
1295 wlock = repo.wlock()
1295 wlock = repo.wlock()
1296
1296
1297 try:
1297 try:
1298 self.check_toppatch(repo)
1298 self.check_toppatch(repo)
1299 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1299 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1300 if repo.changelog.heads(top) != [top]:
1300 if repo.changelog.heads(top) != [top]:
1301 raise util.Abort(_("cannot refresh a revision with children"))
1301 raise util.Abort(_("cannot refresh a revision with children"))
1302
1302
1303 inclsubs = self.check_substate(repo)
1303 inclsubs = self.check_substate(repo)
1304
1304
1305 cparents = repo.changelog.parents(top)
1305 cparents = repo.changelog.parents(top)
1306 patchparent = self.qparents(repo, top)
1306 patchparent = self.qparents(repo, top)
1307 ph = patchheader(self.join(patchfn), self.plainmode)
1307 ph = patchheader(self.join(patchfn), self.plainmode)
1308 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1308 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1309 if msg:
1309 if msg:
1310 ph.setmessage(msg)
1310 ph.setmessage(msg)
1311 if newuser:
1311 if newuser:
1312 ph.setuser(newuser)
1312 ph.setuser(newuser)
1313 if newdate:
1313 if newdate:
1314 ph.setdate(newdate)
1314 ph.setdate(newdate)
1315 ph.setparent(hex(patchparent))
1315 ph.setparent(hex(patchparent))
1316
1316
1317 # only commit new patch when write is complete
1317 # only commit new patch when write is complete
1318 patchf = self.opener(patchfn, 'w', atomictemp=True)
1318 patchf = self.opener(patchfn, 'w', atomictemp=True)
1319
1319
1320 comments = str(ph)
1320 comments = str(ph)
1321 if comments:
1321 if comments:
1322 patchf.write(comments)
1322 patchf.write(comments)
1323
1323
1324 # update the dirstate in place, strip off the qtip commit
1324 # update the dirstate in place, strip off the qtip commit
1325 # and then commit.
1325 # and then commit.
1326 #
1326 #
1327 # this should really read:
1327 # this should really read:
1328 # mm, dd, aa = repo.status(top, patchparent)[:3]
1328 # mm, dd, aa = repo.status(top, patchparent)[:3]
1329 # but we do it backwards to take advantage of manifest/chlog
1329 # but we do it backwards to take advantage of manifest/chlog
1330 # caching against the next repo.status call
1330 # caching against the next repo.status call
1331 mm, aa, dd = repo.status(patchparent, top)[:3]
1331 mm, aa, dd = repo.status(patchparent, top)[:3]
1332 changes = repo.changelog.read(top)
1332 changes = repo.changelog.read(top)
1333 man = repo.manifest.read(changes[0])
1333 man = repo.manifest.read(changes[0])
1334 aaa = aa[:]
1334 aaa = aa[:]
1335 matchfn = cmdutil.match(repo, pats, opts)
1335 matchfn = cmdutil.match(repo, pats, opts)
1336 # in short mode, we only diff the files already included in the
1337 # patch plus any explicitly specified files
1338 if opts.get('short'):
1338 if opts.get('short'):
1339 # if amending a patch, we start with existing
1339 # if amending a patch, we start with existing
1340 # files plus specified files - unfiltered
1340 # files plus specified files - unfiltered
1341 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1341 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1342 # filter with include/exclude options
1343 matchfn = cmdutil.match(repo, opts=opts)
1343 matchfn = cmdutil.match(repo, opts=opts)
1344 else:
1344 else:
1345 match = cmdutil.matchall(repo)
1345 match = cmdutil.matchall(repo)
1346 m, a, r, d = repo.status(match=match)[:4]
1346 m, a, r, d = repo.status(match=match)[:4]
1347 mm = set(mm)
1347 mm = set(mm)
1348 aa = set(aa)
1348 aa = set(aa)
1349 dd = set(dd)
1349 dd = set(dd)
1350
1350
1351 # we might end up with files that were added between
1351 # we might end up with files that were added between
1352 # qtip and the dirstate parent, but then changed in the
1352 # qtip and the dirstate parent, but then changed in the
1353 # local dirstate. in this case, we want them to only
1353 # local dirstate. in this case, we want them to only
1354 # show up in the added section
1354 # show up in the added section
1355 for x in m:
1355 for x in m:
1356 if x not in aa:
1356 if x not in aa:
1357 mm.add(x)
1357 mm.add(x)
1358 # we might end up with files added by the local dirstate that
1358 # we might end up with files added by the local dirstate that
1359 # were deleted by the patch. In this case, they should only
1359 # were deleted by the patch. In this case, they should only
1360 # show up in the changed section.
1360 # show up in the changed section.
1361 for x in a:
1361 for x in a:
1362 if x in dd:
1362 if x in dd:
1363 dd.remove(x)
1363 dd.remove(x)
1364 mm.add(x)
1364 mm.add(x)
1365 else:
1365 else:
1366 aa.add(x)
1366 aa.add(x)
1367 # make sure any files deleted in the local dirstate
1367 # make sure any files deleted in the local dirstate
1368 # are not in the add or change column of the patch
1368 # are not in the add or change column of the patch
1369 forget = []
1369 forget = []
1370 for x in d + r:
1370 for x in d + r:
1371 if x in aa:
1371 if x in aa:
1372 aa.remove(x)
1372 aa.remove(x)
1373 forget.append(x)
1373 forget.append(x)
1374 continue
1374 continue
1375 else:
1375 else:
1376 mm.discard(x)
1376 mm.discard(x)
1377 dd.add(x)
1377 dd.add(x)
1378
1378
1379 m = list(mm)
1379 m = list(mm)
1380 r = list(dd)
1380 r = list(dd)
1381 a = list(aa)
1381 a = list(aa)
1382 c = [filter(matchfn, l) for l in (m, a, r)]
1382 c = [filter(matchfn, l) for l in (m, a, r)]
1383 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
1383 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
1384 chunks = patch.diff(repo, patchparent, match=match,
1384 chunks = patch.diff(repo, patchparent, match=match,
1385 changes=c, opts=diffopts)
1385 changes=c, opts=diffopts)
1386 for chunk in chunks:
1386 for chunk in chunks:
1387 patchf.write(chunk)
1387 patchf.write(chunk)
1388
1388
1389 try:
1389 try:
1390 if diffopts.git or diffopts.upgrade:
1390 if diffopts.git or diffopts.upgrade:
1391 copies = {}
1391 copies = {}
1392 for dst in a:
1392 for dst in a:
1393 src = repo.dirstate.copied(dst)
1393 src = repo.dirstate.copied(dst)
1394 # during qfold, the source file for copies may
1394 # during qfold, the source file for copies may
1395 # be removed. Treat this as a simple add.
1395 # be removed. Treat this as a simple add.
1396 if src is not None and src in repo.dirstate:
1396 if src is not None and src in repo.dirstate:
1397 copies.setdefault(src, []).append(dst)
1397 copies.setdefault(src, []).append(dst)
1398 repo.dirstate.add(dst)
1398 repo.dirstate.add(dst)
1399 # remember the copies between patchparent and qtip
1399 # remember the copies between patchparent and qtip
1400 for dst in aaa:
1400 for dst in aaa:
1401 f = repo.file(dst)
1401 f = repo.file(dst)
1402 src = f.renamed(man[dst])
1402 src = f.renamed(man[dst])
1403 if src:
1403 if src:
1404 copies.setdefault(src[0], []).extend(
1404 copies.setdefault(src[0], []).extend(
1405 copies.get(dst, []))
1405 copies.get(dst, []))
1406 if dst in a:
1406 if dst in a:
1407 copies[src[0]].append(dst)
1407 copies[src[0]].append(dst)
1408 # we can't copy a file created by the patch itself
1408 # we can't copy a file created by the patch itself
1409 if dst in copies:
1409 if dst in copies:
1410 del copies[dst]
1410 del copies[dst]
1411 for src, dsts in copies.iteritems():
1411 for src, dsts in copies.iteritems():
1412 for dst in dsts:
1412 for dst in dsts:
1413 repo.dirstate.copy(src, dst)
1413 repo.dirstate.copy(src, dst)
1414 else:
1414 else:
1415 for dst in a:
1415 for dst in a:
1416 repo.dirstate.add(dst)
1416 repo.dirstate.add(dst)
1417 # Drop useless copy information
1417 # Drop useless copy information
1418 for f in list(repo.dirstate.copies()):
1418 for f in list(repo.dirstate.copies()):
1419 repo.dirstate.copy(None, f)
1419 repo.dirstate.copy(None, f)
1420 for f in r:
1420 for f in r:
1421 repo.dirstate.remove(f)
1421 repo.dirstate.remove(f)
1422 # if the patch excludes a modified file, mark that
1422 # if the patch excludes a modified file, mark that
1423 # file with mtime=0 so status can see it.
1423 # file with mtime=0 so status can see it.
1424 mm = []
1424 mm = []
1425 for i in xrange(len(m)-1, -1, -1):
1425 for i in xrange(len(m)-1, -1, -1):
1426 if not matchfn(m[i]):
1426 if not matchfn(m[i]):
1427 mm.append(m[i])
1427 mm.append(m[i])
1428 del m[i]
1428 del m[i]
1429 for f in m:
1429 for f in m:
1430 repo.dirstate.normal(f)
1430 repo.dirstate.normal(f)
1431 for f in mm:
1431 for f in mm:
1432 repo.dirstate.normallookup(f)
1432 repo.dirstate.normallookup(f)
1433 for f in forget:
1433 for f in forget:
1434 repo.dirstate.forget(f)
1434 repo.dirstate.forget(f)
1435
1435
1436 if not msg:
1436 if not msg:
1437 if not ph.message:
1437 if not ph.message:
1438 message = "[mq]: %s\n" % patchfn
1438 message = "[mq]: %s\n" % patchfn
1439 else:
1439 else:
1440 message = "\n".join(ph.message)
1440 message = "\n".join(ph.message)
1441 else:
1441 else:
1442 message = msg
1442 message = msg
1443
1443
1444 user = ph.user or changes[1]
1444 user = ph.user or changes[1]
1445
1445
1446 # assumes strip can roll itself back if interrupted
1446 # assumes strip can roll itself back if interrupted
1447 repo.dirstate.setparents(*cparents)
1447 repo.dirstate.setparents(*cparents)
1448 self.applied.pop()
1448 self.applied.pop()
1449 self.applied_dirty = 1
1449 self.applied_dirty = 1
1450 self.strip(repo, [top], update=False,
1450 self.strip(repo, [top], update=False,
1451 backup='strip')
1451 backup='strip')
1452 except:
1452 except:
1453 repo.dirstate.invalidate()
1453 repo.dirstate.invalidate()
1454 raise
1454 raise
1455
1455
1456 try:
1456 try:
1457 # might be nice to attempt to roll back strip after this
1457 # might be nice to attempt to roll back strip after this
1458 n = repo.commit(message, user, ph.date, match=match,
1458 n = repo.commit(message, user, ph.date, match=match,
1459 force=True)
1459 force=True)
1460 # only write patch after a successful commit
1460 # only write patch after a successful commit
1461 patchf.rename()
1461 patchf.rename()
1462 self.applied.append(statusentry(n, patchfn))
1462 self.applied.append(statusentry(n, patchfn))
1463 except:
1463 except:
1464 ctx = repo[cparents[0]]
1464 ctx = repo[cparents[0]]
1465 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1465 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1466 self.save_dirty()
1466 self.save_dirty()
1467 self.ui.warn(_('refresh interrupted while patch was popped! '
1467 self.ui.warn(_('refresh interrupted while patch was popped! '
1468 '(revert --all, qpush to recover)\n'))
1468 '(revert --all, qpush to recover)\n'))
1469 raise
1469 raise
1470 finally:
1470 finally:
1471 wlock.release()
1471 wlock.release()
1472 self.removeundo(repo)
1472 self.removeundo(repo)
1473
1473
1474 def init(self, repo, create=False):
1474 def init(self, repo, create=False):
1475 if not create and os.path.isdir(self.path):
1475 if not create and os.path.isdir(self.path):
1476 raise util.Abort(_("patch queue directory already exists"))
1476 raise util.Abort(_("patch queue directory already exists"))
1477 try:
1477 try:
1478 os.mkdir(self.path)
1478 os.mkdir(self.path)
1479 except OSError, inst:
1479 except OSError, inst:
1480 if inst.errno != errno.EEXIST or not create:
1480 if inst.errno != errno.EEXIST or not create:
1481 raise
1481 raise
1482 if create:
1482 if create:
1483 return self.qrepo(create=True)
1483 return self.qrepo(create=True)
1484
1484
1485 def unapplied(self, repo, patch=None):
1485 def unapplied(self, repo, patch=None):
1486 if patch and patch not in self.series:
1486 if patch and patch not in self.series:
1487 raise util.Abort(_("patch %s is not in series file") % patch)
1487 raise util.Abort(_("patch %s is not in series file") % patch)
1488 if not patch:
1488 if not patch:
1489 start = self.series_end()
1489 start = self.series_end()
1490 else:
1490 else:
1491 start = self.series.index(patch) + 1
1491 start = self.series.index(patch) + 1
1492 unapplied = []
1492 unapplied = []
1493 for i in xrange(start, len(self.series)):
1493 for i in xrange(start, len(self.series)):
1494 pushable, reason = self.pushable(i)
1494 pushable, reason = self.pushable(i)
1495 if pushable:
1495 if pushable:
1496 unapplied.append((i, self.series[i]))
1496 unapplied.append((i, self.series[i]))
1497 self.explain_pushable(i)
1497 self.explain_pushable(i)
1498 return unapplied
1498 return unapplied
1499
1499
1500 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1500 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1501 summary=False):
1501 summary=False):
1502 def displayname(pfx, patchname, state):
1502 def displayname(pfx, patchname, state):
1503 if pfx:
1503 if pfx:
1504 self.ui.write(pfx)
1504 self.ui.write(pfx)
1505 if summary:
1505 if summary:
1506 ph = patchheader(self.join(patchname), self.plainmode)
1506 ph = patchheader(self.join(patchname), self.plainmode)
1507 msg = ph.message and ph.message[0] or ''
1507 msg = ph.message and ph.message[0] or ''
1508 if self.ui.formatted():
1508 if self.ui.formatted():
1509 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1509 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1510 if width > 0:
1510 if width > 0:
1511 msg = util.ellipsis(msg, width)
1511 msg = util.ellipsis(msg, width)
1512 else:
1512 else:
1513 msg = ''
1513 msg = ''
1514 self.ui.write(patchname, label='qseries.' + state)
1514 self.ui.write(patchname, label='qseries.' + state)
1515 self.ui.write(': ')
1515 self.ui.write(': ')
1516 self.ui.write(msg, label='qseries.message.' + state)
1516 self.ui.write(msg, label='qseries.message.' + state)
1517 else:
1517 else:
1518 self.ui.write(patchname, label='qseries.' + state)
1518 self.ui.write(patchname, label='qseries.' + state)
1519 self.ui.write('\n')
1519 self.ui.write('\n')
1520
1520
1521 applied = set([p.name for p in self.applied])
1521 applied = set([p.name for p in self.applied])
1522 if length is None:
1522 if length is None:
1523 length = len(self.series) - start
1523 length = len(self.series) - start
1524 if not missing:
1524 if not missing:
1525 if self.ui.verbose:
1525 if self.ui.verbose:
1526 idxwidth = len(str(start + length - 1))
1526 idxwidth = len(str(start + length - 1))
1527 for i in xrange(start, start + length):
1527 for i in xrange(start, start + length):
1528 patch = self.series[i]
1528 patch = self.series[i]
1529 if patch in applied:
1529 if patch in applied:
1530 char, state = 'A', 'applied'
1530 char, state = 'A', 'applied'
1531 elif self.pushable(i)[0]:
1531 elif self.pushable(i)[0]:
1532 char, state = 'U', 'unapplied'
1532 char, state = 'U', 'unapplied'
1533 else:
1533 else:
1534 char, state = 'G', 'guarded'
1534 char, state = 'G', 'guarded'
1535 pfx = ''
1535 pfx = ''
1536 if self.ui.verbose:
1536 if self.ui.verbose:
1537 pfx = '%*d %s ' % (idxwidth, i, char)
1537 pfx = '%*d %s ' % (idxwidth, i, char)
1538 elif status and status != char:
1538 elif status and status != char:
1539 continue
1539 continue
1540 displayname(pfx, patch, state)
1540 displayname(pfx, patch, state)
1541 else:
1541 else:
1542 msng_list = []
1542 msng_list = []
1543 for root, dirs, files in os.walk(self.path):
1543 for root, dirs, files in os.walk(self.path):
1544 d = root[len(self.path) + 1:]
1544 d = root[len(self.path) + 1:]
1545 for f in files:
1545 for f in files:
1546 fl = os.path.join(d, f)
1546 fl = os.path.join(d, f)
1547 if (fl not in self.series and
1547 if (fl not in self.series and
1548 fl not in (self.status_path, self.series_path,
1548 fl not in (self.status_path, self.series_path,
1549 self.guards_path)
1549 self.guards_path)
1550 and not fl.startswith('.')):
1550 and not fl.startswith('.')):
1551 msng_list.append(fl)
1551 msng_list.append(fl)
1552 for x in sorted(msng_list):
1552 for x in sorted(msng_list):
1553 pfx = self.ui.verbose and ('D ') or ''
1553 pfx = self.ui.verbose and ('D ') or ''
1554 displayname(pfx, x, 'missing')
1554 displayname(pfx, x, 'missing')
1555
1555
1556 def issaveline(self, l):
1556 def issaveline(self, l):
1557 if l.name == '.hg.patches.save.line':
1557 if l.name == '.hg.patches.save.line':
1558 return True
1558 return True
1559
1559
1560 def qrepo(self, create=False):
1560 def qrepo(self, create=False):
1561 ui = self.ui.copy()
1561 ui = self.ui.copy()
1562 ui.setconfig('paths', 'default', '', overlay=False)
1562 ui.setconfig('paths', 'default', '', overlay=False)
1563 ui.setconfig('paths', 'default-push', '', overlay=False)
1563 ui.setconfig('paths', 'default-push', '', overlay=False)
1564 if create or os.path.isdir(self.join(".hg")):
1564 if create or os.path.isdir(self.join(".hg")):
1565 return hg.repository(ui, path=self.path, create=create)
1565 return hg.repository(ui, path=self.path, create=create)
1566
1566
1567 def restore(self, repo, rev, delete=None, qupdate=None):
1567 def restore(self, repo, rev, delete=None, qupdate=None):
1568 desc = repo[rev].description().strip()
1568 desc = repo[rev].description().strip()
1569 lines = desc.splitlines()
1569 lines = desc.splitlines()
1570 i = 0
1570 i = 0
1571 datastart = None
1571 datastart = None
1572 series = []
1572 series = []
1573 applied = []
1573 applied = []
1574 qpp = None
1574 qpp = None
1575 for i, line in enumerate(lines):
1575 for i, line in enumerate(lines):
1576 if line == 'Patch Data:':
1576 if line == 'Patch Data:':
1577 datastart = i + 1
1577 datastart = i + 1
1578 elif line.startswith('Dirstate:'):
1578 elif line.startswith('Dirstate:'):
1579 l = line.rstrip()
1579 l = line.rstrip()
1580 l = l[10:].split(' ')
1580 l = l[10:].split(' ')
1581 qpp = [bin(x) for x in l]
1581 qpp = [bin(x) for x in l]
1582 elif datastart is not None:
1582 elif datastart is not None:
1583 l = line.rstrip()
1583 l = line.rstrip()
1584 n, name = l.split(':', 1)
1584 n, name = l.split(':', 1)
1585 if n:
1585 if n:
1586 applied.append(statusentry(bin(n), name))
1586 applied.append(statusentry(bin(n), name))
1587 else:
1587 else:
1588 series.append(l)
1588 series.append(l)
1589 if datastart is None:
1589 if datastart is None:
1590 self.ui.warn(_("No saved patch data found\n"))
1590 self.ui.warn(_("No saved patch data found\n"))
1591 return 1
1591 return 1
1592 self.ui.warn(_("restoring status: %s\n") % lines[0])
1592 self.ui.warn(_("restoring status: %s\n") % lines[0])
1593 self.full_series = series
1593 self.full_series = series
1594 self.applied = applied
1594 self.applied = applied
1595 self.parse_series()
1595 self.parse_series()
1596 self.series_dirty = 1
1596 self.series_dirty = 1
1597 self.applied_dirty = 1
1597 self.applied_dirty = 1
1598 heads = repo.changelog.heads()
1598 heads = repo.changelog.heads()
1599 if delete:
1599 if delete:
1600 if rev not in heads:
1600 if rev not in heads:
1601 self.ui.warn(_("save entry has children, leaving it alone\n"))
1601 self.ui.warn(_("save entry has children, leaving it alone\n"))
1602 else:
1602 else:
1603 self.ui.warn(_("removing save entry %s\n") % short(rev))
1603 self.ui.warn(_("removing save entry %s\n") % short(rev))
1604 pp = repo.dirstate.parents()
1604 pp = repo.dirstate.parents()
1605 if rev in pp:
1605 if rev in pp:
1606 update = True
1606 update = True
1607 else:
1607 else:
1608 update = False
1608 update = False
1609 self.strip(repo, [rev], update=update, backup='strip')
1609 self.strip(repo, [rev], update=update, backup='strip')
1610 if qpp:
1610 if qpp:
1611 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1611 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1612 (short(qpp[0]), short(qpp[1])))
1612 (short(qpp[0]), short(qpp[1])))
1613 if qupdate:
1613 if qupdate:
1614 self.ui.status(_("updating queue directory\n"))
1614 self.ui.status(_("updating queue directory\n"))
1615 r = self.qrepo()
1615 r = self.qrepo()
1616 if not r:
1616 if not r:
1617 self.ui.warn(_("Unable to load queue repository\n"))
1617 self.ui.warn(_("Unable to load queue repository\n"))
1618 return 1
1618 return 1
1619 hg.clean(r, qpp[0])
1619 hg.clean(r, qpp[0])
1620
1620
1621 def save(self, repo, msg=None):
1621 def save(self, repo, msg=None):
1622 if not self.applied:
1622 if not self.applied:
1623 self.ui.warn(_("save: no patches applied, exiting\n"))
1623 self.ui.warn(_("save: no patches applied, exiting\n"))
1624 return 1
1624 return 1
1625 if self.issaveline(self.applied[-1]):
1625 if self.issaveline(self.applied[-1]):
1626 self.ui.warn(_("status is already saved\n"))
1626 self.ui.warn(_("status is already saved\n"))
1627 return 1
1627 return 1
1628
1628
1629 if not msg:
1629 if not msg:
1630 msg = _("hg patches saved state")
1630 msg = _("hg patches saved state")
1631 else:
1631 else:
1632 msg = "hg patches: " + msg.rstrip('\r\n')
1632 msg = "hg patches: " + msg.rstrip('\r\n')
1633 r = self.qrepo()
1633 r = self.qrepo()
1634 if r:
1634 if r:
1635 pp = r.dirstate.parents()
1635 pp = r.dirstate.parents()
1636 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1636 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1637 msg += "\n\nPatch Data:\n"
1637 msg += "\n\nPatch Data:\n"
1638 msg += ''.join('%s\n' % x for x in self.applied)
1638 msg += ''.join('%s\n' % x for x in self.applied)
1639 msg += ''.join(':%s\n' % x for x in self.full_series)
1639 msg += ''.join(':%s\n' % x for x in self.full_series)
1640 n = repo.commit(msg, force=True)
1640 n = repo.commit(msg, force=True)
1641 if not n:
1641 if not n:
1642 self.ui.warn(_("repo commit failed\n"))
1642 self.ui.warn(_("repo commit failed\n"))
1643 return 1
1643 return 1
1644 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1644 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1645 self.applied_dirty = 1
1645 self.applied_dirty = 1
1646 self.removeundo(repo)
1646 self.removeundo(repo)
1647
1647
1648 def full_series_end(self):
1648 def full_series_end(self):
1649 if self.applied:
1649 if self.applied:
1650 p = self.applied[-1].name
1650 p = self.applied[-1].name
1651 end = self.find_series(p)
1651 end = self.find_series(p)
1652 if end is None:
1652 if end is None:
1653 return len(self.full_series)
1653 return len(self.full_series)
1654 return end + 1
1654 return end + 1
1655 return 0
1655 return 0
1656
1656
1657 def series_end(self, all_patches=False):
1657 def series_end(self, all_patches=False):
1658 """If all_patches is False, return the index of the next pushable patch
1658 """If all_patches is False, return the index of the next pushable patch
1659 in the series, or the series length. If all_patches is True, return the
1659 in the series, or the series length. If all_patches is True, return the
1660 index of the first patch past the last applied one.
1660 index of the first patch past the last applied one.
1661 """
1661 """
1662 end = 0
1662 end = 0
1663 def next(start):
1663 def next(start):
1664 if all_patches or start >= len(self.series):
1664 if all_patches or start >= len(self.series):
1665 return start
1665 return start
1666 for i in xrange(start, len(self.series)):
1666 for i in xrange(start, len(self.series)):
1667 p, reason = self.pushable(i)
1667 p, reason = self.pushable(i)
1668 if p:
1668 if p:
1669 break
1669 break
1670 self.explain_pushable(i)
1670 self.explain_pushable(i)
1671 return i
1671 return i
1672 if self.applied:
1672 if self.applied:
1673 p = self.applied[-1].name
1673 p = self.applied[-1].name
1674 try:
1674 try:
1675 end = self.series.index(p)
1675 end = self.series.index(p)
1676 except ValueError:
1676 except ValueError:
1677 return 0
1677 return 0
1678 return next(end + 1)
1678 return next(end + 1)
1679 return next(end)
1679 return next(end)
1680
1680
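# Worked example for series_end above (hypothetical series): with
# series = ['a', 'b', 'c'], 'a' applied and 'b' guarded off,
#   series_end(all_patches=True)  returns 1  (first patch past the last applied one)
#   series_end(all_patches=False) returns 2  (index of 'c', the next pushable patch)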
1681 def appliedname(self, index):
1681 def appliedname(self, index):
1682 pname = self.applied[index].name
1682 pname = self.applied[index].name
1683 if not self.ui.verbose:
1683 if not self.ui.verbose:
1684 p = pname
1684 p = pname
1685 else:
1685 else:
1686 p = str(self.series.index(pname)) + " " + pname
1686 p = str(self.series.index(pname)) + " " + pname
1687 return p
1687 return p
1688
1688
1689 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1689 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1690 force=None, git=False):
1690 force=None, git=False):
1691 def checkseries(patchname):
1691 def checkseries(patchname):
1692 if patchname in self.series:
1692 if patchname in self.series:
1693 raise util.Abort(_('patch %s is already in the series file')
1693 raise util.Abort(_('patch %s is already in the series file')
1694 % patchname)
1694 % patchname)
1695 def checkfile(patchname):
1695 def checkfile(patchname):
1696 if not force and os.path.exists(self.join(patchname)):
1696 if not force and os.path.exists(self.join(patchname)):
1697 raise util.Abort(_('patch "%s" already exists')
1697 raise util.Abort(_('patch "%s" already exists')
1698 % patchname)
1698 % patchname)
1699
1699
1700 if rev:
1700 if rev:
1701 if files:
1701 if files:
1702 raise util.Abort(_('option "-r" not valid when importing '
1702 raise util.Abort(_('option "-r" not valid when importing '
1703 'files'))
1703 'files'))
1704 rev = cmdutil.revrange(repo, rev)
1704 rev = cmdutil.revrange(repo, rev)
1705 rev.sort(reverse=True)
1705 rev.sort(reverse=True)
1706 if (len(files) > 1 or len(rev) > 1) and patchname:
1706 if (len(files) > 1 or len(rev) > 1) and patchname:
1707 raise util.Abort(_('option "-n" not valid when importing multiple '
1707 raise util.Abort(_('option "-n" not valid when importing multiple '
1708 'patches'))
1708 'patches'))
1709 if rev:
1709 if rev:
1710 # If mq patches are applied, we can only import revisions
1710 # If mq patches are applied, we can only import revisions
1711 # that form a linear path to qbase.
1711 # that form a linear path to qbase.
1712 # Otherwise, they should form a linear path to a head.
1712 # Otherwise, they should form a linear path to a head.
1713 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1713 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1714 if len(heads) > 1:
1714 if len(heads) > 1:
1715 raise util.Abort(_('revision %d is the root of more than one '
1715 raise util.Abort(_('revision %d is the root of more than one '
1716 'branch') % rev[-1])
1716 'branch') % rev[-1])
1717 if self.applied:
1717 if self.applied:
1718 base = repo.changelog.node(rev[0])
1718 base = repo.changelog.node(rev[0])
1719 if base in [n.node for n in self.applied]:
1719 if base in [n.node for n in self.applied]:
1720 raise util.Abort(_('revision %d is already managed')
1720 raise util.Abort(_('revision %d is already managed')
1721 % rev[0])
1721 % rev[0])
1722 if heads != [self.applied[-1].node]:
1722 if heads != [self.applied[-1].node]:
1723 raise util.Abort(_('revision %d is not the parent of '
1723 raise util.Abort(_('revision %d is not the parent of '
1724 'the queue') % rev[0])
1724 'the queue') % rev[0])
1725 base = repo.changelog.rev(self.applied[0].node)
1725 base = repo.changelog.rev(self.applied[0].node)
1726 lastparent = repo.changelog.parentrevs(base)[0]
1726 lastparent = repo.changelog.parentrevs(base)[0]
1727 else:
1727 else:
1728 if heads != [repo.changelog.node(rev[0])]:
1728 if heads != [repo.changelog.node(rev[0])]:
1729 raise util.Abort(_('revision %d has unmanaged children')
1729 raise util.Abort(_('revision %d has unmanaged children')
1730 % rev[0])
1730 % rev[0])
1731 lastparent = None
1731 lastparent = None
1732
1732
1733 diffopts = self.diffopts({'git': git})
1733 diffopts = self.diffopts({'git': git})
1734 for r in rev:
1734 for r in rev:
1735 p1, p2 = repo.changelog.parentrevs(r)
1735 p1, p2 = repo.changelog.parentrevs(r)
1736 n = repo.changelog.node(r)
1736 n = repo.changelog.node(r)
1737 if p2 != nullrev:
1737 if p2 != nullrev:
1738 raise util.Abort(_('cannot import merge revision %d') % r)
1738 raise util.Abort(_('cannot import merge revision %d') % r)
1739 if lastparent and lastparent != r:
1739 if lastparent and lastparent != r:
1740 raise util.Abort(_('revision %d is not the parent of %d')
1740 raise util.Abort(_('revision %d is not the parent of %d')
1741 % (r, lastparent))
1741 % (r, lastparent))
1742 lastparent = p1
1742 lastparent = p1
1743
1743
1744 if not patchname:
1744 if not patchname:
1745 patchname = normname('%d.diff' % r)
1745 patchname = normname('%d.diff' % r)
1746 self.check_reserved_name(patchname)
1746 self.check_reserved_name(patchname)
1747 checkseries(patchname)
1747 checkseries(patchname)
1748 checkfile(patchname)
1748 checkfile(patchname)
1749 self.full_series.insert(0, patchname)
1749 self.full_series.insert(0, patchname)
1750
1750
1751 patchf = self.opener(patchname, "w")
1751 patchf = self.opener(patchname, "w")
1752 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1752 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1753 patchf.close()
1753 patchf.close()
1754
1754
1755 se = statusentry(n, patchname)
1755 se = statusentry(n, patchname)
1756 self.applied.insert(0, se)
1756 self.applied.insert(0, se)
1757
1757
1758 self.added.append(patchname)
1758 self.added.append(patchname)
1759 patchname = None
1759 patchname = None
1760 self.parse_series()
1760 self.parse_series()
1761 self.applied_dirty = 1
1761 self.applied_dirty = 1
1762 self.series_dirty = True
1762 self.series_dirty = True
1763
1763
1764 for i, filename in enumerate(files):
1764 for i, filename in enumerate(files):
1765 if existing:
1765 if existing:
1766 if filename == '-':
1766 if filename == '-':
1767 raise util.Abort(_('-e is incompatible with import from -'))
1767 raise util.Abort(_('-e is incompatible with import from -'))
1768 filename = normname(filename)
1768 filename = normname(filename)
1769 self.check_reserved_name(filename)
1769 self.check_reserved_name(filename)
1770 originpath = self.join(filename)
1770 originpath = self.join(filename)
1771 if not os.path.isfile(originpath):
1771 if not os.path.isfile(originpath):
1772 raise util.Abort(_("patch %s does not exist") % filename)
1772 raise util.Abort(_("patch %s does not exist") % filename)
1773
1773
1774 if patchname:
1774 if patchname:
1775 self.check_reserved_name(patchname)
1775 self.check_reserved_name(patchname)
1776 checkfile(patchname)
1776 checkfile(patchname)
1777
1777
1778 self.ui.write(_('renaming %s to %s\n')
1778 self.ui.write(_('renaming %s to %s\n')
1779 % (filename, patchname))
1779 % (filename, patchname))
1780 util.rename(originpath, self.join(patchname))
1780 util.rename(originpath, self.join(patchname))
1781 else:
1781 else:
1782 patchname = filename
1782 patchname = filename
1783
1783
1784 else:
1784 else:
1785 try:
1785 try:
1786 if filename == '-':
1786 if filename == '-':
1787 if not patchname:
1787 if not patchname:
1788 raise util.Abort(
1788 raise util.Abort(
1789 _('need --name to import a patch from -'))
1789 _('need --name to import a patch from -'))
1790 text = sys.stdin.read()
1790 text = sys.stdin.read()
1791 else:
1791 else:
1792 fp = url.open(self.ui, filename)
1792 fp = url.open(self.ui, filename)
1793 text = fp.read()
1793 text = fp.read()
1794 fp.close()
1794 fp.close()
1795 except (OSError, IOError):
1795 except (OSError, IOError):
1796 raise util.Abort(_("unable to read file %s") % filename)
1796 raise util.Abort(_("unable to read file %s") % filename)
1797 if not patchname:
1797 if not patchname:
1798 patchname = normname(os.path.basename(filename))
1798 patchname = normname(os.path.basename(filename))
1799 self.check_reserved_name(patchname)
1799 self.check_reserved_name(patchname)
1800 checkfile(patchname)
1800 checkfile(patchname)
1801 patchf = self.opener(patchname, "w")
1801 patchf = self.opener(patchname, "w")
1802 patchf.write(text)
1802 patchf.write(text)
1803 patchf.close()
1803 patchf.close()
1804 if not force:
1804 if not force:
1805 checkseries(patchname)
1805 checkseries(patchname)
1806 if patchname not in self.series:
1806 if patchname not in self.series:
1807 index = self.full_series_end() + i
1807 index = self.full_series_end() + i
1808 self.full_series[index:index] = [patchname]
1808 self.full_series[index:index] = [patchname]
1809 self.parse_series()
1809 self.parse_series()
1810 self.series_dirty = True
1810 self.series_dirty = True
1811 self.ui.warn(_("adding %s to series file\n") % patchname)
1811 self.ui.warn(_("adding %s to series file\n") % patchname)
1812 self.added.append(patchname)
1812 self.added.append(patchname)
1813 patchname = None
1813 patchname = None
1814
1814
1815 self.removeundo(repo)
1815 self.removeundo(repo)
1816
1816
1817 def delete(ui, repo, *patches, **opts):
1817 def delete(ui, repo, *patches, **opts):
1818 """remove patches from queue
1818 """remove patches from queue
1819
1819
1820 The patches must not be applied, and at least one patch is required. With
1820 The patches must not be applied, and at least one patch is required. With
1821 -k/--keep, the patch files are preserved in the patch directory.
1821 -k/--keep, the patch files are preserved in the patch directory.
1822
1822
1823 To stop managing a patch and move it into permanent history,
1823 To stop managing a patch and move it into permanent history,
1824 use the :hg:`qfinish` command."""
1824 use the :hg:`qfinish` command."""
1825 q = repo.mq
1825 q = repo.mq
1826 q.delete(repo, patches, opts)
1826 q.delete(repo, patches, opts)
1827 q.save_dirty()
1827 q.save_dirty()
1828 return 0
1828 return 0
1829
1829
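# Typical invocations of the command above (the patch name is hypothetical):
#   hg qdelete obsolete.patch         # drop the patch from the series and delete its file
#   hg qdelete --keep obsolete.patch  # drop it from the series but keep the file on disk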
1830 def applied(ui, repo, patch=None, **opts):
1830 def applied(ui, repo, patch=None, **opts):
1831 """print the patches already applied
1831 """print the patches already applied
1832
1832
1833 Returns 0 on success."""
1833 Returns 0 on success."""
1834
1834
1835 q = repo.mq
1835 q = repo.mq
1836
1836
1837 if patch:
1837 if patch:
1838 if patch not in q.series:
1838 if patch not in q.series:
1839 raise util.Abort(_("patch %s is not in series file") % patch)
1839 raise util.Abort(_("patch %s is not in series file") % patch)
1840 end = q.series.index(patch) + 1
1840 end = q.series.index(patch) + 1
1841 else:
1841 else:
1842 end = q.series_end(True)
1842 end = q.series_end(True)
1843
1843
1844 if opts.get('last') and not end:
1844 if opts.get('last') and not end:
1845 ui.write(_("no patches applied\n"))
1845 ui.write(_("no patches applied\n"))
1846 return 1
1846 return 1
1847 elif opts.get('last') and end == 1:
1847 elif opts.get('last') and end == 1:
1848 ui.write(_("only one patch applied\n"))
1848 ui.write(_("only one patch applied\n"))
1849 return 1
1849 return 1
1850 elif opts.get('last'):
1850 elif opts.get('last'):
1851 start = end - 2
1851 start = end - 2
1852 end = 1
1852 end = 1
1853 else:
1853 else:
1854 start = 0
1854 start = 0
1855
1855
1856 q.qseries(repo, length=end, start=start, status='A',
1856 q.qseries(repo, length=end, start=start, status='A',
1857 summary=opts.get('summary'))
1857 summary=opts.get('summary'))
1858
1858
1859
1859
1860 def unapplied(ui, repo, patch=None, **opts):
1860 def unapplied(ui, repo, patch=None, **opts):
1861 """print the patches not yet applied
1861 """print the patches not yet applied
1862
1862
1863 Returns 0 on success."""
1863 Returns 0 on success."""
1864
1864
1865 q = repo.mq
1865 q = repo.mq
1866 if patch:
1866 if patch:
1867 if patch not in q.series:
1867 if patch not in q.series:
1868 raise util.Abort(_("patch %s is not in series file") % patch)
1868 raise util.Abort(_("patch %s is not in series file") % patch)
1869 start = q.series.index(patch) + 1
1869 start = q.series.index(patch) + 1
1870 else:
1870 else:
1871 start = q.series_end(True)
1871 start = q.series_end(True)
1872
1872
1873 if start == len(q.series) and opts.get('first'):
1873 if start == len(q.series) and opts.get('first'):
1874 ui.write(_("all patches applied\n"))
1874 ui.write(_("all patches applied\n"))
1875 return 1
1875 return 1
1876
1876
1877 length = opts.get('first') and 1 or None
1877 length = opts.get('first') and 1 or None
1878 q.qseries(repo, start=start, length=length, status='U',
1878 q.qseries(repo, start=start, length=length, status='U',
1879 summary=opts.get('summary'))
1879 summary=opts.get('summary'))
1880
1880
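# The two commands above are the usual way to inspect queue state:
#   hg qapplied      # list patches already applied, oldest first
#   hg qunapplied    # list patches still waiting to be pushed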
1881 def qimport(ui, repo, *filename, **opts):
1881 def qimport(ui, repo, *filename, **opts):
1882 """import a patch
1882 """import a patch
1883
1883
1884 The patch is inserted into the series after the last applied
1884 The patch is inserted into the series after the last applied
1885 patch. If no patches have been applied, qimport prepends the patch
1885 patch. If no patches have been applied, qimport prepends the patch
1886 to the series.
1886 to the series.
1887
1887
1888 The patch will have the same name as its source file unless you
1888 The patch will have the same name as its source file unless you
1889 give it a new one with -n/--name.
1889 give it a new one with -n/--name.
1890
1890
1891 You can register an existing patch inside the patch directory with
1891 You can register an existing patch inside the patch directory with
1892 the -e/--existing flag.
1892 the -e/--existing flag.
1893
1893
1894 With -f/--force, an existing patch of the same name will be
1894 With -f/--force, an existing patch of the same name will be
1895 overwritten.
1895 overwritten.
1896
1896
1897 An existing changeset may be placed under mq control with -r/--rev
1897 An existing changeset may be placed under mq control with -r/--rev
1898 (e.g. qimport --rev tip -n patch will place tip under mq control).
1898 (e.g. qimport --rev tip -n patch will place tip under mq control).
1899 With -g/--git, patches imported with --rev will use the git diff
1899 With -g/--git, patches imported with --rev will use the git diff
1900 format. See the diffs help topic for information on why this is
1900 format. See the diffs help topic for information on why this is
1901 important for preserving rename/copy information and permission
1901 important for preserving rename/copy information and permission
1902 changes. Use :hg:`qfinish` to remove changesets from mq control.
1902 changes. Use :hg:`qfinish` to remove changesets from mq control.
1903
1903
1904 To import a patch from standard input, pass - as the patch file.
1904 To import a patch from standard input, pass - as the patch file.
1905 When importing from standard input, a patch name must be specified
1905 When importing from standard input, a patch name must be specified
1906 using the --name flag.
1906 using the --name flag.
1907
1907
1908 To import an existing patch while renaming it::
1908 To import an existing patch while renaming it::
1909
1909
1910 hg qimport -e existing-patch -n new-name
1910 hg qimport -e existing-patch -n new-name
1911
1911
1912 Returns 0 if import succeeded.
1912 Returns 0 if import succeeded.
1913 """
1913 """
1914 q = repo.mq
1914 q = repo.mq
1915 try:
1915 try:
1916 q.qimport(repo, filename, patchname=opts.get('name'),
1916 q.qimport(repo, filename, patchname=opts.get('name'),
1917 existing=opts.get('existing'), force=opts.get('force'),
1917 existing=opts.get('existing'), force=opts.get('force'),
1918 rev=opts.get('rev'), git=opts.get('git'))
1918 rev=opts.get('rev'), git=opts.get('git'))
1919 finally:
1919 finally:
1920 q.save_dirty()
1920 q.save_dirty()
1921
1921
1922 if opts.get('push') and not opts.get('rev'):
1922 if opts.get('push') and not opts.get('rev'):
1923 return q.push(repo, None)
1923 return q.push(repo, None)
1924 return 0
1924 return 0
1925
1925
1926 def qinit(ui, repo, create):
1926 def qinit(ui, repo, create):
1927 """initialize a new queue repository
1927 """initialize a new queue repository
1928
1928
1929 This command also creates a series file for ordering patches, and
1929 This command also creates a series file for ordering patches, and
1930 an mq-specific .hgignore file in the queue repository, to exclude
1930 an mq-specific .hgignore file in the queue repository, to exclude
1931 the status and guards files (these contain mostly transient state).
1931 the status and guards files (these contain mostly transient state).
1932
1932
1933 Returns 0 if initialization succeeded."""
1933 Returns 0 if initialization succeeded."""
1934 q = repo.mq
1934 q = repo.mq
1935 r = q.init(repo, create)
1935 r = q.init(repo, create)
1936 q.save_dirty()
1936 q.save_dirty()
1937 if r:
1937 if r:
1938 if not os.path.exists(r.wjoin('.hgignore')):
1938 if not os.path.exists(r.wjoin('.hgignore')):
1939 fp = r.wopener('.hgignore', 'w')
1939 fp = r.wopener('.hgignore', 'w')
1940 fp.write('^\\.hg\n')
1940 fp.write('^\\.hg\n')
1941 fp.write('^\\.mq\n')
1941 fp.write('^\\.mq\n')
1942 fp.write('syntax: glob\n')
1942 fp.write('syntax: glob\n')
1943 fp.write('status\n')
1943 fp.write('status\n')
1944 fp.write('guards\n')
1944 fp.write('guards\n')
1945 fp.close()
1945 fp.close()
1946 if not os.path.exists(r.wjoin('series')):
1946 if not os.path.exists(r.wjoin('series')):
1947 r.wopener('series', 'w').close()
1947 r.wopener('series', 'w').close()
1948 r[None].add(['.hgignore', 'series'])
1948 r[None].add(['.hgignore', 'series'])
1949 commands.add(ui, r)
1949 commands.add(ui, r)
1950 return 0
1950 return 0
1951
1951
1952 def init(ui, repo, **opts):
1952 def init(ui, repo, **opts):
1953 """init a new queue repository (DEPRECATED)
1953 """init a new queue repository (DEPRECATED)
1954
1954
1955 The queue repository is unversioned by default. If
1955 The queue repository is unversioned by default. If
1956 -c/--create-repo is specified, qinit will create a separate nested
1956 -c/--create-repo is specified, qinit will create a separate nested
1957 repository for patches (qinit -c may also be run later to convert
1957 repository for patches (qinit -c may also be run later to convert
1958 an unversioned patch repository into a versioned one). You can use
1958 an unversioned patch repository into a versioned one). You can use
1959 qcommit to commit changes to this queue repository.
1959 qcommit to commit changes to this queue repository.
1960
1960
1961 This command is deprecated. Without -c, it's implied by other relevant
1961 This command is deprecated. Without -c, it's implied by other relevant
1962 commands. With -c, use :hg:`init --mq` instead."""
1962 commands. With -c, use :hg:`init --mq` instead."""
1963 return qinit(ui, repo, create=opts.get('create_repo'))
1963 return qinit(ui, repo, create=opts.get('create_repo'))
1964
1964
1965 def clone(ui, source, dest=None, **opts):
1965 def clone(ui, source, dest=None, **opts):
1966 '''clone main and patch repository at same time
1966 '''clone main and patch repository at same time
1967
1967
1968 If source is local, destination will have no patches applied. If
1969 source is remote, this command cannot check whether patches are
1970 applied in source, so it cannot guarantee that patches are not
1971 applied in destination. If you clone a remote repository, make
1972 sure beforehand that it has no patches applied.
1973
1974 The source patch repository is looked for in <src>/.hg/patches by
1975 default. Use -p <url> to specify a different location.
1976
1976
1977 The patch directory must be a nested Mercurial repository, as
1977 The patch directory must be a nested Mercurial repository, as
1978 would be created by :hg:`init --mq`.
1978 would be created by :hg:`init --mq`.
1979
1979
1980 Return 0 on success.
1980 Return 0 on success.
1981 '''
1981 '''
1982 def patchdir(repo):
1982 def patchdir(repo):
1983 url = repo.url()
1983 url = repo.url()
1984 if url.endswith('/'):
1984 if url.endswith('/'):
1985 url = url[:-1]
1985 url = url[:-1]
1986 return url + '/.hg/patches'
1986 return url + '/.hg/patches'
1987 if dest is None:
1987 if dest is None:
1988 dest = hg.defaultdest(source)
1988 dest = hg.defaultdest(source)
1989 sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))
1989 sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))
1990 if opts.get('patches'):
1990 if opts.get('patches'):
1991 patchespath = ui.expandpath(opts.get('patches'))
1991 patchespath = ui.expandpath(opts.get('patches'))
1992 else:
1992 else:
1993 patchespath = patchdir(sr)
1993 patchespath = patchdir(sr)
1994 try:
1994 try:
1995 hg.repository(ui, patchespath)
1995 hg.repository(ui, patchespath)
1996 except error.RepoError:
1996 except error.RepoError:
1997 raise util.Abort(_('versioned patch repository not found'
1997 raise util.Abort(_('versioned patch repository not found'
1998 ' (see init --mq)'))
1998 ' (see init --mq)'))
1999 qbase, destrev = None, None
1999 qbase, destrev = None, None
2000 if sr.local():
2000 if sr.local():
2001 if sr.mq.applied:
2001 if sr.mq.applied:
2002 qbase = sr.mq.applied[0].node
2002 qbase = sr.mq.applied[0].node
2003 if not hg.islocal(dest):
2003 if not hg.islocal(dest):
2004 heads = set(sr.heads())
2004 heads = set(sr.heads())
2005 destrev = list(heads.difference(sr.heads(qbase)))
2005 destrev = list(heads.difference(sr.heads(qbase)))
2006 destrev.append(sr.changelog.parents(qbase)[0])
2006 destrev.append(sr.changelog.parents(qbase)[0])
2007 elif sr.capable('lookup'):
2007 elif sr.capable('lookup'):
2008 try:
2008 try:
2009 qbase = sr.lookup('qbase')
2009 qbase = sr.lookup('qbase')
2010 except error.RepoError:
2010 except error.RepoError:
2011 pass
2011 pass
2012 ui.note(_('cloning main repository\n'))
2012 ui.note(_('cloning main repository\n'))
2013 sr, dr = hg.clone(ui, sr.url(), dest,
2013 sr, dr = hg.clone(ui, sr.url(), dest,
2014 pull=opts.get('pull'),
2014 pull=opts.get('pull'),
2015 rev=destrev,
2015 rev=destrev,
2016 update=False,
2016 update=False,
2017 stream=opts.get('uncompressed'))
2017 stream=opts.get('uncompressed'))
2018 ui.note(_('cloning patch repository\n'))
2018 ui.note(_('cloning patch repository\n'))
2019 hg.clone(ui, opts.get('patches') or patchdir(sr), patchdir(dr),
2019 hg.clone(ui, opts.get('patches') or patchdir(sr), patchdir(dr),
2020 pull=opts.get('pull'), update=not opts.get('noupdate'),
2020 pull=opts.get('pull'), update=not opts.get('noupdate'),
2021 stream=opts.get('uncompressed'))
2021 stream=opts.get('uncompressed'))
2022 if dr.local():
2022 if dr.local():
2023 if qbase:
2023 if qbase:
2024 ui.note(_('stripping applied patches from destination '
2024 ui.note(_('stripping applied patches from destination '
2025 'repository\n'))
2025 'repository\n'))
2026 dr.mq.strip(dr, [qbase], update=False, backup=None)
2026 dr.mq.strip(dr, [qbase], update=False, backup=None)
2027 if not opts.get('noupdate'):
2027 if not opts.get('noupdate'):
2028 ui.note(_('updating destination repository\n'))
2028 ui.note(_('updating destination repository\n'))
2029 hg.update(dr, dr.changelog.tip())
2029 hg.update(dr, dr.changelog.tip())
2030
2030
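# A minimal sketch of cloning a repository together with its patch queue
# (the URLs below are placeholders, not real repositories):
#   hg qclone http://example.org/repo
#   hg qclone -p http://example.org/patches http://example.org/repo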
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    q = repo.mq
    r = q.qrepo()
    if not r:
        raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)

def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
    return 0

def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    t = q.applied and q.series_end(True) or 0
    if t:
        q.qseries(repo, start=t - 1, length=1, status='A',
                  summary=opts.get('summary'))
    else:
        ui.write(_("no patches applied\n"))
        return 1

def next(ui, repo, **opts):
    """print the name of the next patch

    Returns 0 on success."""
    q = repo.mq
    end = q.series_end()
    if end == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=end, length=1, summary=opts.get('summary'))

def prev(ui, repo, **opts):
    """print the name of the previous patch

    Returns 0 on success."""
    q = repo.mq
    l = len(q.applied)
    if l == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not l:
        ui.write(_("no patches applied\n"))
        return 1
    q.qseries(repo, start=l - 2, length=1, status='A',
              summary=opts.get('summary'))

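# setupheaderopts() below maps -U/--currentuser and -D/--currentdate onto
# explicit values before a patch is created or refreshed. util.makedate()
# returns a (unixtime, timezone offset) pair, so the "%d %d" format yields
# Mercurial's internal date string (illustrative value: "1300000000 -3600").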
def setupheaderopts(ui, opts):
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

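# Illustrative qnew invocation using flags from the docstring below (the
# patch name is hypothetical):
#
#   $ hg qnew -U -e fix-encoding.patch
#
# creates the patch on top of the stack, records the current user and opens
# the configured editor for the message.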
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    msg = cmdutil.logmessage(opts)
    def getmsg():
        return ui.edit(msg, opts.get('user') or ui.username())
    q = repo.mq
    opts['msg'] = msg
    if opts.get('edit'):
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0

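# Note on the -e/--edit path in refresh() below: the edited message is
# written to .hg/last-message.txt before q.refresh() runs, so a failing
# qrefresh (issue2062) does not lose the text.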
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts.get('edit'):
        if not q.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        ph = patchheader(q.join(patch), q.plainmode)
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
        # We don't want to lose the patch message if qrefresh fails (issue2062)
        msgfile = repo.opener('last-message.txt', 'wb')
        msgfile.write(message)
        msgfile.close()
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret

def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    repo.mq.diff(repo, pats, opts)
    return 0

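# Illustrative qfold session, assuming two unapplied patches with these
# hypothetical names:
#
#   $ hg qfold cleanup.patch tests.patch
#
# Each patch is applied onto the current one, the headers are joined with
# '* * *' separators, the result is refreshed, and the folded patch files
# are deleted unless -k/--keep is given.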
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.check_localchanges(repo)

    message = cmdutil.logmessage(opts)
    if opts.get('edit'):
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)
        cmdutil.updatedir(ui, repo, files)

    if not message:
        ph = patchheader(q.join(parent), q.plainmode)
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts.get('edit'):
        message = ui.edit(message, user or ui.username())

    diffopts = q.patchopts(q.diffopts(), *patches)
    q.refresh(repo, msg=message, git=diffopts.git)
    q.delete(repo, patches, opts)
    q.save_dirty()

def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    q = repo.mq
    patch = q.lookup(patch)
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=opts.get('force'))
    else:
        ret = q.push(repo, patch, force=opts.get('force'))
    q.save_dirty()
    return ret

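# Guard semantics handled below, restated from the docstring: a patch with
# no guards always pushes; "+foo" pushes only while guard "foo" is selected
# with qselect; "-foo" is skipped while "foo" is selected. Example (patch
# name is hypothetical):
#
#   $ hg qguard flaky-test.patch -- -stable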
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::
       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        guards = q.series_guards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        if args or opts.get('none'):
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))

def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    ph = patchheader(q.join(patch), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')

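# lastsavename()/savename() below pick numbered backup names for a queue
# directory. Illustratively, with .hg/patches.1 and .hg/patches.2 present,
# lastsavename('.hg/patches') returns ('.hg/patches.2', 2) and
# savename('.hg/patches') returns '.hg/patches.3' (example paths only).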
def lastsavename(path):
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    namere = re.compile("%s.([0-9]+)" % base)
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)

def savename(path):
    (last, index) = lastsavename(path)
    if last is None:
        index = 0
    newpath = path + ".%d" % (index + 1)
    return newpath

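# push() below is mostly a pass-through: the force/list/all/move/exact
# options go straight to q.push(); the only local work is resolving the
# merge queue path via lastsavename() when --merge is given. Typical use:
#
#   $ hg qpush -a        (push all remaining patches)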
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    When -f/--force is applied, all local changes in patched files
    will be lost.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    if opts.get('merge'):
        if opts.get('name'):
            newpath = repo.join(opts.get('name'))
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
                 exact=opts.get('exact'))
    return ret

def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    By default, pops off the top of the patch stack. If given a patch
    name, keeps popping off patches until the named patch is at the
    top of the stack.

    Return 0 on success.
    """
    localupdate = True
    if opts.get('name'):
        q = queue(ui, repo.join(""), repo.join(opts.get('name')))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'))
    q.save_dirty()
    return ret

def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""

    q = repo.mq

    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(
            _('A patch named %s already exists in the series file') % name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                r.dirstate.forget(patch)
                r.dirstate.add(name)
            else:
                if r.dirstate[name] == 'r':
                    wctx.undelete([name])
                wctx.copy(patch, name)
                wctx.remove([patch], False)
        finally:
            wlock.release()

    q.save_dirty()

def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    rev = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, rev, delete=opts.get('delete'),
              qupdate=opts.get('update'))
    q.save_dirty()
    return 0

def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts.get('copy'):
        path = q.path
        if opts.get('name'):
            newpath = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts.get('force'):
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts.get('empty'):
        try:
            os.unlink(q.join(q.status_path))
        except:
            pass
    return 0

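# Backup selection in strip() below, mapped from the options:
#     (default)                -> backup = 'all'
#     --backup                 -> backup = 'strip'
#     --no-backup / --nobackup -> backup = 'none'
# The resulting bundles live in .hg/strip-backup, per the docstring; see
# queue.strip() for how each mode is honored.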
def strip(ui, repo, *revs, **opts):
    """strip changesets and all their descendants from the repository

    The strip command removes the specified changesets and all their
    descendants. If the working directory has uncommitted changes,
    the operation is aborted unless the --force flag is supplied.

    If a parent of the working directory is stripped, then the working
    directory will automatically be updated to the most recent
    available ancestor of the stripped parent after the operation
    completes.

    Any stripped changesets are stored in ``.hg/strip-backup`` as a
    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
    where BUNDLE is the bundle file created by the strip. Note that
    the local revision numbers will in general be different after the
    restore.

    Use the --no-backup option to discard the backup bundle once the
    operation completes.

    Return 0 on success.
    """
    backup = 'all'
    if opts.get('backup'):
        backup = 'strip'
    elif opts.get('no_backup') or opts.get('nobackup'):
        backup = 'none'

    cl = repo.changelog
    revs = set(cmdutil.revrange(repo, revs))
    if not revs:
        raise util.Abort(_('empty revision set'))

    descendants = set(cl.descendants(*revs))
    strippedrevs = revs.union(descendants)
    roots = revs.difference(descendants)

    update = False
    # if one of the wdir parent is stripped we'll need
    # to update away to an earlier revision
    for p in repo.dirstate.parents():
        if p != nullid and cl.rev(p) in strippedrevs:
            update = True
            break

    rootnodes = set(cl.node(r) for r in roots)

    q = repo.mq
    if q.applied:
        # refresh queue state if we're about to strip
        # applied patches
        if cl.rev(repo.lookup('qtip')) in strippedrevs:
            q.applied_dirty = True
            start = 0
            end = len(q.applied)
            for i, statusentry in enumerate(q.applied):
                if statusentry.node in rootnodes:
                    # if one of the stripped roots is an applied
                    # patch, only part of the queue is stripped
                    start = i
                    break
            del q.applied[start:end]
            q.save_dirty()

    revs = list(rootnodes)
    if update and opts.get('keep'):
        wlock = repo.wlock()
        try:
            urev = repo.mq.qparents(repo, revs[0])
            repo.dirstate.rebuild(urev, repo[urev].manifest())
            repo.dirstate.write()
            update = False
        finally:
            wlock.release()

    repo.mq.strip(repo, revs, backup=backup, update=update,
                  force=opts.get('force'))
    return 0

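# Guard selection workflow tying qguard and qselect together (commands are
# taken from the docstring below; patch names are illustrative):
#
#   $ hg qguard buggy.patch -- -stable
#   $ hg qguard fix.patch +stable
#   $ hg qselect stable          # buggy.patch is skipped, fix.patch pushes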
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable    (negative guard)
        qguard bar.patch +stable       (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    guards = q.active()
    if args or opts.get('none'):
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    reapply = opts.get('reapply') and q.applied and q.appliedname(-1)
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i - 1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.save_dirty()

def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    if not opts.get('applied') and not revrange:
        raise util.Abort(_('no revisions specified'))
    elif opts.get('applied'):
        revrange = ('qbase::qtip',) + revrange

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = cmdutil.revrange(repo, revrange)
    q.finish(repo, revs)
    q.save_dirty()
    return 0

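# qqueue bookkeeping as implemented by the helpers below: the known queue
# names live one per line in .hg/patches.queues, the active queue name in
# .hg/patches.queue (written empty for the default "patches" queue), and a
# queue named "foo" keeps its patches under .hg/patches-foo.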
def qqueue(ui, repo, name=None, **opts):
    '''manage multiple patch queues

    Supports switching between different patch queues, as well as creating
    new patch queues and deleting existing ones.

    Omitting a queue name or specifying -l/--list will show you the registered
    queues - by default the "normal" patches queue is registered. The currently
    active queue will be marked with "(active)".

    To create a new queue, use -c/--create. The queue is automatically made
    active, except in the case where there are applied patches from the
    currently active queue in the repository. Then the queue will only be
    created and switching will fail.

    To delete an existing queue, use --delete. You cannot delete the currently
    active queue.

    Returns 0 on success.
    '''

    q = repo.mq

    _defaultqueue = 'patches'
    _allqueues = 'patches.queues'
    _activequeue = 'patches.queue'

    def _getcurrent():
        cur = os.path.basename(q.path)
        if cur.startswith('patches-'):
            cur = cur[8:]
        return cur

    def _noqueues():
        try:
            fh = repo.opener(_allqueues, 'r')
            fh.close()
        except IOError:
            return True

        return False

    def _getqueues():
        current = _getcurrent()

        try:
            fh = repo.opener(_allqueues, 'r')
            queues = [queue.strip() for queue in fh if queue.strip()]
            fh.close()
            if current not in queues:
                queues.append(current)
        except IOError:
            queues = [_defaultqueue]

        return sorted(queues)

    def _setactive(name):
        if q.applied:
            raise util.Abort(_('patches applied - cannot set new queue active'))
        _setactivenocheck(name)

    def _setactivenocheck(name):
        fh = repo.opener(_activequeue, 'w')
        if name != 'patches':
            fh.write(name)
        fh.close()

    def _addqueue(name):
        fh = repo.opener(_allqueues, 'a')
        fh.write('%s\n' % (name,))
        fh.close()

    def _queuedir(name):
        if name == 'patches':
            return repo.join('patches')
        else:
            return repo.join('patches-' + name)

    def _validname(name):
        for n in name:
            if n in ':\\/.':
                return False
        return True

    def _delete(name):
        if name not in existing:
            raise util.Abort(_('cannot delete queue that does not exist'))

        current = _getcurrent()

        if name == current:
            raise util.Abort(_('cannot delete currently active queue'))

        fh = repo.opener('patches.queues.new', 'w')
        for queue in existing:
            if queue == name:
                continue
            fh.write('%s\n' % (queue,))
        fh.close()
        util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))

    if not name or opts.get('list'):
        current = _getcurrent()
        for queue in _getqueues():
            ui.write('%s' % (queue,))
            if queue == current and not ui.quiet:
                ui.write(_(' (active)\n'))
            else:
                ui.write('\n')
        return

    if not _validname(name):
        raise util.Abort(
            _('invalid queue name, may not contain the characters ":\\/."'))

    existing = _getqueues()

    if opts.get('create'):
        if name in existing:
            raise util.Abort(_('queue "%s" already exists') % name)
        if _noqueues():
            _addqueue(_defaultqueue)
        _addqueue(name)
        _setactive(name)
    elif opts.get('rename'):
        current = _getcurrent()
        if name == current:
            raise util.Abort(_('can\'t rename "%s" to its current name') % name)
        if name in existing:
            raise util.Abort(_('queue "%s" already exists') % name)

        olddir = _queuedir(current)
        newdir = _queuedir(name)

        if os.path.exists(newdir):
            raise util.Abort(_('non-queue directory "%s" already exists') %
                             newdir)

        fh = repo.opener('patches.queues.new', 'w')
        for queue in existing:
            if queue == current:
                fh.write('%s\n' % (name,))
                if os.path.exists(olddir):
                    util.rename(olddir, newdir)
            else:
                fh.write('%s\n' % (queue,))
        fh.close()
        util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
        _setactivenocheck(name)
    elif opts.get('delete'):
        _delete(name)
    elif opts.get('purge'):
        if name in existing:
            _delete(name)
        qdir = _queuedir(name)
        if os.path.exists(qdir):
            shutil.rmtree(qdir)
    else:
        if name not in existing:
            raise util.Abort(_('use --create to create a new queue'))
        _setactive(name)

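# reposetup() below swaps in an mqrepo subclass for local repositories: it
# refuses commit/push over applied patches and, via _findtags(), exposes
# each applied patch name as a tag together with the virtual tags qtip,
# qbase and qparent.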
def reposetup(ui, repo):
    class mqrepo(repo.__class__):
        @util.propertycache
        def mq(self):
            return queue(self.ui, self.join(""))

        def abort_if_wdir_patched(self, errmsg, force=False):
            if self.mq.applied and not force:
                parents = self.dirstate.parents()
                patches = [s.node for s in self.mq.applied]
                if parents[0] in patches or parents[1] in patches:
                    raise util.Abort(errmsg)

        def commit(self, text="", user=None, date=None, match=None,
                   force=False, editor=False, extra={}):
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(text, user, date, match, force,
                                              editor, extra)

        def checkpush(self, force, revs):
            if self.mq.applied and not force:
                haspatches = True
                if revs:
                    # Assume applied patches have no non-patch descendants
                    # and are not on remote already. If they appear in the
                    # set of resolved 'revs', bail out.
                    applied = set(e.node for e in self.mq.applied)
                    haspatches = bool([n for n in revs if n in applied])
                if haspatches:
                    raise util.Abort(_('source has mq patches applied'))
            super(mqrepo, self).checkpush(force, revs)

        def _findtags(self):
            '''augment tags from base class with patch tags'''
            result = super(mqrepo, self)._findtags()

            q = self.mq
            if not q.applied:
                return result

            mqtags = [(patch.node, patch.name) for patch in q.applied]

            try:
                r = self.changelog.rev(mqtags[-1][0])
            except error.RepoLookupError:
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(mqtags[-1][0]))
                return result

            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            tags = result[0]
            for patch in mqtags:
                if patch[1] in tags:
                    self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
                                 % patch[1])
                else:
                    tags[patch[1]] = patch[0]

            return result

        def _branchtags(self, partial, lrev):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags(partial, lrev)

            cl = self.changelog
            qbasenode = q.applied[0].node
            try:
                qbase = cl.rev(qbasenode)
            except error.LookupError:
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(qbasenode))
                return super(mqrepo, self)._branchtags(partial, lrev)

            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
                self._updatebranchcache(partial, ctxgen)
                self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            ctxgen = (self[r] for r in xrange(start, len(cl)))
            self._updatebranchcache(partial, ctxgen)

            return partial

    if repo.local():
        repo.__class__ = mqrepo

3016 def mqimport(orig, ui, repo, *args, **kwargs):
3016 def mqimport(orig, ui, repo, *args, **kwargs):
3017 if (hasattr(repo, 'abort_if_wdir_patched')
3017 if (hasattr(repo, 'abort_if_wdir_patched')
3018 and not kwargs.get('no_commit', False)):
3018 and not kwargs.get('no_commit', False)):
3019 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
3019 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
3020 kwargs.get('force'))
3020 kwargs.get('force'))
3021 return orig(ui, repo, *args, **kwargs)
3021 return orig(ui, repo, *args, **kwargs)
3022
3022
3023 def mqinit(orig, ui, *args, **kwargs):
3023 def mqinit(orig, ui, *args, **kwargs):
3024 mq = kwargs.pop('mq', None)
3024 mq = kwargs.pop('mq', None)
3025
3025
3026 if not mq:
3026 if not mq:
3027 return orig(ui, *args, **kwargs)
3027 return orig(ui, *args, **kwargs)
3028
3028
3029 if args:
3029 if args:
3030 repopath = args[0]
3030 repopath = args[0]
3031 if not hg.islocal(repopath):
3031 if not hg.islocal(repopath):
3032 raise util.Abort(_('only a local queue repository '
3032 raise util.Abort(_('only a local queue repository '
3033 'may be initialized'))
3033 'may be initialized'))
3034 else:
3034 else:
3035 repopath = cmdutil.findrepo(os.getcwd())
3035 repopath = cmdutil.findrepo(os.getcwd())
3036 if not repopath:
3036 if not repopath:
3037 raise util.Abort(_('there is no Mercurial repository here '
3037 raise util.Abort(_('there is no Mercurial repository here '
3038 '(.hg not found)'))
3038 '(.hg not found)'))
3039 repo = hg.repository(ui, repopath)
3039 repo = hg.repository(ui, repopath)
3040 return qinit(ui, repo, True)
3040 return qinit(ui, repo, True)
3041
3041
3042 def mqcommand(orig, ui, repo, *args, **kwargs):
3042 def mqcommand(orig, ui, repo, *args, **kwargs):
3043 """Add --mq option to operate on patch repository instead of main"""
3043 """Add --mq option to operate on patch repository instead of main"""
3044
3044
3045 # some commands do not like getting unknown options
3045 # some commands do not like getting unknown options
3046 mq = kwargs.pop('mq', None)
3046 mq = kwargs.pop('mq', None)
3047
3047
3048 if not mq:
3048 if not mq:
3049 return orig(ui, repo, *args, **kwargs)
3049 return orig(ui, repo, *args, **kwargs)
3050
3050
3051 q = repo.mq
3051 q = repo.mq
3052 r = q.qrepo()
3052 r = q.qrepo()
3053 if not r:
3053 if not r:
3054 raise util.Abort(_('no queue repository'))
3054 raise util.Abort(_('no queue repository'))
3055 return orig(r.ui, r, *args, **kwargs)
3055 return orig(r.ui, r, *args, **kwargs)
3056
3056
3057 def summary(orig, ui, repo, *args, **kwargs):
3057 def summary(orig, ui, repo, *args, **kwargs):
3058 r = orig(ui, repo, *args, **kwargs)
3058 r = orig(ui, repo, *args, **kwargs)
3059 q = repo.mq
3059 q = repo.mq
3060 m = []
3060 m = []
3061 a, u = len(q.applied), len(q.unapplied(repo))
3061 a, u = len(q.applied), len(q.unapplied(repo))
3062 if a:
3062 if a:
3063 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3063 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3064 if u:
3064 if u:
3065 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3065 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3066 if m:
3066 if m:
3067 ui.write("mq: %s\n" % ', '.join(m))
3067 ui.write("mq: %s\n" % ', '.join(m))
3068 else:
3068 else:
3069 ui.note(_("mq: (empty queue)\n"))
3069 ui.note(_("mq: (empty queue)\n"))
3070 return r
3070 return r
3071
3071
3072 def uisetup(ui):
3072 def uisetup(ui):
3073 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3073 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3074
3074
3075 extensions.wrapcommand(commands.table, 'import', mqimport)
3075 extensions.wrapcommand(commands.table, 'import', mqimport)
3076 extensions.wrapcommand(commands.table, 'summary', summary)
3076 extensions.wrapcommand(commands.table, 'summary', summary)
3077
3077
3078 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3078 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3079 entry[1].extend(mqopt)
3079 entry[1].extend(mqopt)
3080
3080
3081 nowrap = set(commands.norepo.split(" ") + ['qrecord'])
3081 nowrap = set(commands.norepo.split(" ") + ['qrecord'])
3082
3082
3083 def dotable(cmdtable):
3083 def dotable(cmdtable):
3084 for cmd in cmdtable.keys():
3084 for cmd in cmdtable.keys():
3085 cmd = cmdutil.parsealiases(cmd)[0]
3085 cmd = cmdutil.parsealiases(cmd)[0]
3086 if cmd in nowrap:
3086 if cmd in nowrap:
3087 continue
3087 continue
3088 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3088 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3089 entry[1].extend(mqopt)
3089 entry[1].extend(mqopt)
3090
3090
3091 dotable(commands.table)
3091 dotable(commands.table)
3092
3092
3093 for extname, extmodule in extensions.extensions():
3093 for extname, extmodule in extensions.extensions():
3094 if extmodule.__file__ != __file__:
3094 if extmodule.__file__ != __file__:
3095 dotable(getattr(extmodule, 'cmdtable', {}))
3095 dotable(getattr(extmodule, 'cmdtable', {}))
3096
3096
3097 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
3097 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
3098
3098
3099 cmdtable = {
3099 cmdtable = {
3100 "qapplied":
3100 "qapplied":
3101 (applied,
3101 (applied,
3102 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
3102 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
3103 _('hg qapplied [-1] [-s] [PATCH]')),
3103 _('hg qapplied [-1] [-s] [PATCH]')),
3104 "qclone":
3104 "qclone":
3105 (clone,
3105 (clone,
3106 [('', 'pull', None, _('use pull protocol to copy metadata')),
3106 [('', 'pull', None, _('use pull protocol to copy metadata')),
3107 ('U', 'noupdate', None, _('do not update the new working directories')),
3107 ('U', 'noupdate', None, _('do not update the new working directories')),
3108 ('', 'uncompressed', None,
3108 ('', 'uncompressed', None,
3109 _('use uncompressed transfer (fast over LAN)')),
3109 _('use uncompressed transfer (fast over LAN)')),
3110 ('p', 'patches', '',
3110 ('p', 'patches', '',
3111 _('location of source patch repository'), _('REPO')),
3111 _('location of source patch repository'), _('REPO')),
3112 ] + commands.remoteopts,
3112 ] + commands.remoteopts,
3113 _('hg qclone [OPTION]... SOURCE [DEST]')),
3113 _('hg qclone [OPTION]... SOURCE [DEST]')),
3114 "qcommit|qci":
3114 "qcommit|qci":
3115 (commit,
3115 (commit,
3116 commands.table["^commit|ci"][1],
3116 commands.table["^commit|ci"][1],
3117 _('hg qcommit [OPTION]... [FILE]...')),
3117 _('hg qcommit [OPTION]... [FILE]...')),
3118 "^qdiff":
3118 "^qdiff":
3119 (diff,
3119 (diff,
3120 commands.diffopts + commands.diffopts2 + commands.walkopts,
3120 commands.diffopts + commands.diffopts2 + commands.walkopts,
3121 _('hg qdiff [OPTION]... [FILE]...')),
3121 _('hg qdiff [OPTION]... [FILE]...')),
3122 "qdelete|qremove|qrm":
3122 "qdelete|qremove|qrm":
3123 (delete,
3123 (delete,
3124 [('k', 'keep', None, _('keep patch file')),
3124 [('k', 'keep', None, _('keep patch file')),
3125 ('r', 'rev', [],
3125 ('r', 'rev', [],
3126 _('stop managing a revision (DEPRECATED)'), _('REV'))],
3126 _('stop managing a revision (DEPRECATED)'), _('REV'))],
3127 _('hg qdelete [-k] [PATCH]...')),
3127 _('hg qdelete [-k] [PATCH]...')),
3128 'qfold':
3128 'qfold':
3129 (fold,
3129 (fold,
3130 [('e', 'edit', None, _('edit patch header')),
3130 [('e', 'edit', None, _('edit patch header')),
3131 ('k', 'keep', None, _('keep folded patch files')),
3131 ('k', 'keep', None, _('keep folded patch files')),
3132 ] + commands.commitopts,
3132 ] + commands.commitopts,
3133 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
3133 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
3134 'qgoto':
3134 'qgoto':
3135 (goto,
3135 (goto,
3136 [('f', 'force', None, _('overwrite any local changes'))],
3136 [('f', 'force', None, _('overwrite any local changes'))],
3137 _('hg qgoto [OPTION]... PATCH')),
3137 _('hg qgoto [OPTION]... PATCH')),
3138 'qguard':
3138 'qguard':
3139 (guard,
3139 (guard,
3140 [('l', 'list', None, _('list all patches and guards')),
3140 [('l', 'list', None, _('list all patches and guards')),
3141 ('n', 'none', None, _('drop all guards'))],
3141 ('n', 'none', None, _('drop all guards'))],
3142 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
3142 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
3143 'qheader': (header, [], _('hg qheader [PATCH]')),
3143 'qheader': (header, [], _('hg qheader [PATCH]')),
3144 "qimport":
3144 "qimport":
3145 (qimport,
3145 (qimport,
3146 [('e', 'existing', None, _('import file in patch directory')),
3146 [('e', 'existing', None, _('import file in patch directory')),
3147 ('n', 'name', '',
3147 ('n', 'name', '',
3148 _('name of patch file'), _('NAME')),
3148 _('name of patch file'), _('NAME')),
3149 ('f', 'force', None, _('overwrite existing files')),
3149 ('f', 'force', None, _('overwrite existing files')),
3150 ('r', 'rev', [],
3150 ('r', 'rev', [],
3151 _('place existing revisions under mq control'), _('REV')),
3151 _('place existing revisions under mq control'), _('REV')),
3152 ('g', 'git', None, _('use git extended diff format')),
3152 ('g', 'git', None, _('use git extended diff format')),
3153 ('P', 'push', None, _('qpush after importing'))],
3153 ('P', 'push', None, _('qpush after importing'))],
3154 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
3154 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
3155 "^qinit":
3155 "^qinit":
3156 (init,
3156 (init,
3157 [('c', 'create-repo', None, _('create queue repository'))],
3157 [('c', 'create-repo', None, _('create queue repository'))],
3158 _('hg qinit [-c]')),
3158 _('hg qinit [-c]')),
3159 "^qnew":
3159 "^qnew":
3160 (new,
3160 (new,
3161 [('e', 'edit', None, _('edit commit message')),
3161 [('e', 'edit', None, _('edit commit message')),
3162 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
3162 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
3163 ('g', 'git', None, _('use git extended diff format')),
3163 ('g', 'git', None, _('use git extended diff format')),
3164 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
3164 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
3165 ('u', 'user', '',
3165 ('u', 'user', '',
3166 _('add "From: <USER>" to patch'), _('USER')),
3166 _('add "From: <USER>" to patch'), _('USER')),
3167 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
3167 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
3168 ('d', 'date', '',
3168 ('d', 'date', '',
3169 _('add "Date: <DATE>" to patch'), _('DATE'))
3169 _('add "Date: <DATE>" to patch'), _('DATE'))
3170 ] + commands.walkopts + commands.commitopts,
3170 ] + commands.walkopts + commands.commitopts,
3171 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
3171 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
3172 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
3172 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
3173 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
3173 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
3174 "^qpop":
3174 "^qpop":
3175 (pop,
3175 (pop,
3176 [('a', 'all', None, _('pop all patches')),
3176 [('a', 'all', None, _('pop all patches')),
3177 ('n', 'name', '',
3177 ('n', 'name', '',
3178 _('queue name to pop (DEPRECATED)'), _('NAME')),
3178 _('queue name to pop (DEPRECATED)'), _('NAME')),
3179 ('f', 'force', None, _('forget any local changes to patched files'))],
3179 ('f', 'force', None, _('forget any local changes to patched files'))],
3180 _('hg qpop [-a] [-f] [PATCH | INDEX]')),
3180 _('hg qpop [-a] [-f] [PATCH | INDEX]')),
3181 "^qpush":
3181 "^qpush":
3182 (push,
3182 (push,
3183 [('f', 'force', None, _('apply on top of local changes')),
3183 [('f', 'force', None, _('apply on top of local changes')),
3184 ('e', 'exact', None, _('apply the target patch to its recorded parent')),
3184 ('e', 'exact', None, _('apply the target patch to its recorded parent')),
3185 ('l', 'list', None, _('list patch name in commit text')),
3185 ('l', 'list', None, _('list patch name in commit text')),
3186 ('a', 'all', None, _('apply all patches')),
3186 ('a', 'all', None, _('apply all patches')),
3187 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
3187 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
3188 ('n', 'name', '',
3188 ('n', 'name', '',
3189 _('merge queue name (DEPRECATED)'), _('NAME')),
3189 _('merge queue name (DEPRECATED)'), _('NAME')),
3190 ('', 'move', None, _('reorder patch series and apply only the patch'))],
3190 ('', 'move', None, _('reorder patch series and apply only the patch'))],
3191 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]')),
3191 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]')),
3192 "^qrefresh":
3192 "^qrefresh":
3193 (refresh,
3193 (refresh,
3194 [('e', 'edit', None, _('edit commit message')),
3194 [('e', 'edit', None, _('edit commit message')),
3195 ('g', 'git', None, _('use git extended diff format')),
3195 ('g', 'git', None, _('use git extended diff format')),
3196 ('s', 'short', None,
3196 ('s', 'short', None,
3197 _('refresh only files already in the patch and specified files')),
3197 _('refresh only files already in the patch and specified files')),
3198 ('U', 'currentuser', None,
3198 ('U', 'currentuser', None,
3199 _('add/update author field in patch with current user')),
3199 _('add/update author field in patch with current user')),
3200 ('u', 'user', '',
3200 ('u', 'user', '',
3201 _('add/update author field in patch with given user'), _('USER')),
3201 _('add/update author field in patch with given user'), _('USER')),
3202 ('D', 'currentdate', None,
3202 ('D', 'currentdate', None,
3203 _('add/update date field in patch with current date')),
3203 _('add/update date field in patch with current date')),
3204 ('d', 'date', '',
3204 ('d', 'date', '',
3205 _('add/update date field in patch with given date'), _('DATE'))
3205 _('add/update date field in patch with given date'), _('DATE'))
3206 ] + commands.walkopts + commands.commitopts,
3206 ] + commands.walkopts + commands.commitopts,
3207 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
3207 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
3208 'qrename|qmv':
3208 'qrename|qmv':
3209 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
3209 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
3210 "qrestore":
3210 "qrestore":
3211 (restore,
3211 (restore,
3212 [('d', 'delete', None, _('delete save entry')),
3212 [('d', 'delete', None, _('delete save entry')),
3213 ('u', 'update', None, _('update queue working directory'))],
3213 ('u', 'update', None, _('update queue working directory'))],
3214 _('hg qrestore [-d] [-u] REV')),
3214 _('hg qrestore [-d] [-u] REV')),
3215 "qsave":
3215 "qsave":
3216 (save,
3216 (save,
3217 [('c', 'copy', None, _('copy patch directory')),
3217 [('c', 'copy', None, _('copy patch directory')),
3218 ('n', 'name', '',
3218 ('n', 'name', '',
3219 _('copy directory name'), _('NAME')),
3219 _('copy directory name'), _('NAME')),
3220 ('e', 'empty', None, _('clear queue status file')),
3220 ('e', 'empty', None, _('clear queue status file')),
3221 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3221 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3222 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
3222 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
3223 "qselect":
3223 "qselect":
3224 (select,
3224 (select,
3225 [('n', 'none', None, _('disable all guards')),
3225 [('n', 'none', None, _('disable all guards')),
3226 ('s', 'series', None, _('list all guards in series file')),
3226 ('s', 'series', None, _('list all guards in series file')),
3227 ('', 'pop', None, _('pop to before first guarded applied patch')),
3227 ('', 'pop', None, _('pop to before first guarded applied patch')),
3228 ('', 'reapply', None, _('pop, then reapply patches'))],
3228 ('', 'reapply', None, _('pop, then reapply patches'))],
3229 _('hg qselect [OPTION]... [GUARD]...')),
3229 _('hg qselect [OPTION]... [GUARD]...')),
3230 "qseries":
3230 "qseries":
3231 (series,
3231 (series,
3232 [('m', 'missing', None, _('print patches not in series')),
3232 [('m', 'missing', None, _('print patches not in series')),
3233 ] + seriesopts,
3233 ] + seriesopts,
3234 _('hg qseries [-ms]')),
3234 _('hg qseries [-ms]')),
3235 "strip":
3235 "strip":
3236 (strip,
3236 (strip,
3237 [('f', 'force', None, _('force removal of changesets even if the '
3237 [('f', 'force', None, _('force removal of changesets even if the '
3238 'working directory has uncommitted changes')),
3238 'working directory has uncommitted changes')),
3239 ('b', 'backup', None, _('bundle only changesets with local revision'
3239 ('b', 'backup', None, _('bundle only changesets with local revision'
3240 ' number greater than REV which are not'
3240 ' number greater than REV which are not'
3241 ' descendants of REV (DEPRECATED)')),
3241 ' descendants of REV (DEPRECATED)')),
3242 ('n', 'no-backup', None, _('no backups')),
3242 ('n', 'no-backup', None, _('no backups')),
3243 ('', 'nobackup', None, _('no backups (DEPRECATED)')),
3243 ('', 'nobackup', None, _('no backups (DEPRECATED)')),
3244 ('k', 'keep', None, _("do not modify working copy during strip"))],
3244 ('k', 'keep', None, _("do not modify working copy during strip"))],
3245 _('hg strip [-k] [-f] [-n] REV...')),
3245 _('hg strip [-k] [-f] [-n] REV...')),
3246 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
3246 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
3247 "qunapplied":
3247 "qunapplied":
3248 (unapplied,
3248 (unapplied,
3249 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
3249 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
3250 _('hg qunapplied [-1] [-s] [PATCH]')),
3250 _('hg qunapplied [-1] [-s] [PATCH]')),
3251 "qfinish":
3251 "qfinish":
3252 (finish,
3252 (finish,
3253 [('a', 'applied', None, _('finish all applied changesets'))],
3253 [('a', 'applied', None, _('finish all applied changesets'))],
3254 _('hg qfinish [-a] [REV]...')),
3254 _('hg qfinish [-a] [REV]...')),
3255 'qqueue':
3255 'qqueue':
3256 (qqueue,
3256 (qqueue,
3257 [
3257 [
3258 ('l', 'list', False, _('list all available queues')),
3258 ('l', 'list', False, _('list all available queues')),
3259 ('c', 'create', False, _('create new queue')),
3259 ('c', 'create', False, _('create new queue')),
3260 ('', 'rename', False, _('rename active queue')),
3260 ('', 'rename', False, _('rename active queue')),
3261 ('', 'delete', False, _('delete reference to queue')),
3261 ('', 'delete', False, _('delete reference to queue')),
3262 ('', 'purge', False, _('delete queue, and remove patch dir')),
3262 ('', 'purge', False, _('delete queue, and remove patch dir')),
3263 ],
3263 ],
3264 _('[OPTION] [QUEUE]')),
3264 _('[OPTION] [QUEUE]')),
3265 }
3265 }
3266
3266
3267 colortable = {'qguard.negative': 'red',
3267 colortable = {'qguard.negative': 'red',
3268 'qguard.positive': 'yellow',
3268 'qguard.positive': 'yellow',
3269 'qguard.unguarded': 'green',
3269 'qguard.unguarded': 'green',
3270 'qseries.applied': 'blue bold underline',
3270 'qseries.applied': 'blue bold underline',
3271 'qseries.guarded': 'black bold',
3271 'qseries.guarded': 'black bold',
3272 'qseries.missing': 'red bold',
3272 'qseries.missing': 'red bold',
3273 'qseries.unapplied': 'black bold'}
3273 'qseries.unapplied': 'black bold'}
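The hunk below applies the same substitution to the patch transplanting extension: call sites that built a repository-relative file opener with util.opener() now import scmutil and call scmutil.opener() with unchanged arguments. A minimal sketch of that call-site pattern, assuming a Mercurial installation of this vintage is importable; the '.hg/transplant' base directory and the 'journal' file name are taken from the transplanter code in the hunk, and the directory is assumed to exist already (the real code creates it with os.mkdir first):

    # Before this changeset the opener class lived in util:
    #     from mercurial import util
    #     opener = util.opener('.hg/transplant')
    from mercurial import scmutil

    # scmutil.opener(base) returns a callable object; calling it opens paths
    # relative to base, so opener('journal', 'w') writes .hg/transplant/journal.
    opener = scmutil.opener('.hg/transplant')
    fp = opener('journal', 'w')
    fp.write('# User example\n')
    fp.close()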
@@ -1,647 +1,647 @@
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to transplant changesets from another branch
8 '''command to transplant changesets from another branch
9
9
10 This extension allows you to transplant patches from another branch.
10 This extension allows you to transplant patches from another branch.
11
11
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 map from a changeset hash to its hash in the source repository.
13 map from a changeset hash to its hash in the source repository.
14 '''
14 '''
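# Illustrative note, not part of the original file: the mapping format
# described above follows from transplants.read()/write() further down.
# Each line of .hg/transplant/transplants is the hex local node and the hex
# source node joined by a colon, one transplanted changeset per line:
#     <40-char hex local node>:<40-char hex source node>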
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 import os, tempfile
17 import os, tempfile
18 from mercurial import bundlerepo, cmdutil, hg, merge, match
18 from mercurial import bundlerepo, cmdutil, hg, merge, match
19 from mercurial import patch, revlog, util, error
19 from mercurial import patch, revlog, scmutil, util, error
20 from mercurial import revset, templatekw
20 from mercurial import revset, templatekw
21
21
22 class transplantentry(object):
22 class transplantentry(object):
23 def __init__(self, lnode, rnode):
23 def __init__(self, lnode, rnode):
24 self.lnode = lnode
24 self.lnode = lnode
25 self.rnode = rnode
25 self.rnode = rnode
26
26
27 class transplants(object):
27 class transplants(object):
28 def __init__(self, path=None, transplantfile=None, opener=None):
28 def __init__(self, path=None, transplantfile=None, opener=None):
29 self.path = path
29 self.path = path
30 self.transplantfile = transplantfile
30 self.transplantfile = transplantfile
31 self.opener = opener
31 self.opener = opener
32
32
33 if not opener:
33 if not opener:
34 self.opener = util.opener(self.path)
34 self.opener = scmutil.opener(self.path)
35 self.transplants = {}
35 self.transplants = {}
36 self.dirty = False
36 self.dirty = False
37 self.read()
37 self.read()
38
38
39 def read(self):
39 def read(self):
40 abspath = os.path.join(self.path, self.transplantfile)
40 abspath = os.path.join(self.path, self.transplantfile)
41 if self.transplantfile and os.path.exists(abspath):
41 if self.transplantfile and os.path.exists(abspath):
42 for line in self.opener(self.transplantfile).read().splitlines():
42 for line in self.opener(self.transplantfile).read().splitlines():
43 lnode, rnode = map(revlog.bin, line.split(':'))
43 lnode, rnode = map(revlog.bin, line.split(':'))
44 list = self.transplants.setdefault(rnode, [])
44 list = self.transplants.setdefault(rnode, [])
45 list.append(transplantentry(lnode, rnode))
45 list.append(transplantentry(lnode, rnode))
46
46
47 def write(self):
47 def write(self):
48 if self.dirty and self.transplantfile:
48 if self.dirty and self.transplantfile:
49 if not os.path.isdir(self.path):
49 if not os.path.isdir(self.path):
50 os.mkdir(self.path)
50 os.mkdir(self.path)
51 fp = self.opener(self.transplantfile, 'w')
51 fp = self.opener(self.transplantfile, 'w')
52 for list in self.transplants.itervalues():
52 for list in self.transplants.itervalues():
53 for t in list:
53 for t in list:
54 l, r = map(revlog.hex, (t.lnode, t.rnode))
54 l, r = map(revlog.hex, (t.lnode, t.rnode))
55 fp.write(l + ':' + r + '\n')
55 fp.write(l + ':' + r + '\n')
56 fp.close()
56 fp.close()
57 self.dirty = False
57 self.dirty = False
58
58
59 def get(self, rnode):
59 def get(self, rnode):
60 return self.transplants.get(rnode) or []
60 return self.transplants.get(rnode) or []
61
61
62 def set(self, lnode, rnode):
62 def set(self, lnode, rnode):
63 list = self.transplants.setdefault(rnode, [])
63 list = self.transplants.setdefault(rnode, [])
64 list.append(transplantentry(lnode, rnode))
64 list.append(transplantentry(lnode, rnode))
65 self.dirty = True
65 self.dirty = True
66
66
67 def remove(self, transplant):
67 def remove(self, transplant):
68 list = self.transplants.get(transplant.rnode)
68 list = self.transplants.get(transplant.rnode)
69 if list:
69 if list:
70 del list[list.index(transplant)]
70 del list[list.index(transplant)]
71 self.dirty = True
71 self.dirty = True
72
72
73 class transplanter(object):
73 class transplanter(object):
74 def __init__(self, ui, repo):
74 def __init__(self, ui, repo):
75 self.ui = ui
75 self.ui = ui
76 self.path = repo.join('transplant')
76 self.path = repo.join('transplant')
77 self.opener = util.opener(self.path)
77 self.opener = scmutil.opener(self.path)
78 self.transplants = transplants(self.path, 'transplants',
78 self.transplants = transplants(self.path, 'transplants',
79 opener=self.opener)
79 opener=self.opener)
80
80
81 def applied(self, repo, node, parent):
81 def applied(self, repo, node, parent):
82 '''returns True if a node is already an ancestor of parent
82 '''returns True if a node is already an ancestor of parent
83 or has already been transplanted'''
83 or has already been transplanted'''
84 if hasnode(repo, node):
84 if hasnode(repo, node):
85 if node in repo.changelog.reachable(parent, stop=node):
85 if node in repo.changelog.reachable(parent, stop=node):
86 return True
86 return True
87 for t in self.transplants.get(node):
87 for t in self.transplants.get(node):
88 # it might have been stripped
88 # it might have been stripped
89 if not hasnode(repo, t.lnode):
89 if not hasnode(repo, t.lnode):
90 self.transplants.remove(t)
90 self.transplants.remove(t)
91 return False
91 return False
92 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
92 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
93 return True
93 return True
94 return False
94 return False
95
95
96 def apply(self, repo, source, revmap, merges, opts={}):
96 def apply(self, repo, source, revmap, merges, opts={}):
97 '''apply the revisions in revmap one by one in revision order'''
97 '''apply the revisions in revmap one by one in revision order'''
98 revs = sorted(revmap)
98 revs = sorted(revmap)
99 p1, p2 = repo.dirstate.parents()
99 p1, p2 = repo.dirstate.parents()
100 pulls = []
100 pulls = []
101 diffopts = patch.diffopts(self.ui, opts)
101 diffopts = patch.diffopts(self.ui, opts)
102 diffopts.git = True
102 diffopts.git = True
103
103
104 lock = wlock = None
104 lock = wlock = None
105 try:
105 try:
106 wlock = repo.wlock()
106 wlock = repo.wlock()
107 lock = repo.lock()
107 lock = repo.lock()
108 for rev in revs:
108 for rev in revs:
109 node = revmap[rev]
109 node = revmap[rev]
110 revstr = '%s:%s' % (rev, revlog.short(node))
110 revstr = '%s:%s' % (rev, revlog.short(node))
111
111
112 if self.applied(repo, node, p1):
112 if self.applied(repo, node, p1):
113 self.ui.warn(_('skipping already applied revision %s\n') %
113 self.ui.warn(_('skipping already applied revision %s\n') %
114 revstr)
114 revstr)
115 continue
115 continue
116
116
117 parents = source.changelog.parents(node)
117 parents = source.changelog.parents(node)
118 if not opts.get('filter'):
118 if not opts.get('filter'):
119 # If the changeset parent is the same as the
119 # If the changeset parent is the same as the
120 # wdir's parent, just pull it.
120 # wdir's parent, just pull it.
121 if parents[0] == p1:
121 if parents[0] == p1:
122 pulls.append(node)
122 pulls.append(node)
123 p1 = node
123 p1 = node
124 continue
124 continue
125 if pulls:
125 if pulls:
126 if source != repo:
126 if source != repo:
127 repo.pull(source, heads=pulls)
127 repo.pull(source, heads=pulls)
128 merge.update(repo, pulls[-1], False, False, None)
128 merge.update(repo, pulls[-1], False, False, None)
129 p1, p2 = repo.dirstate.parents()
129 p1, p2 = repo.dirstate.parents()
130 pulls = []
130 pulls = []
131
131
132 domerge = False
132 domerge = False
133 if node in merges:
133 if node in merges:
134 # pulling all the merge revs at once would mean we
134 # pulling all the merge revs at once would mean we
135 # couldn't transplant after the latest even if
135 # couldn't transplant after the latest even if
136 # transplants before them fail.
136 # transplants before them fail.
137 domerge = True
137 domerge = True
138 if not hasnode(repo, node):
138 if not hasnode(repo, node):
139 repo.pull(source, heads=[node])
139 repo.pull(source, heads=[node])
140
140
141 if parents[1] != revlog.nullid:
141 if parents[1] != revlog.nullid:
142 self.ui.note(_('skipping merge changeset %s:%s\n')
142 self.ui.note(_('skipping merge changeset %s:%s\n')
143 % (rev, revlog.short(node)))
143 % (rev, revlog.short(node)))
144 patchfile = None
144 patchfile = None
145 else:
145 else:
146 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
146 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
147 fp = os.fdopen(fd, 'w')
147 fp = os.fdopen(fd, 'w')
148 gen = patch.diff(source, parents[0], node, opts=diffopts)
148 gen = patch.diff(source, parents[0], node, opts=diffopts)
149 for chunk in gen:
149 for chunk in gen:
150 fp.write(chunk)
150 fp.write(chunk)
151 fp.close()
151 fp.close()
152
152
153 del revmap[rev]
153 del revmap[rev]
154 if patchfile or domerge:
154 if patchfile or domerge:
155 try:
155 try:
156 n = self.applyone(repo, node,
156 n = self.applyone(repo, node,
157 source.changelog.read(node),
157 source.changelog.read(node),
158 patchfile, merge=domerge,
158 patchfile, merge=domerge,
159 log=opts.get('log'),
159 log=opts.get('log'),
160 filter=opts.get('filter'))
160 filter=opts.get('filter'))
161 if n and domerge:
161 if n and domerge:
162 self.ui.status(_('%s merged at %s\n') % (revstr,
162 self.ui.status(_('%s merged at %s\n') % (revstr,
163 revlog.short(n)))
163 revlog.short(n)))
164 elif n:
164 elif n:
165 self.ui.status(_('%s transplanted to %s\n')
165 self.ui.status(_('%s transplanted to %s\n')
166 % (revlog.short(node),
166 % (revlog.short(node),
167 revlog.short(n)))
167 revlog.short(n)))
168 finally:
168 finally:
169 if patchfile:
169 if patchfile:
170 os.unlink(patchfile)
170 os.unlink(patchfile)
171 if pulls:
171 if pulls:
172 repo.pull(source, heads=pulls)
172 repo.pull(source, heads=pulls)
173 merge.update(repo, pulls[-1], False, False, None)
173 merge.update(repo, pulls[-1], False, False, None)
174 finally:
174 finally:
175 self.saveseries(revmap, merges)
175 self.saveseries(revmap, merges)
176 self.transplants.write()
176 self.transplants.write()
177 lock.release()
177 lock.release()
178 wlock.release()
178 wlock.release()
179
179
180 def filter(self, filter, node, changelog, patchfile):
180 def filter(self, filter, node, changelog, patchfile):
181 '''arbitrarily rewrite changeset before applying it'''
181 '''arbitrarily rewrite changeset before applying it'''
182
182
183 self.ui.status(_('filtering %s\n') % patchfile)
183 self.ui.status(_('filtering %s\n') % patchfile)
184 user, date, msg = (changelog[1], changelog[2], changelog[4])
184 user, date, msg = (changelog[1], changelog[2], changelog[4])
185 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
185 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
186 fp = os.fdopen(fd, 'w')
186 fp = os.fdopen(fd, 'w')
187 fp.write("# HG changeset patch\n")
187 fp.write("# HG changeset patch\n")
188 fp.write("# User %s\n" % user)
188 fp.write("# User %s\n" % user)
189 fp.write("# Date %d %d\n" % date)
189 fp.write("# Date %d %d\n" % date)
190 fp.write(msg + '\n')
190 fp.write(msg + '\n')
191 fp.close()
191 fp.close()
192
192
193 try:
193 try:
194 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
194 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
195 util.shellquote(patchfile)),
195 util.shellquote(patchfile)),
196 environ={'HGUSER': changelog[1],
196 environ={'HGUSER': changelog[1],
197 'HGREVISION': revlog.hex(node),
197 'HGREVISION': revlog.hex(node),
198 },
198 },
199 onerr=util.Abort, errprefix=_('filter failed'))
199 onerr=util.Abort, errprefix=_('filter failed'))
200 user, date, msg = self.parselog(file(headerfile))[1:4]
200 user, date, msg = self.parselog(file(headerfile))[1:4]
201 finally:
201 finally:
202 os.unlink(headerfile)
202 os.unlink(headerfile)
203
203
204 return (user, date, msg)
204 return (user, date, msg)
205
205
206 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
206 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
207 filter=None):
207 filter=None):
208 '''apply the patch in patchfile to the repository as a transplant'''
208 '''apply the patch in patchfile to the repository as a transplant'''
209 (manifest, user, (time, timezone), files, message) = cl[:5]
209 (manifest, user, (time, timezone), files, message) = cl[:5]
210 date = "%d %d" % (time, timezone)
210 date = "%d %d" % (time, timezone)
211 extra = {'transplant_source': node}
211 extra = {'transplant_source': node}
212 if filter:
212 if filter:
213 (user, date, message) = self.filter(filter, node, cl, patchfile)
213 (user, date, message) = self.filter(filter, node, cl, patchfile)
214
214
215 if log:
215 if log:
216 # we don't translate messages inserted into commits
216 # we don't translate messages inserted into commits
217 message += '\n(transplanted from %s)' % revlog.hex(node)
217 message += '\n(transplanted from %s)' % revlog.hex(node)
218
218
219 self.ui.status(_('applying %s\n') % revlog.short(node))
219 self.ui.status(_('applying %s\n') % revlog.short(node))
220 self.ui.note('%s %s\n%s\n' % (user, date, message))
220 self.ui.note('%s %s\n%s\n' % (user, date, message))
221
221
222 if not patchfile and not merge:
222 if not patchfile and not merge:
223 raise util.Abort(_('can only omit patchfile if merging'))
223 raise util.Abort(_('can only omit patchfile if merging'))
224 if patchfile:
224 if patchfile:
225 try:
225 try:
226 files = {}
226 files = {}
227 try:
227 try:
228 patch.patch(patchfile, self.ui, cwd=repo.root,
228 patch.patch(patchfile, self.ui, cwd=repo.root,
229 files=files, eolmode=None)
229 files=files, eolmode=None)
230 if not files:
230 if not files:
231 self.ui.warn(_('%s: empty changeset')
231 self.ui.warn(_('%s: empty changeset')
232 % revlog.hex(node))
232 % revlog.hex(node))
233 return None
233 return None
234 finally:
234 finally:
235 files = cmdutil.updatedir(self.ui, repo, files)
235 files = cmdutil.updatedir(self.ui, repo, files)
236 except Exception, inst:
236 except Exception, inst:
237 seriespath = os.path.join(self.path, 'series')
237 seriespath = os.path.join(self.path, 'series')
238 if os.path.exists(seriespath):
238 if os.path.exists(seriespath):
239 os.unlink(seriespath)
239 os.unlink(seriespath)
240 p1 = repo.dirstate.p1()
240 p1 = repo.dirstate.p1()
241 p2 = node
241 p2 = node
242 self.log(user, date, message, p1, p2, merge=merge)
242 self.log(user, date, message, p1, p2, merge=merge)
243 self.ui.write(str(inst) + '\n')
243 self.ui.write(str(inst) + '\n')
244 raise util.Abort(_('fix up the merge and run '
244 raise util.Abort(_('fix up the merge and run '
245 'hg transplant --continue'))
245 'hg transplant --continue'))
246 else:
246 else:
247 files = None
247 files = None
248 if merge:
248 if merge:
249 p1, p2 = repo.dirstate.parents()
249 p1, p2 = repo.dirstate.parents()
250 repo.dirstate.setparents(p1, node)
250 repo.dirstate.setparents(p1, node)
251 m = match.always(repo.root, '')
251 m = match.always(repo.root, '')
252 else:
252 else:
253 m = match.exact(repo.root, '', files)
253 m = match.exact(repo.root, '', files)
254
254
255 n = repo.commit(message, user, date, extra=extra, match=m)
255 n = repo.commit(message, user, date, extra=extra, match=m)
256 if not n:
256 if not n:
257 # Crash here to prevent an unclear crash later, in
257 # Crash here to prevent an unclear crash later, in
258 # transplants.write(). This can happen if patch.patch()
258 # transplants.write(). This can happen if patch.patch()
259 # does nothing but claims success or if repo.status() fails
259 # does nothing but claims success or if repo.status() fails
260 # to report changes done by patch.patch(). These both
260 # to report changes done by patch.patch(). These both
261 # appear to be bugs in other parts of Mercurial, but dying
261 # appear to be bugs in other parts of Mercurial, but dying
262 # here, as soon as we can detect the problem, is preferable
262 # here, as soon as we can detect the problem, is preferable
263 # to silently dropping changesets on the floor.
263 # to silently dropping changesets on the floor.
264 raise RuntimeError('nothing committed after transplant')
264 raise RuntimeError('nothing committed after transplant')
265 if not merge:
265 if not merge:
266 self.transplants.set(n, node)
266 self.transplants.set(n, node)
267
267
268 return n
268 return n
269
269
270 def resume(self, repo, source, opts=None):
270 def resume(self, repo, source, opts=None):
271 '''recover last transaction and apply remaining changesets'''
271 '''recover last transaction and apply remaining changesets'''
272 if os.path.exists(os.path.join(self.path, 'journal')):
272 if os.path.exists(os.path.join(self.path, 'journal')):
273 n, node = self.recover(repo)
273 n, node = self.recover(repo)
274 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
274 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
275 revlog.short(n)))
275 revlog.short(n)))
276 seriespath = os.path.join(self.path, 'series')
276 seriespath = os.path.join(self.path, 'series')
277 if not os.path.exists(seriespath):
277 if not os.path.exists(seriespath):
278 self.transplants.write()
278 self.transplants.write()
279 return
279 return
280 nodes, merges = self.readseries()
280 nodes, merges = self.readseries()
281 revmap = {}
281 revmap = {}
282 for n in nodes:
282 for n in nodes:
283 revmap[source.changelog.rev(n)] = n
283 revmap[source.changelog.rev(n)] = n
284 os.unlink(seriespath)
284 os.unlink(seriespath)
285
285
286 self.apply(repo, source, revmap, merges, opts)
286 self.apply(repo, source, revmap, merges, opts)
287
287
288 def recover(self, repo):
288 def recover(self, repo):
289 '''commit working directory using journal metadata'''
289 '''commit working directory using journal metadata'''
290 node, user, date, message, parents = self.readlog()
290 node, user, date, message, parents = self.readlog()
291 merge = len(parents) == 2
291 merge = len(parents) == 2
292
292
293 if not user or not date or not message or not parents[0]:
293 if not user or not date or not message or not parents[0]:
294 raise util.Abort(_('transplant log file is corrupt'))
294 raise util.Abort(_('transplant log file is corrupt'))
295
295
296 extra = {'transplant_source': node}
296 extra = {'transplant_source': node}
297 wlock = repo.wlock()
297 wlock = repo.wlock()
298 try:
298 try:
299 p1, p2 = repo.dirstate.parents()
299 p1, p2 = repo.dirstate.parents()
300 if p1 != parents[0]:
300 if p1 != parents[0]:
301 raise util.Abort(
301 raise util.Abort(
302 _('working dir not at transplant parent %s') %
302 _('working dir not at transplant parent %s') %
303 revlog.hex(parents[0]))
303 revlog.hex(parents[0]))
304 if merge:
304 if merge:
305 repo.dirstate.setparents(p1, parents[1])
305 repo.dirstate.setparents(p1, parents[1])
306 n = repo.commit(message, user, date, extra=extra)
306 n = repo.commit(message, user, date, extra=extra)
307 if not n:
307 if not n:
308 raise util.Abort(_('commit failed'))
308 raise util.Abort(_('commit failed'))
309 if not merge:
309 if not merge:
310 self.transplants.set(n, node)
310 self.transplants.set(n, node)
311 self.unlog()
311 self.unlog()
312
312
313 return n, node
313 return n, node
314 finally:
314 finally:
315 wlock.release()
315 wlock.release()
316
316
317 def readseries(self):
317 def readseries(self):
318 nodes = []
318 nodes = []
319 merges = []
319 merges = []
320 cur = nodes
320 cur = nodes
321 for line in self.opener('series').read().splitlines():
321 for line in self.opener('series').read().splitlines():
322 if line.startswith('# Merges'):
322 if line.startswith('# Merges'):
323 cur = merges
323 cur = merges
324 continue
324 continue
325 cur.append(revlog.bin(line))
325 cur.append(revlog.bin(line))
326
326
327 return (nodes, merges)
327 return (nodes, merges)
328
328
329 def saveseries(self, revmap, merges):
329 def saveseries(self, revmap, merges):
330 if not revmap:
330 if not revmap:
331 return
331 return
332
332
333 if not os.path.isdir(self.path):
333 if not os.path.isdir(self.path):
334 os.mkdir(self.path)
334 os.mkdir(self.path)
335 series = self.opener('series', 'w')
335 series = self.opener('series', 'w')
336 for rev in sorted(revmap):
336 for rev in sorted(revmap):
337 series.write(revlog.hex(revmap[rev]) + '\n')
337 series.write(revlog.hex(revmap[rev]) + '\n')
338 if merges:
338 if merges:
339 series.write('# Merges\n')
339 series.write('# Merges\n')
340 for m in merges:
340 for m in merges:
341 series.write(revlog.hex(m) + '\n')
341 series.write(revlog.hex(m) + '\n')
342 series.close()
342 series.close()
343
343
344 def parselog(self, fp):
344 def parselog(self, fp):
345 parents = []
345 parents = []
346 message = []
346 message = []
347 node = revlog.nullid
347 node = revlog.nullid
348 inmsg = False
348 inmsg = False
349 user = None
349 user = None
350 date = None
350 date = None
351 for line in fp.read().splitlines():
351 for line in fp.read().splitlines():
352 if inmsg:
352 if inmsg:
353 message.append(line)
353 message.append(line)
354 elif line.startswith('# User '):
354 elif line.startswith('# User '):
355 user = line[7:]
355 user = line[7:]
356 elif line.startswith('# Date '):
356 elif line.startswith('# Date '):
357 date = line[7:]
357 date = line[7:]
358 elif line.startswith('# Node ID '):
358 elif line.startswith('# Node ID '):
359 node = revlog.bin(line[10:])
359 node = revlog.bin(line[10:])
360 elif line.startswith('# Parent '):
360 elif line.startswith('# Parent '):
361 parents.append(revlog.bin(line[9:]))
361 parents.append(revlog.bin(line[9:]))
362 elif not line.startswith('# '):
362 elif not line.startswith('# '):
363 inmsg = True
363 inmsg = True
364 message.append(line)
364 message.append(line)
365 if None in (user, date):
365 if None in (user, date):
366 raise util.Abort(_("filter corrupted changeset (no user or date)"))
366 raise util.Abort(_("filter corrupted changeset (no user or date)"))
367 return (node, user, date, '\n'.join(message), parents)
367 return (node, user, date, '\n'.join(message), parents)
368
368
369 def log(self, user, date, message, p1, p2, merge=False):
369 def log(self, user, date, message, p1, p2, merge=False):
370 '''journal changelog metadata for later recover'''
370 '''journal changelog metadata for later recover'''
371
371
372 if not os.path.isdir(self.path):
372 if not os.path.isdir(self.path):
373 os.mkdir(self.path)
373 os.mkdir(self.path)
374 fp = self.opener('journal', 'w')
374 fp = self.opener('journal', 'w')
375 fp.write('# User %s\n' % user)
375 fp.write('# User %s\n' % user)
376 fp.write('# Date %s\n' % date)
376 fp.write('# Date %s\n' % date)
377 fp.write('# Node ID %s\n' % revlog.hex(p2))
377 fp.write('# Node ID %s\n' % revlog.hex(p2))
378 fp.write('# Parent ' + revlog.hex(p1) + '\n')
378 fp.write('# Parent ' + revlog.hex(p1) + '\n')
379 if merge:
379 if merge:
380 fp.write('# Parent ' + revlog.hex(p2) + '\n')
380 fp.write('# Parent ' + revlog.hex(p2) + '\n')
381 fp.write(message.rstrip() + '\n')
381 fp.write(message.rstrip() + '\n')
382 fp.close()
382 fp.close()
383
383
384 def readlog(self):
384 def readlog(self):
385 return self.parselog(self.opener('journal'))
385 return self.parselog(self.opener('journal'))
386
386
387 def unlog(self):
387 def unlog(self):
388 '''remove changelog journal'''
388 '''remove changelog journal'''
389 absdst = os.path.join(self.path, 'journal')
389 absdst = os.path.join(self.path, 'journal')
390 if os.path.exists(absdst):
390 if os.path.exists(absdst):
391 os.unlink(absdst)
391 os.unlink(absdst)
392
392
393 def transplantfilter(self, repo, source, root):
393 def transplantfilter(self, repo, source, root):
394 def matchfn(node):
394 def matchfn(node):
395 if self.applied(repo, node, root):
395 if self.applied(repo, node, root):
396 return False
396 return False
397 if source.changelog.parents(node)[1] != revlog.nullid:
397 if source.changelog.parents(node)[1] != revlog.nullid:
398 return False
398 return False
399 extra = source.changelog.read(node)[5]
399 extra = source.changelog.read(node)[5]
400 cnode = extra.get('transplant_source')
400 cnode = extra.get('transplant_source')
401 if cnode and self.applied(repo, cnode, root):
401 if cnode and self.applied(repo, cnode, root):
402 return False
402 return False
403 return True
403 return True
404
404
405 return matchfn
405 return matchfn
406
406
407 def hasnode(repo, node):
407 def hasnode(repo, node):
408 try:
408 try:
409 return repo.changelog.rev(node) is not None
409 return repo.changelog.rev(node) is not None
410 except error.RevlogError:
410 except error.RevlogError:
411 return False
411 return False
412
412
413 def browserevs(ui, repo, nodes, opts):
413 def browserevs(ui, repo, nodes, opts):
414 '''interactively transplant changesets'''
414 '''interactively transplant changesets'''
415 def browsehelp(ui):
415 def browsehelp(ui):
416 ui.write(_('y: transplant this changeset\n'
416 ui.write(_('y: transplant this changeset\n'
417 'n: skip this changeset\n'
417 'n: skip this changeset\n'
418 'm: merge at this changeset\n'
418 'm: merge at this changeset\n'
419 'p: show patch\n'
419 'p: show patch\n'
420 'c: commit selected changesets\n'
420 'c: commit selected changesets\n'
421 'q: cancel transplant\n'
421 'q: cancel transplant\n'
422 '?: show this help\n'))
422 '?: show this help\n'))
423
423
424 displayer = cmdutil.show_changeset(ui, repo, opts)
424 displayer = cmdutil.show_changeset(ui, repo, opts)
425 transplants = []
425 transplants = []
426 merges = []
426 merges = []
427 for node in nodes:
427 for node in nodes:
428 displayer.show(repo[node])
428 displayer.show(repo[node])
429 action = None
429 action = None
430 while not action:
430 while not action:
431 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
431 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
432 if action == '?':
432 if action == '?':
433 browsehelp(ui)
433 browsehelp(ui)
434 action = None
434 action = None
435 elif action == 'p':
435 elif action == 'p':
436 parent = repo.changelog.parents(node)[0]
436 parent = repo.changelog.parents(node)[0]
437 for chunk in patch.diff(repo, parent, node):
437 for chunk in patch.diff(repo, parent, node):
438 ui.write(chunk)
438 ui.write(chunk)
439 action = None
439 action = None
440 elif action not in ('y', 'n', 'm', 'c', 'q'):
440 elif action not in ('y', 'n', 'm', 'c', 'q'):
441 ui.write(_('no such option\n'))
441 ui.write(_('no such option\n'))
442 action = None
442 action = None
443 if action == 'y':
443 if action == 'y':
444 transplants.append(node)
444 transplants.append(node)
445 elif action == 'm':
445 elif action == 'm':
446 merges.append(node)
446 merges.append(node)
447 elif action == 'c':
447 elif action == 'c':
448 break
448 break
449 elif action == 'q':
449 elif action == 'q':
450 transplants = ()
450 transplants = ()
451 merges = ()
451 merges = ()
452 break
452 break
453 displayer.close()
453 displayer.close()
454 return (transplants, merges)
454 return (transplants, merges)
455
455
456 def transplant(ui, repo, *revs, **opts):
456 def transplant(ui, repo, *revs, **opts):
457 '''transplant changesets from another branch
457 '''transplant changesets from another branch
458
458
459 Selected changesets will be applied on top of the current working
459 Selected changesets will be applied on top of the current working
460 directory with the log of the original changeset. The changesets
460 directory with the log of the original changeset. The changesets
461 are copied and will thus appear twice in the history. Use the
461 are copied and will thus appear twice in the history. Use the
462 rebase extension instead if you want to move a whole branch of
462 rebase extension instead if you want to move a whole branch of
463 unpublished changesets.
463 unpublished changesets.
464
464
465 If --log is specified, log messages will have a comment appended
465 If --log is specified, log messages will have a comment appended
466 of the form::
466 of the form::
467
467
468 (transplanted from CHANGESETHASH)
468 (transplanted from CHANGESETHASH)
469
469
470 You can rewrite the changelog message with the --filter option.
470 You can rewrite the changelog message with the --filter option.
471 Its argument will be invoked with the current changelog message as
471 Its argument will be invoked with the current changelog message as
472 $1 and the patch as $2.
472 $1 and the patch as $2.
473
473
474 If --source/-s is specified, selects changesets from the named
474 If --source/-s is specified, selects changesets from the named
475 repository. If --branch/-b is specified, selects changesets from
475 repository. If --branch/-b is specified, selects changesets from
476 the branch holding the named revision, up to that revision. If
476 the branch holding the named revision, up to that revision. If
477 --all/-a is specified, all changesets on the branch will be
477 --all/-a is specified, all changesets on the branch will be
478 transplanted, otherwise you will be prompted to select the
478 transplanted, otherwise you will be prompted to select the
479 changesets you want.
479 changesets you want.
480
480
481 :hg:`transplant --branch REVISION --all` will transplant the
481 :hg:`transplant --branch REVISION --all` will transplant the
482 selected branch (up to the named revision) onto your current
482 selected branch (up to the named revision) onto your current
483 working directory.
483 working directory.
484
484
485 You can optionally mark selected transplanted changesets as merge
485 You can optionally mark selected transplanted changesets as merge
486 changesets. You will not be prompted to transplant any ancestors
486 changesets. You will not be prompted to transplant any ancestors
487 of a merged transplant, and you can merge descendants of them
487 of a merged transplant, and you can merge descendants of them
488 normally instead of transplanting them.
488 normally instead of transplanting them.
489
489
490 If no merges or revisions are provided, :hg:`transplant` will
490 If no merges or revisions are provided, :hg:`transplant` will
491 start an interactive changeset browser.
491 start an interactive changeset browser.
492
492
493 If a changeset application fails, you can fix the merge by hand
493 If a changeset application fails, you can fix the merge by hand
494 and then resume where you left off by calling :hg:`transplant
494 and then resume where you left off by calling :hg:`transplant
495 --continue/-c`.
495 --continue/-c`.
496 '''
496 '''
497 def incwalk(repo, incoming, branches, match=util.always):
497 def incwalk(repo, incoming, branches, match=util.always):
498 if not branches:
498 if not branches:
499 branches = None
499 branches = None
500 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
500 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
501 if match(node):
501 if match(node):
502 yield node
502 yield node
503
503
504 def transplantwalk(repo, root, branches, match=util.always):
504 def transplantwalk(repo, root, branches, match=util.always):
505 if not branches:
505 if not branches:
506 branches = repo.heads()
506 branches = repo.heads()
507 ancestors = []
507 ancestors = []
508 for branch in branches:
508 for branch in branches:
509 ancestors.append(repo.changelog.ancestor(root, branch))
509 ancestors.append(repo.changelog.ancestor(root, branch))
510 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
510 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
511 if match(node):
511 if match(node):
512 yield node
512 yield node
513
513
514 def checkopts(opts, revs):
514 def checkopts(opts, revs):
515 if opts.get('continue'):
515 if opts.get('continue'):
516 if opts.get('branch') or opts.get('all') or opts.get('merge'):
516 if opts.get('branch') or opts.get('all') or opts.get('merge'):
517 raise util.Abort(_('--continue is incompatible with '
517 raise util.Abort(_('--continue is incompatible with '
518 'branch, all or merge'))
518 'branch, all or merge'))
519 return
519 return
520 if not (opts.get('source') or revs or
520 if not (opts.get('source') or revs or
521 opts.get('merge') or opts.get('branch')):
521 opts.get('merge') or opts.get('branch')):
522 raise util.Abort(_('no source URL, branch tag or revision '
522 raise util.Abort(_('no source URL, branch tag or revision '
523 'list provided'))
523 'list provided'))
524 if opts.get('all'):
524 if opts.get('all'):
525 if not opts.get('branch'):
525 if not opts.get('branch'):
526 raise util.Abort(_('--all requires a branch revision'))
526 raise util.Abort(_('--all requires a branch revision'))
527 if revs:
527 if revs:
528 raise util.Abort(_('--all is incompatible with a '
528 raise util.Abort(_('--all is incompatible with a '
529 'revision list'))
529 'revision list'))
530
530
531 checkopts(opts, revs)
531 checkopts(opts, revs)
532
532
533 if not opts.get('log'):
533 if not opts.get('log'):
534 opts['log'] = ui.config('transplant', 'log')
534 opts['log'] = ui.config('transplant', 'log')
535 if not opts.get('filter'):
535 if not opts.get('filter'):
536 opts['filter'] = ui.config('transplant', 'filter')
536 opts['filter'] = ui.config('transplant', 'filter')
537
537
538 tp = transplanter(ui, repo)
538 tp = transplanter(ui, repo)
539
539
540 p1, p2 = repo.dirstate.parents()
540 p1, p2 = repo.dirstate.parents()
541 if len(repo) > 0 and p1 == revlog.nullid:
541 if len(repo) > 0 and p1 == revlog.nullid:
542 raise util.Abort(_('no revision checked out'))
542 raise util.Abort(_('no revision checked out'))
543 if not opts.get('continue'):
543 if not opts.get('continue'):
544 if p2 != revlog.nullid:
544 if p2 != revlog.nullid:
545 raise util.Abort(_('outstanding uncommitted merges'))
545 raise util.Abort(_('outstanding uncommitted merges'))
546 m, a, r, d = repo.status()[:4]
546 m, a, r, d = repo.status()[:4]
547 if m or a or r or d:
547 if m or a or r or d:
548 raise util.Abort(_('outstanding local changes'))
548 raise util.Abort(_('outstanding local changes'))
549
549
550 bundle = None
550 bundle = None
551 source = opts.get('source')
551 source = opts.get('source')
552 if source:
552 if source:
553 sourcerepo = ui.expandpath(source)
553 sourcerepo = ui.expandpath(source)
554 source = hg.repository(ui, sourcerepo)
554 source = hg.repository(ui, sourcerepo)
555 source, common, incoming, bundle = bundlerepo.getremotechanges(ui, repo,
555 source, common, incoming, bundle = bundlerepo.getremotechanges(ui, repo,
556 source, force=True)
556 source, force=True)
557 else:
557 else:
558 source = repo
558 source = repo
559
559
560 try:
560 try:
561 if opts.get('continue'):
561 if opts.get('continue'):
562 tp.resume(repo, source, opts)
562 tp.resume(repo, source, opts)
563 return
563 return
564
564
565 tf = tp.transplantfilter(repo, source, p1)
565 tf = tp.transplantfilter(repo, source, p1)
566 if opts.get('prune'):
566 if opts.get('prune'):
567 prune = [source.lookup(r)
567 prune = [source.lookup(r)
568 for r in cmdutil.revrange(source, opts.get('prune'))]
568 for r in cmdutil.revrange(source, opts.get('prune'))]
569 matchfn = lambda x: tf(x) and x not in prune
569 matchfn = lambda x: tf(x) and x not in prune
570 else:
570 else:
571 matchfn = tf
571 matchfn = tf
572 branches = map(source.lookup, opts.get('branch', ()))
572 branches = map(source.lookup, opts.get('branch', ()))
573 merges = map(source.lookup, opts.get('merge', ()))
573 merges = map(source.lookup, opts.get('merge', ()))
574 revmap = {}
574 revmap = {}
575 if revs:
575 if revs:
576 for r in cmdutil.revrange(source, revs):
576 for r in cmdutil.revrange(source, revs):
577 revmap[int(r)] = source.lookup(r)
577 revmap[int(r)] = source.lookup(r)
578 elif opts.get('all') or not merges:
578 elif opts.get('all') or not merges:
579 if source != repo:
579 if source != repo:
580 alltransplants = incwalk(source, incoming, branches,
580 alltransplants = incwalk(source, incoming, branches,
581 match=matchfn)
581 match=matchfn)
582 else:
582 else:
583 alltransplants = transplantwalk(source, p1, branches,
583 alltransplants = transplantwalk(source, p1, branches,
584 match=matchfn)
584 match=matchfn)
585 if opts.get('all'):
585 if opts.get('all'):
586 revs = alltransplants
586 revs = alltransplants
587 else:
587 else:
588 revs, newmerges = browserevs(ui, source, alltransplants, opts)
588 revs, newmerges = browserevs(ui, source, alltransplants, opts)
589 merges.extend(newmerges)
589 merges.extend(newmerges)
590 for r in revs:
590 for r in revs:
591 revmap[source.changelog.rev(r)] = r
591 revmap[source.changelog.rev(r)] = r
592 for r in merges:
592 for r in merges:
593 revmap[source.changelog.rev(r)] = r
593 revmap[source.changelog.rev(r)] = r
594
594
595 tp.apply(repo, source, revmap, merges, opts)
595 tp.apply(repo, source, revmap, merges, opts)
596 finally:
596 finally:
597 if bundle:
597 if bundle:
598 source.close()
598 source.close()
599 os.unlink(bundle)
599 os.unlink(bundle)
600
600
601 def revsettransplanted(repo, subset, x):
601 def revsettransplanted(repo, subset, x):
602 """``transplanted(set)``
602 """``transplanted(set)``
603 Transplanted changesets in set.
603 Transplanted changesets in set.
604 """
604 """
605 if x:
605 if x:
606 s = revset.getset(repo, subset, x)
606 s = revset.getset(repo, subset, x)
607 else:
607 else:
608 s = subset
608 s = subset
609 cs = set()
609 cs = set()
610 for r in xrange(0, len(repo)):
610 for r in xrange(0, len(repo)):
611 if repo[r].extra().get('transplant_source'):
611 if repo[r].extra().get('transplant_source'):
612 cs.add(r)
612 cs.add(r)
613 return [r for r in s if r in cs]
613 return [r for r in s if r in cs]
614
614
615 def kwtransplanted(repo, ctx, **args):
615 def kwtransplanted(repo, ctx, **args):
616 """:transplanted: String. The node identifier of the transplanted
616 """:transplanted: String. The node identifier of the transplanted
617 changeset if any."""
617 changeset if any."""
618 n = ctx.extra().get('transplant_source')
618 n = ctx.extra().get('transplant_source')
619 return n and revlog.hex(n) or ''
619 return n and revlog.hex(n) or ''
620
620
621 def extsetup(ui):
621 def extsetup(ui):
622 revset.symbols['transplanted'] = revsettransplanted
622 revset.symbols['transplanted'] = revsettransplanted
623 templatekw.keywords['transplanted'] = kwtransplanted
623 templatekw.keywords['transplanted'] = kwtransplanted
624
624
625 cmdtable = {
625 cmdtable = {
626 "transplant":
626 "transplant":
627 (transplant,
627 (transplant,
628 [('s', 'source', '',
628 [('s', 'source', '',
629 _('pull patches from REPO'), _('REPO')),
629 _('pull patches from REPO'), _('REPO')),
630 ('b', 'branch', [],
630 ('b', 'branch', [],
631 _('pull patches from branch BRANCH'), _('BRANCH')),
631 _('pull patches from branch BRANCH'), _('BRANCH')),
632 ('a', 'all', None, _('pull all changesets up to BRANCH')),
632 ('a', 'all', None, _('pull all changesets up to BRANCH')),
633 ('p', 'prune', [],
633 ('p', 'prune', [],
634 _('skip over REV'), _('REV')),
634 _('skip over REV'), _('REV')),
635 ('m', 'merge', [],
635 ('m', 'merge', [],
636 _('merge at REV'), _('REV')),
636 _('merge at REV'), _('REV')),
637 ('', 'log', None, _('append transplant info to log message')),
637 ('', 'log', None, _('append transplant info to log message')),
638 ('c', 'continue', None, _('continue last transplant session '
638 ('c', 'continue', None, _('continue last transplant session '
639 'after repair')),
639 'after repair')),
640 ('', 'filter', '',
640 ('', 'filter', '',
641 _('filter changesets through command'), _('CMD'))],
641 _('filter changesets through command'), _('CMD'))],
642 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
642 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
643 '[-m REV] [REV]...'))
643 '[-m REV] [REV]...'))
644 }
644 }
645
645
646 # tell hggettext to extract docstrings from these functions:
646 # tell hggettext to extract docstrings from these functions:
647 i18nfunctions = [revsettransplanted, kwtransplanted]
647 i18nfunctions = [revsettransplanted, kwtransplanted]
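For readers unfamiliar with the registration pattern above: extsetup() installs the new revset predicate and template keyword simply by adding entries to revset.symbols and templatekw.keywords, and cmdtable maps the command name to a (function, options, synopsis) tuple. A minimal sketch of a hypothetical extension using the same hooks (the names myext, mine and some_marker are illustrative only, not part of this changeset):

# myext.py - hypothetical extension sketched against the same-era APIs used above
from mercurial import revset, templatekw

def revsetmine(repo, subset, x):
    """``mine(set)``
    Changesets in set whose extra data carries some_marker (illustration)."""
    if x:
        s = revset.getset(repo, subset, x)
    else:
        s = subset
    return [r for r in s if repo[r].extra().get('some_marker')]

def kwmine(repo, ctx, **args):
    """:mine: String. The marker recorded in the changeset, if any (illustration)."""
    return ctx.extra().get('some_marker', '')

def extsetup(ui):
    # same registration style as the transplant extension above
    revset.symbols['mine'] = revsetmine
    templatekw.keywords['mine'] = kwmine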
@@ -1,284 +1,284 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex
9 from node import hex
10 import cmdutil
10 import cmdutil
11 import util, encoding
11 import scmutil, util, encoding
12 import cStringIO, os, tarfile, time, zipfile
12 import cStringIO, os, tarfile, time, zipfile
13 import zlib, gzip
13 import zlib, gzip
14
14
15 def tidyprefix(dest, kind, prefix):
15 def tidyprefix(dest, kind, prefix):
16 '''choose prefix to use for names in archive. make sure prefix is
16 '''choose prefix to use for names in archive. make sure prefix is
17 safe for consumers.'''
17 safe for consumers.'''
18
18
19 if prefix:
19 if prefix:
20 prefix = util.normpath(prefix)
20 prefix = util.normpath(prefix)
21 else:
21 else:
22 if not isinstance(dest, str):
22 if not isinstance(dest, str):
23 raise ValueError('dest must be string if no prefix')
23 raise ValueError('dest must be string if no prefix')
24 prefix = os.path.basename(dest)
24 prefix = os.path.basename(dest)
25 lower = prefix.lower()
25 lower = prefix.lower()
26 for sfx in exts.get(kind, []):
26 for sfx in exts.get(kind, []):
27 if lower.endswith(sfx):
27 if lower.endswith(sfx):
28 prefix = prefix[:-len(sfx)]
28 prefix = prefix[:-len(sfx)]
29 break
29 break
30 lpfx = os.path.normpath(util.localpath(prefix))
30 lpfx = os.path.normpath(util.localpath(prefix))
31 prefix = util.pconvert(lpfx)
31 prefix = util.pconvert(lpfx)
32 if not prefix.endswith('/'):
32 if not prefix.endswith('/'):
33 prefix += '/'
33 prefix += '/'
34 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
34 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
35 raise util.Abort(_('archive prefix contains illegal components'))
35 raise util.Abort(_('archive prefix contains illegal components'))
36 return prefix
36 return prefix
37
37
38 exts = {
38 exts = {
39 'tar': ['.tar'],
39 'tar': ['.tar'],
40 'tbz2': ['.tbz2', '.tar.bz2'],
40 'tbz2': ['.tbz2', '.tar.bz2'],
41 'tgz': ['.tgz', '.tar.gz'],
41 'tgz': ['.tgz', '.tar.gz'],
42 'zip': ['.zip'],
42 'zip': ['.zip'],
43 }
43 }
44
44
45 def guesskind(dest):
45 def guesskind(dest):
46 for kind, extensions in exts.iteritems():
46 for kind, extensions in exts.iteritems():
47 if util.any(dest.endswith(ext) for ext in extensions):
47 if util.any(dest.endswith(ext) for ext in extensions):
48 return kind
48 return kind
49 return None
49 return None
50
50
51
51
52 class tarit(object):
52 class tarit(object):
53 '''write archive to tar file or stream. can write uncompressed,
53 '''write archive to tar file or stream. can write uncompressed,
54 or compress with gzip or bzip2.'''
54 or compress with gzip or bzip2.'''
55
55
56 class GzipFileWithTime(gzip.GzipFile):
56 class GzipFileWithTime(gzip.GzipFile):
57
57
58 def __init__(self, *args, **kw):
58 def __init__(self, *args, **kw):
59 timestamp = None
59 timestamp = None
60 if 'timestamp' in kw:
60 if 'timestamp' in kw:
61 timestamp = kw.pop('timestamp')
61 timestamp = kw.pop('timestamp')
62 if timestamp is None:
62 if timestamp is None:
63 self.timestamp = time.time()
63 self.timestamp = time.time()
64 else:
64 else:
65 self.timestamp = timestamp
65 self.timestamp = timestamp
66 gzip.GzipFile.__init__(self, *args, **kw)
66 gzip.GzipFile.__init__(self, *args, **kw)
67
67
68 def _write_gzip_header(self):
68 def _write_gzip_header(self):
69 self.fileobj.write('\037\213') # magic header
69 self.fileobj.write('\037\213') # magic header
70 self.fileobj.write('\010') # compression method
70 self.fileobj.write('\010') # compression method
71 # Python 2.6 deprecates self.filename
71 # Python 2.6 deprecates self.filename
72 fname = getattr(self, 'name', None) or self.filename
72 fname = getattr(self, 'name', None) or self.filename
73 if fname and fname.endswith('.gz'):
73 if fname and fname.endswith('.gz'):
74 fname = fname[:-3]
74 fname = fname[:-3]
75 flags = 0
75 flags = 0
76 if fname:
76 if fname:
77 flags = gzip.FNAME
77 flags = gzip.FNAME
78 self.fileobj.write(chr(flags))
78 self.fileobj.write(chr(flags))
79 gzip.write32u(self.fileobj, long(self.timestamp))
79 gzip.write32u(self.fileobj, long(self.timestamp))
80 self.fileobj.write('\002')
80 self.fileobj.write('\002')
81 self.fileobj.write('\377')
81 self.fileobj.write('\377')
82 if fname:
82 if fname:
83 self.fileobj.write(fname + '\000')
83 self.fileobj.write(fname + '\000')
84
84
85 def __init__(self, dest, mtime, kind=''):
85 def __init__(self, dest, mtime, kind=''):
86 self.mtime = mtime
86 self.mtime = mtime
87 self.fileobj = None
87 self.fileobj = None
88
88
89 def taropen(name, mode, fileobj=None):
89 def taropen(name, mode, fileobj=None):
90 if kind == 'gz':
90 if kind == 'gz':
91 mode = mode[0]
91 mode = mode[0]
92 if not fileobj:
92 if not fileobj:
93 fileobj = open(name, mode + 'b')
93 fileobj = open(name, mode + 'b')
94 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
94 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
95 zlib.Z_BEST_COMPRESSION,
95 zlib.Z_BEST_COMPRESSION,
96 fileobj, timestamp=mtime)
96 fileobj, timestamp=mtime)
97 self.fileobj = gzfileobj
97 self.fileobj = gzfileobj
98 return tarfile.TarFile.taropen(name, mode, gzfileobj)
98 return tarfile.TarFile.taropen(name, mode, gzfileobj)
99 else:
99 else:
100 self.fileobj = fileobj
100 self.fileobj = fileobj
101 return tarfile.open(name, mode + kind, fileobj)
101 return tarfile.open(name, mode + kind, fileobj)
102
102
103 if isinstance(dest, str):
103 if isinstance(dest, str):
104 self.z = taropen(dest, mode='w:')
104 self.z = taropen(dest, mode='w:')
105 else:
105 else:
106 # Python 2.5-2.5.1 have a regression that requires a name arg
106 # Python 2.5-2.5.1 have a regression that requires a name arg
107 self.z = taropen(name='', mode='w|', fileobj=dest)
107 self.z = taropen(name='', mode='w|', fileobj=dest)
108
108
109 def addfile(self, name, mode, islink, data):
109 def addfile(self, name, mode, islink, data):
110 i = tarfile.TarInfo(name)
110 i = tarfile.TarInfo(name)
111 i.mtime = self.mtime
111 i.mtime = self.mtime
112 i.size = len(data)
112 i.size = len(data)
113 if islink:
113 if islink:
114 i.type = tarfile.SYMTYPE
114 i.type = tarfile.SYMTYPE
115 i.mode = 0777
115 i.mode = 0777
116 i.linkname = data
116 i.linkname = data
117 data = None
117 data = None
118 i.size = 0
118 i.size = 0
119 else:
119 else:
120 i.mode = mode
120 i.mode = mode
121 data = cStringIO.StringIO(data)
121 data = cStringIO.StringIO(data)
122 self.z.addfile(i, data)
122 self.z.addfile(i, data)
123
123
124 def done(self):
124 def done(self):
125 self.z.close()
125 self.z.close()
126 if self.fileobj:
126 if self.fileobj:
127 self.fileobj.close()
127 self.fileobj.close()
128
128
129 class tellable(object):
129 class tellable(object):
130 '''provide tell method for zipfile.ZipFile when writing to http
130 '''provide tell method for zipfile.ZipFile when writing to http
131 response file object.'''
131 response file object.'''
132
132
133 def __init__(self, fp):
133 def __init__(self, fp):
134 self.fp = fp
134 self.fp = fp
135 self.offset = 0
135 self.offset = 0
136
136
137 def __getattr__(self, key):
137 def __getattr__(self, key):
138 return getattr(self.fp, key)
138 return getattr(self.fp, key)
139
139
140 def write(self, s):
140 def write(self, s):
141 self.fp.write(s)
141 self.fp.write(s)
142 self.offset += len(s)
142 self.offset += len(s)
143
143
144 def tell(self):
144 def tell(self):
145 return self.offset
145 return self.offset
146
146
147 class zipit(object):
147 class zipit(object):
148 '''write archive to zip file or stream. can write uncompressed,
148 '''write archive to zip file or stream. can write uncompressed,
149 or compressed with deflate.'''
149 or compressed with deflate.'''
150
150
151 def __init__(self, dest, mtime, compress=True):
151 def __init__(self, dest, mtime, compress=True):
152 if not isinstance(dest, str):
152 if not isinstance(dest, str):
153 try:
153 try:
154 dest.tell()
154 dest.tell()
155 except (AttributeError, IOError):
155 except (AttributeError, IOError):
156 dest = tellable(dest)
156 dest = tellable(dest)
157 self.z = zipfile.ZipFile(dest, 'w',
157 self.z = zipfile.ZipFile(dest, 'w',
158 compress and zipfile.ZIP_DEFLATED or
158 compress and zipfile.ZIP_DEFLATED or
159 zipfile.ZIP_STORED)
159 zipfile.ZIP_STORED)
160
160
161 # Python's zipfile module emits deprecation warnings if we try
161 # Python's zipfile module emits deprecation warnings if we try
162 # to store files with a date before 1980.
162 # to store files with a date before 1980.
163 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
163 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
164 if mtime < epoch:
164 if mtime < epoch:
165 mtime = epoch
165 mtime = epoch
166
166
167 self.date_time = time.gmtime(mtime)[:6]
167 self.date_time = time.gmtime(mtime)[:6]
168
168
169 def addfile(self, name, mode, islink, data):
169 def addfile(self, name, mode, islink, data):
170 i = zipfile.ZipInfo(name, self.date_time)
170 i = zipfile.ZipInfo(name, self.date_time)
171 i.compress_type = self.z.compression
171 i.compress_type = self.z.compression
172 # unzip will not honor unix file modes unless file creator is
172 # unzip will not honor unix file modes unless file creator is
173 # set to unix (id 3).
173 # set to unix (id 3).
174 i.create_system = 3
174 i.create_system = 3
175 ftype = 0x8000 # UNX_IFREG in unzip source code
175 ftype = 0x8000 # UNX_IFREG in unzip source code
176 if islink:
176 if islink:
177 mode = 0777
177 mode = 0777
178 ftype = 0xa000 # UNX_IFLNK in unzip source code
178 ftype = 0xa000 # UNX_IFLNK in unzip source code
179 i.external_attr = (mode | ftype) << 16L
179 i.external_attr = (mode | ftype) << 16L
180 self.z.writestr(i, data)
180 self.z.writestr(i, data)
181
181
182 def done(self):
182 def done(self):
183 self.z.close()
183 self.z.close()
184
184
185 class fileit(object):
185 class fileit(object):
186 '''write archive as files in directory.'''
186 '''write archive as files in directory.'''
187
187
188 def __init__(self, name, mtime):
188 def __init__(self, name, mtime):
189 self.basedir = name
189 self.basedir = name
190 self.opener = util.opener(self.basedir)
190 self.opener = scmutil.opener(self.basedir)
191
191
192 def addfile(self, name, mode, islink, data):
192 def addfile(self, name, mode, islink, data):
193 if islink:
193 if islink:
194 self.opener.symlink(data, name)
194 self.opener.symlink(data, name)
195 return
195 return
196 f = self.opener(name, "w", atomictemp=True)
196 f = self.opener(name, "w", atomictemp=True)
197 f.write(data)
197 f.write(data)
198 f.rename()
198 f.rename()
199 destfile = os.path.join(self.basedir, name)
199 destfile = os.path.join(self.basedir, name)
200 os.chmod(destfile, mode)
200 os.chmod(destfile, mode)
201
201
202 def done(self):
202 def done(self):
203 pass
203 pass
204
204
205 archivers = {
205 archivers = {
206 'files': fileit,
206 'files': fileit,
207 'tar': tarit,
207 'tar': tarit,
208 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
208 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
209 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
209 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
210 'uzip': lambda name, mtime: zipit(name, mtime, False),
210 'uzip': lambda name, mtime: zipit(name, mtime, False),
211 'zip': zipit,
211 'zip': zipit,
212 }
212 }
213
213
214 def archive(repo, dest, node, kind, decode=True, matchfn=None,
214 def archive(repo, dest, node, kind, decode=True, matchfn=None,
215 prefix=None, mtime=None, subrepos=False):
215 prefix=None, mtime=None, subrepos=False):
216 '''create archive of repo as it was at node.
216 '''create archive of repo as it was at node.
217
217
218 dest can be name of directory, name of archive file, or file
218 dest can be name of directory, name of archive file, or file
219 object to write archive to.
219 object to write archive to.
220
220
221 kind is type of archive to create.
221 kind is type of archive to create.
222
222
223 decode tells whether to put files through decode filters from
223 decode tells whether to put files through decode filters from
224 hgrc.
224 hgrc.
225
225
226 matchfn is function to filter names of files to write to archive.
226 matchfn is function to filter names of files to write to archive.
227
227
228 prefix is name of path to put before every archive member.'''
228 prefix is name of path to put before every archive member.'''
229
229
230 if kind == 'files':
230 if kind == 'files':
231 if prefix:
231 if prefix:
232 raise util.Abort(_('cannot give prefix when archiving to files'))
232 raise util.Abort(_('cannot give prefix when archiving to files'))
233 else:
233 else:
234 prefix = tidyprefix(dest, kind, prefix)
234 prefix = tidyprefix(dest, kind, prefix)
235
235
236 def write(name, mode, islink, getdata):
236 def write(name, mode, islink, getdata):
237 if matchfn and not matchfn(name):
237 if matchfn and not matchfn(name):
238 return
238 return
239 data = getdata()
239 data = getdata()
240 if decode:
240 if decode:
241 data = repo.wwritedata(name, data)
241 data = repo.wwritedata(name, data)
242 archiver.addfile(prefix + name, mode, islink, data)
242 archiver.addfile(prefix + name, mode, islink, data)
243
243
244 if kind not in archivers:
244 if kind not in archivers:
245 raise util.Abort(_("unknown archive type '%s'") % kind)
245 raise util.Abort(_("unknown archive type '%s'") % kind)
246
246
247 ctx = repo[node]
247 ctx = repo[node]
248 archiver = archivers[kind](dest, mtime or ctx.date()[0])
248 archiver = archivers[kind](dest, mtime or ctx.date()[0])
249
249
250 if repo.ui.configbool("ui", "archivemeta", True):
250 if repo.ui.configbool("ui", "archivemeta", True):
251 def metadata():
251 def metadata():
252 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
252 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
253 repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
253 repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
254
254
255 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
255 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
256 if repo.tagtype(t) == 'global')
256 if repo.tagtype(t) == 'global')
257 if not tags:
257 if not tags:
258 repo.ui.pushbuffer()
258 repo.ui.pushbuffer()
259 opts = {'template': '{latesttag}\n{latesttagdistance}',
259 opts = {'template': '{latesttag}\n{latesttagdistance}',
260 'style': '', 'patch': None, 'git': None}
260 'style': '', 'patch': None, 'git': None}
261 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
261 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
262 ltags, dist = repo.ui.popbuffer().split('\n')
262 ltags, dist = repo.ui.popbuffer().split('\n')
263 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
263 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
264 tags += 'latesttagdistance: %s\n' % dist
264 tags += 'latesttagdistance: %s\n' % dist
265
265
266 return base + tags
266 return base + tags
267
267
268 write('.hg_archival.txt', 0644, False, metadata)
268 write('.hg_archival.txt', 0644, False, metadata)
269
269
270 total = len(ctx.manifest())
270 total = len(ctx.manifest())
271 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
271 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
272 for i, f in enumerate(ctx):
272 for i, f in enumerate(ctx):
273 ff = ctx.flags(f)
273 ff = ctx.flags(f)
274 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
274 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
275 repo.ui.progress(_('archiving'), i + 1, item=f,
275 repo.ui.progress(_('archiving'), i + 1, item=f,
276 unit=_('files'), total=total)
276 unit=_('files'), total=total)
277 repo.ui.progress(_('archiving'), None)
277 repo.ui.progress(_('archiving'), None)
278
278
279 if subrepos:
279 if subrepos:
280 for subpath in ctx.substate:
280 for subpath in ctx.substate:
281 sub = ctx.sub(subpath)
281 sub = ctx.sub(subpath)
282 sub.archive(repo.ui, archiver, prefix)
282 sub.archive(repo.ui, archiver, prefix)
283
283
284 archiver.done()
284 archiver.done()
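The substance of this changeset is that archival, like the other callers in this patch, now obtains its opener from scmutil instead of util; fileit above shows the typical pattern. A minimal usage sketch against the same-era API (the directory and file names are illustrative):

# sketch of the opener pattern used by fileit above
from mercurial import scmutil

opener = scmutil.opener('archive-out')        # paths below are relative to this base
f = opener('sub/dir/file.txt', 'w', atomictemp=True)
f.write('hello\n')
f.rename()                                    # atomictemp files are renamed into place
opener.symlink('file.txt', 'sub/dir/link')    # same call fileit uses for symlink members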
@@ -1,4899 +1,4900 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, difflib, time, tempfile
11 import os, re, sys, difflib, time, tempfile
12 import hg, util, revlog, extensions, copies, error, bookmarks
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 import patch, help, mdiff, url, encoding, templatekw, discovery
13 import patch, help, mdiff, url, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 import merge as mergemod
15 import merge as mergemod
16 import minirst, revset, templatefilters
16 import minirst, revset, templatefilters
17 import dagparser
17 import dagparser
18
18
19 # Commands start here, listed alphabetically
19 # Commands start here, listed alphabetically
20
20
21 def add(ui, repo, *pats, **opts):
21 def add(ui, repo, *pats, **opts):
22 """add the specified files on the next commit
22 """add the specified files on the next commit
23
23
24 Schedule files to be version controlled and added to the
24 Schedule files to be version controlled and added to the
25 repository.
25 repository.
26
26
27 The files will be added to the repository at the next commit. To
27 The files will be added to the repository at the next commit. To
28 undo an add before that, see :hg:`forget`.
28 undo an add before that, see :hg:`forget`.
29
29
30 If no names are given, add all files to the repository.
30 If no names are given, add all files to the repository.
31
31
32 .. container:: verbose
32 .. container:: verbose
33
33
34 An example showing how new (unknown) files are added
34 An example showing how new (unknown) files are added
35 automatically by :hg:`add`::
35 automatically by :hg:`add`::
36
36
37 $ ls
37 $ ls
38 foo.c
38 foo.c
39 $ hg status
39 $ hg status
40 ? foo.c
40 ? foo.c
41 $ hg add
41 $ hg add
42 adding foo.c
42 adding foo.c
43 $ hg status
43 $ hg status
44 A foo.c
44 A foo.c
45
45
46 Returns 0 if all files are successfully added.
46 Returns 0 if all files are successfully added.
47 """
47 """
48
48
49 m = cmdutil.match(repo, pats, opts)
49 m = cmdutil.match(repo, pats, opts)
50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
51 opts.get('subrepos'), prefix="")
51 opts.get('subrepos'), prefix="")
52 return rejected and 1 or 0
52 return rejected and 1 or 0
53
53
54 def addremove(ui, repo, *pats, **opts):
54 def addremove(ui, repo, *pats, **opts):
55 """add all new files, delete all missing files
55 """add all new files, delete all missing files
56
56
57 Add all new files and remove all missing files from the
57 Add all new files and remove all missing files from the
58 repository.
58 repository.
59
59
60 New files are ignored if they match any of the patterns in
60 New files are ignored if they match any of the patterns in
61 ``.hgignore``. As with add, these changes take effect at the next
61 ``.hgignore``. As with add, these changes take effect at the next
62 commit.
62 commit.
63
63
64 Use the -s/--similarity option to detect renamed files. With a
64 Use the -s/--similarity option to detect renamed files. With a
65 parameter greater than 0, this compares every removed file with
65 parameter greater than 0, this compares every removed file with
66 every added file and records those similar enough as renames. This
66 every added file and records those similar enough as renames. This
67 option takes a percentage between 0 (disabled) and 100 (files must
67 option takes a percentage between 0 (disabled) and 100 (files must
68 be identical) as its parameter. Detecting renamed files this way
68 be identical) as its parameter. Detecting renamed files this way
69 can be expensive. After using this option, :hg:`status -C` can be
69 can be expensive. After using this option, :hg:`status -C` can be
70 used to check which files were identified as moved or renamed.
70 used to check which files were identified as moved or renamed.
71
71
72 Returns 0 if all files are successfully added.
72 Returns 0 if all files are successfully added.
73 """
73 """
74 try:
74 try:
75 sim = float(opts.get('similarity') or 100)
75 sim = float(opts.get('similarity') or 100)
76 except ValueError:
76 except ValueError:
77 raise util.Abort(_('similarity must be a number'))
77 raise util.Abort(_('similarity must be a number'))
78 if sim < 0 or sim > 100:
78 if sim < 0 or sim > 100:
79 raise util.Abort(_('similarity must be between 0 and 100'))
79 raise util.Abort(_('similarity must be between 0 and 100'))
80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
81
81
82 def annotate(ui, repo, *pats, **opts):
82 def annotate(ui, repo, *pats, **opts):
83 """show changeset information by line for each file
83 """show changeset information by line for each file
84
84
85 List changes in files, showing the revision id responsible for
85 List changes in files, showing the revision id responsible for
86 each line
86 each line
87
87
88 This command is useful for discovering when a change was made and
88 This command is useful for discovering when a change was made and
89 by whom.
89 by whom.
90
90
91 Without the -a/--text option, annotate will avoid processing files
91 Without the -a/--text option, annotate will avoid processing files
92 it detects as binary. With -a, annotate will annotate the file
92 it detects as binary. With -a, annotate will annotate the file
93 anyway, although the results will probably be neither useful
93 anyway, although the results will probably be neither useful
94 nor desirable.
94 nor desirable.
95
95
96 Returns 0 on success.
96 Returns 0 on success.
97 """
97 """
98 if opts.get('follow'):
98 if opts.get('follow'):
99 # --follow is deprecated and now just an alias for -f/--file
99 # --follow is deprecated and now just an alias for -f/--file
100 # to mimic the behavior of Mercurial before version 1.5
100 # to mimic the behavior of Mercurial before version 1.5
101 opts['file'] = 1
101 opts['file'] = 1
102
102
103 datefunc = ui.quiet and util.shortdate or util.datestr
103 datefunc = ui.quiet and util.shortdate or util.datestr
104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
105
105
106 if not pats:
106 if not pats:
107 raise util.Abort(_('at least one filename or pattern is required'))
107 raise util.Abort(_('at least one filename or pattern is required'))
108
108
109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
110 ('number', lambda x: str(x[0].rev())),
110 ('number', lambda x: str(x[0].rev())),
111 ('changeset', lambda x: short(x[0].node())),
111 ('changeset', lambda x: short(x[0].node())),
112 ('date', getdate),
112 ('date', getdate),
113 ('file', lambda x: x[0].path()),
113 ('file', lambda x: x[0].path()),
114 ]
114 ]
115
115
116 if (not opts.get('user') and not opts.get('changeset')
116 if (not opts.get('user') and not opts.get('changeset')
117 and not opts.get('date') and not opts.get('file')):
117 and not opts.get('date') and not opts.get('file')):
118 opts['number'] = 1
118 opts['number'] = 1
119
119
120 linenumber = opts.get('line_number') is not None
120 linenumber = opts.get('line_number') is not None
121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
122 raise util.Abort(_('at least one of -n/-c is required for -l'))
122 raise util.Abort(_('at least one of -n/-c is required for -l'))
123
123
124 funcmap = [func for op, func in opmap if opts.get(op)]
124 funcmap = [func for op, func in opmap if opts.get(op)]
125 if linenumber:
125 if linenumber:
126 lastfunc = funcmap[-1]
126 lastfunc = funcmap[-1]
127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
128
128
129 def bad(x, y):
129 def bad(x, y):
130 raise util.Abort("%s: %s" % (x, y))
130 raise util.Abort("%s: %s" % (x, y))
131
131
132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
133 m = cmdutil.match(repo, pats, opts)
133 m = cmdutil.match(repo, pats, opts)
134 m.bad = bad
134 m.bad = bad
135 follow = not opts.get('no_follow')
135 follow = not opts.get('no_follow')
136 for abs in ctx.walk(m):
136 for abs in ctx.walk(m):
137 fctx = ctx[abs]
137 fctx = ctx[abs]
138 if not opts.get('text') and util.binary(fctx.data()):
138 if not opts.get('text') and util.binary(fctx.data()):
139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
140 continue
140 continue
141
141
142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
143 pieces = []
143 pieces = []
144
144
145 for f in funcmap:
145 for f in funcmap:
146 l = [f(n) for n, dummy in lines]
146 l = [f(n) for n, dummy in lines]
147 if l:
147 if l:
148 sized = [(x, encoding.colwidth(x)) for x in l]
148 sized = [(x, encoding.colwidth(x)) for x in l]
149 ml = max([w for x, w in sized])
149 ml = max([w for x, w in sized])
150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
151
151
152 if pieces:
152 if pieces:
153 for p, l in zip(zip(*pieces), lines):
153 for p, l in zip(zip(*pieces), lines):
154 ui.write("%s: %s" % (" ".join(p), l[1]))
154 ui.write("%s: %s" % (" ".join(p), l[1]))
155
155
156 def archive(ui, repo, dest, **opts):
156 def archive(ui, repo, dest, **opts):
157 '''create an unversioned archive of a repository revision
157 '''create an unversioned archive of a repository revision
158
158
159 By default, the revision used is the parent of the working
159 By default, the revision used is the parent of the working
160 directory; use -r/--rev to specify a different revision.
160 directory; use -r/--rev to specify a different revision.
161
161
162 The archive type is automatically detected based on file
162 The archive type is automatically detected based on file
163 extension (or override using -t/--type).
163 extension (or override using -t/--type).
164
164
165 Valid types are:
165 Valid types are:
166
166
167 :``files``: a directory full of files (default)
167 :``files``: a directory full of files (default)
168 :``tar``: tar archive, uncompressed
168 :``tar``: tar archive, uncompressed
169 :``tbz2``: tar archive, compressed using bzip2
169 :``tbz2``: tar archive, compressed using bzip2
170 :``tgz``: tar archive, compressed using gzip
170 :``tgz``: tar archive, compressed using gzip
171 :``uzip``: zip archive, uncompressed
171 :``uzip``: zip archive, uncompressed
172 :``zip``: zip archive, compressed using deflate
172 :``zip``: zip archive, compressed using deflate
173
173
174 The exact name of the destination archive or directory is given
174 The exact name of the destination archive or directory is given
175 using a format string; see :hg:`help export` for details.
175 using a format string; see :hg:`help export` for details.
176
176
177 Each member added to an archive file has a directory prefix
177 Each member added to an archive file has a directory prefix
178 prepended. Use -p/--prefix to specify a format string for the
178 prepended. Use -p/--prefix to specify a format string for the
179 prefix. The default is the basename of the archive, with suffixes
179 prefix. The default is the basename of the archive, with suffixes
180 removed.
180 removed.
181
181
182 Returns 0 on success.
182 Returns 0 on success.
183 '''
183 '''
184
184
185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
186 if not ctx:
186 if not ctx:
187 raise util.Abort(_('no working directory: please specify a revision'))
187 raise util.Abort(_('no working directory: please specify a revision'))
188 node = ctx.node()
188 node = ctx.node()
189 dest = cmdutil.make_filename(repo, dest, node)
189 dest = cmdutil.make_filename(repo, dest, node)
190 if os.path.realpath(dest) == repo.root:
190 if os.path.realpath(dest) == repo.root:
191 raise util.Abort(_('repository root cannot be destination'))
191 raise util.Abort(_('repository root cannot be destination'))
192
192
193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
194 prefix = opts.get('prefix')
194 prefix = opts.get('prefix')
195
195
196 if dest == '-':
196 if dest == '-':
197 if kind == 'files':
197 if kind == 'files':
198 raise util.Abort(_('cannot archive plain files to stdout'))
198 raise util.Abort(_('cannot archive plain files to stdout'))
199 dest = sys.stdout
199 dest = sys.stdout
200 if not prefix:
200 if not prefix:
201 prefix = os.path.basename(repo.root) + '-%h'
201 prefix = os.path.basename(repo.root) + '-%h'
202
202
203 prefix = cmdutil.make_filename(repo, prefix, node)
203 prefix = cmdutil.make_filename(repo, prefix, node)
204 matchfn = cmdutil.match(repo, [], opts)
204 matchfn = cmdutil.match(repo, [], opts)
205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
206 matchfn, prefix, subrepos=opts.get('subrepos'))
206 matchfn, prefix, subrepos=opts.get('subrepos'))
207
207
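Tying the type list in the docstring back to archival.guesskind shown earlier: the kind is taken from -t/--type if given, otherwise guessed from the destination's extension, otherwise it falls back to 'files'. A small sketch (file names illustrative):

# sketch: resolving the archive kind from the destination name, as archive() does
from mercurial import archival

for dest in ('proj-1.0.tar.gz', 'proj-1.0.zip', 'proj-1.0.tbz2', 'proj-1.0'):
    kind = archival.guesskind(dest) or 'files'   # same fallback as the command above
    print '%s -> %s' % (dest, kind)
# prints tgz, zip, tbz2 and files respectively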
208 def backout(ui, repo, node=None, rev=None, **opts):
208 def backout(ui, repo, node=None, rev=None, **opts):
209 '''reverse effect of earlier changeset
209 '''reverse effect of earlier changeset
210
210
211 Prepare a new changeset with the effect of REV undone in the
211 Prepare a new changeset with the effect of REV undone in the
212 current working directory.
212 current working directory.
213
213
214 If REV is the parent of the working directory, then this new changeset
214 If REV is the parent of the working directory, then this new changeset
215 is committed automatically. Otherwise, hg needs to merge the
215 is committed automatically. Otherwise, hg needs to merge the
216 changes and the merged result is left uncommitted.
216 changes and the merged result is left uncommitted.
217
217
218 By default, the pending changeset will have one parent,
218 By default, the pending changeset will have one parent,
219 maintaining a linear history. With --merge, the pending changeset
219 maintaining a linear history. With --merge, the pending changeset
220 will instead have two parents: the old parent of the working
220 will instead have two parents: the old parent of the working
221 directory and a new child of REV that simply undoes REV.
221 directory and a new child of REV that simply undoes REV.
222
222
223 Before version 1.7, the behavior without --merge was equivalent to
223 Before version 1.7, the behavior without --merge was equivalent to
224 specifying --merge followed by :hg:`update --clean .` to cancel
224 specifying --merge followed by :hg:`update --clean .` to cancel
225 the merge and leave the child of REV as a head to be merged
225 the merge and leave the child of REV as a head to be merged
226 separately.
226 separately.
227
227
228 See :hg:`help dates` for a list of formats valid for -d/--date.
228 See :hg:`help dates` for a list of formats valid for -d/--date.
229
229
230 Returns 0 on success.
230 Returns 0 on success.
231 '''
231 '''
232 if rev and node:
232 if rev and node:
233 raise util.Abort(_("please specify just one revision"))
233 raise util.Abort(_("please specify just one revision"))
234
234
235 if not rev:
235 if not rev:
236 rev = node
236 rev = node
237
237
238 if not rev:
238 if not rev:
239 raise util.Abort(_("please specify a revision to backout"))
239 raise util.Abort(_("please specify a revision to backout"))
240
240
241 date = opts.get('date')
241 date = opts.get('date')
242 if date:
242 if date:
243 opts['date'] = util.parsedate(date)
243 opts['date'] = util.parsedate(date)
244
244
245 cmdutil.bail_if_changed(repo)
245 cmdutil.bail_if_changed(repo)
246 node = cmdutil.revsingle(repo, rev).node()
246 node = cmdutil.revsingle(repo, rev).node()
247
247
248 op1, op2 = repo.dirstate.parents()
248 op1, op2 = repo.dirstate.parents()
249 a = repo.changelog.ancestor(op1, node)
249 a = repo.changelog.ancestor(op1, node)
250 if a != node:
250 if a != node:
251 raise util.Abort(_('cannot backout change on a different branch'))
251 raise util.Abort(_('cannot backout change on a different branch'))
252
252
253 p1, p2 = repo.changelog.parents(node)
253 p1, p2 = repo.changelog.parents(node)
254 if p1 == nullid:
254 if p1 == nullid:
255 raise util.Abort(_('cannot backout a change with no parents'))
255 raise util.Abort(_('cannot backout a change with no parents'))
256 if p2 != nullid:
256 if p2 != nullid:
257 if not opts.get('parent'):
257 if not opts.get('parent'):
258 raise util.Abort(_('cannot backout a merge changeset without '
258 raise util.Abort(_('cannot backout a merge changeset without '
259 '--parent'))
259 '--parent'))
260 p = repo.lookup(opts['parent'])
260 p = repo.lookup(opts['parent'])
261 if p not in (p1, p2):
261 if p not in (p1, p2):
262 raise util.Abort(_('%s is not a parent of %s') %
262 raise util.Abort(_('%s is not a parent of %s') %
263 (short(p), short(node)))
263 (short(p), short(node)))
264 parent = p
264 parent = p
265 else:
265 else:
266 if opts.get('parent'):
266 if opts.get('parent'):
267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
268 parent = p1
268 parent = p1
269
269
270 # the backout should appear on the same branch
270 # the backout should appear on the same branch
271 branch = repo.dirstate.branch()
271 branch = repo.dirstate.branch()
272 hg.clean(repo, node, show_stats=False)
272 hg.clean(repo, node, show_stats=False)
273 repo.dirstate.setbranch(branch)
273 repo.dirstate.setbranch(branch)
274 revert_opts = opts.copy()
274 revert_opts = opts.copy()
275 revert_opts['date'] = None
275 revert_opts['date'] = None
276 revert_opts['all'] = True
276 revert_opts['all'] = True
277 revert_opts['rev'] = hex(parent)
277 revert_opts['rev'] = hex(parent)
278 revert_opts['no_backup'] = None
278 revert_opts['no_backup'] = None
279 revert(ui, repo, **revert_opts)
279 revert(ui, repo, **revert_opts)
280 if not opts.get('merge') and op1 != node:
280 if not opts.get('merge') and op1 != node:
281 try:
281 try:
282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
283 return hg.update(repo, op1)
283 return hg.update(repo, op1)
284 finally:
284 finally:
285 ui.setconfig('ui', 'forcemerge', '')
285 ui.setconfig('ui', 'forcemerge', '')
286
286
287 commit_opts = opts.copy()
287 commit_opts = opts.copy()
288 commit_opts['addremove'] = False
288 commit_opts['addremove'] = False
289 if not commit_opts['message'] and not commit_opts['logfile']:
289 if not commit_opts['message'] and not commit_opts['logfile']:
290 # we don't translate commit messages
290 # we don't translate commit messages
291 commit_opts['message'] = "Backed out changeset %s" % short(node)
291 commit_opts['message'] = "Backed out changeset %s" % short(node)
292 commit_opts['force_editor'] = True
292 commit_opts['force_editor'] = True
293 commit(ui, repo, **commit_opts)
293 commit(ui, repo, **commit_opts)
294 def nice(node):
294 def nice(node):
295 return '%d:%s' % (repo.changelog.rev(node), short(node))
295 return '%d:%s' % (repo.changelog.rev(node), short(node))
296 ui.status(_('changeset %s backs out changeset %s\n') %
296 ui.status(_('changeset %s backs out changeset %s\n') %
297 (nice(repo.changelog.tip()), nice(node)))
297 (nice(repo.changelog.tip()), nice(node)))
298 if opts.get('merge') and op1 != node:
298 if opts.get('merge') and op1 != node:
299 hg.clean(repo, op1, show_stats=False)
299 hg.clean(repo, op1, show_stats=False)
300 ui.status(_('merging with changeset %s\n')
300 ui.status(_('merging with changeset %s\n')
301 % nice(repo.changelog.tip()))
301 % nice(repo.changelog.tip()))
302 try:
302 try:
303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
304 return hg.merge(repo, hex(repo.changelog.tip()))
304 return hg.merge(repo, hex(repo.changelog.tip()))
305 finally:
305 finally:
306 ui.setconfig('ui', 'forcemerge', '')
306 ui.setconfig('ui', 'forcemerge', '')
307 return 0
307 return 0
308
308
309 def bisect(ui, repo, rev=None, extra=None, command=None,
309 def bisect(ui, repo, rev=None, extra=None, command=None,
310 reset=None, good=None, bad=None, skip=None, extend=None,
310 reset=None, good=None, bad=None, skip=None, extend=None,
311 noupdate=None):
311 noupdate=None):
312 """subdivision search of changesets
312 """subdivision search of changesets
313
313
314 This command helps to find changesets which introduce problems. To
314 This command helps to find changesets which introduce problems. To
315 use, mark the earliest changeset you know exhibits the problem as
315 use, mark the earliest changeset you know exhibits the problem as
316 bad, then mark the latest changeset which is free from the problem
316 bad, then mark the latest changeset which is free from the problem
317 as good. Bisect will update your working directory to a revision
317 as good. Bisect will update your working directory to a revision
318 for testing (unless the -U/--noupdate option is specified). Once
318 for testing (unless the -U/--noupdate option is specified). Once
319 you have performed tests, mark the working directory as good or
319 you have performed tests, mark the working directory as good or
320 bad, and bisect will either update to another candidate changeset
320 bad, and bisect will either update to another candidate changeset
321 or announce that it has found the bad revision.
321 or announce that it has found the bad revision.
322
322
323 As a shortcut, you can also use the revision argument to mark a
323 As a shortcut, you can also use the revision argument to mark a
324 revision as good or bad without checking it out first.
324 revision as good or bad without checking it out first.
325
325
326 If you supply a command, it will be used for automatic bisection.
326 If you supply a command, it will be used for automatic bisection.
327 Its exit status will be used to mark revisions as good or bad:
327 Its exit status will be used to mark revisions as good or bad:
328 status 0 means good, 125 means to skip the revision, 127
328 status 0 means good, 125 means to skip the revision, 127
329 (command not found) will abort the bisection, and any other
329 (command not found) will abort the bisection, and any other
330 non-zero exit status means the revision is bad.
330 non-zero exit status means the revision is bad.
331
331
332 Returns 0 on success.
332 Returns 0 on success.
333 """
333 """
334 def extendbisectrange(nodes, good):
334 def extendbisectrange(nodes, good):
335 # bisect is incomplete when it ends on a merge node and
335 # bisect is incomplete when it ends on a merge node and
336 # one of the parent was not checked.
336 # one of the parent was not checked.
337 parents = repo[nodes[0]].parents()
337 parents = repo[nodes[0]].parents()
338 if len(parents) > 1:
338 if len(parents) > 1:
339 side = good and state['bad'] or state['good']
339 side = good and state['bad'] or state['good']
340 num = len(set(i.node() for i in parents) & set(side))
340 num = len(set(i.node() for i in parents) & set(side))
341 if num == 1:
341 if num == 1:
342 return parents[0].ancestor(parents[1])
342 return parents[0].ancestor(parents[1])
343 return None
343 return None
344
344
345 def print_result(nodes, good):
345 def print_result(nodes, good):
346 displayer = cmdutil.show_changeset(ui, repo, {})
346 displayer = cmdutil.show_changeset(ui, repo, {})
347 if len(nodes) == 1:
347 if len(nodes) == 1:
348 # narrowed it down to a single revision
348 # narrowed it down to a single revision
349 if good:
349 if good:
350 ui.write(_("The first good revision is:\n"))
350 ui.write(_("The first good revision is:\n"))
351 else:
351 else:
352 ui.write(_("The first bad revision is:\n"))
352 ui.write(_("The first bad revision is:\n"))
353 displayer.show(repo[nodes[0]])
353 displayer.show(repo[nodes[0]])
354 parents = repo[nodes[0]].parents()
354 parents = repo[nodes[0]].parents()
355 extendnode = extendbisectrange(nodes, good)
355 extendnode = extendbisectrange(nodes, good)
356 if extendnode is not None:
356 if extendnode is not None:
357 ui.write(_('Not all ancestors of this changeset have been'
357 ui.write(_('Not all ancestors of this changeset have been'
358 ' checked.\nUse bisect --extend to continue the '
358 ' checked.\nUse bisect --extend to continue the '
359 'bisection from\nthe common ancestor, %s.\n')
359 'bisection from\nthe common ancestor, %s.\n')
360 % short(extendnode.node()))
360 % short(extendnode.node()))
361 else:
361 else:
362 # multiple possible revisions
362 # multiple possible revisions
363 if good:
363 if good:
364 ui.write(_("Due to skipped revisions, the first "
364 ui.write(_("Due to skipped revisions, the first "
365 "good revision could be any of:\n"))
365 "good revision could be any of:\n"))
366 else:
366 else:
367 ui.write(_("Due to skipped revisions, the first "
367 ui.write(_("Due to skipped revisions, the first "
368 "bad revision could be any of:\n"))
368 "bad revision could be any of:\n"))
369 for n in nodes:
369 for n in nodes:
370 displayer.show(repo[n])
370 displayer.show(repo[n])
371 displayer.close()
371 displayer.close()
372
372
373 def check_state(state, interactive=True):
373 def check_state(state, interactive=True):
374 if not state['good'] or not state['bad']:
374 if not state['good'] or not state['bad']:
375 if (good or bad or skip or reset) and interactive:
375 if (good or bad or skip or reset) and interactive:
376 return
376 return
377 if not state['good']:
377 if not state['good']:
378 raise util.Abort(_('cannot bisect (no known good revisions)'))
378 raise util.Abort(_('cannot bisect (no known good revisions)'))
379 else:
379 else:
380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
381 return True
381 return True
382
382
383 # backward compatibility
383 # backward compatibility
384 if rev in "good bad reset init".split():
384 if rev in "good bad reset init".split():
385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
386 cmd, rev, extra = rev, extra, None
386 cmd, rev, extra = rev, extra, None
387 if cmd == "good":
387 if cmd == "good":
388 good = True
388 good = True
389 elif cmd == "bad":
389 elif cmd == "bad":
390 bad = True
390 bad = True
391 else:
391 else:
392 reset = True
392 reset = True
393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
394 raise util.Abort(_('incompatible arguments'))
394 raise util.Abort(_('incompatible arguments'))
395
395
396 if reset:
396 if reset:
397 p = repo.join("bisect.state")
397 p = repo.join("bisect.state")
398 if os.path.exists(p):
398 if os.path.exists(p):
399 os.unlink(p)
399 os.unlink(p)
400 return
400 return
401
401
402 state = hbisect.load_state(repo)
402 state = hbisect.load_state(repo)
403
403
404 if command:
404 if command:
405 changesets = 1
405 changesets = 1
406 try:
406 try:
407 while changesets:
407 while changesets:
408 # update state
408 # update state
409 status = util.system(command)
409 status = util.system(command)
410 if status == 125:
410 if status == 125:
411 transition = "skip"
411 transition = "skip"
412 elif status == 0:
412 elif status == 0:
413 transition = "good"
413 transition = "good"
414 # status < 0 means process was killed
414 # status < 0 means process was killed
415 elif status == 127:
415 elif status == 127:
416 raise util.Abort(_("failed to execute %s") % command)
416 raise util.Abort(_("failed to execute %s") % command)
417 elif status < 0:
417 elif status < 0:
418 raise util.Abort(_("%s killed") % command)
418 raise util.Abort(_("%s killed") % command)
419 else:
419 else:
420 transition = "bad"
420 transition = "bad"
421 ctx = cmdutil.revsingle(repo, rev)
421 ctx = cmdutil.revsingle(repo, rev)
422 rev = None # clear for future iterations
422 rev = None # clear for future iterations
423 state[transition].append(ctx.node())
423 state[transition].append(ctx.node())
424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
425 check_state(state, interactive=False)
425 check_state(state, interactive=False)
426 # bisect
426 # bisect
427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
428 # update to next check
428 # update to next check
429 cmdutil.bail_if_changed(repo)
429 cmdutil.bail_if_changed(repo)
430 hg.clean(repo, nodes[0], show_stats=False)
430 hg.clean(repo, nodes[0], show_stats=False)
431 finally:
431 finally:
432 hbisect.save_state(repo, state)
432 hbisect.save_state(repo, state)
433 print_result(nodes, good)
433 print_result(nodes, good)
434 return
434 return
435
435
436 # update state
436 # update state
437
437
438 if rev:
438 if rev:
439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
440 else:
440 else:
441 nodes = [repo.lookup('.')]
441 nodes = [repo.lookup('.')]
442
442
443 if good or bad or skip:
443 if good or bad or skip:
444 if good:
444 if good:
445 state['good'] += nodes
445 state['good'] += nodes
446 elif bad:
446 elif bad:
447 state['bad'] += nodes
447 state['bad'] += nodes
448 elif skip:
448 elif skip:
449 state['skip'] += nodes
449 state['skip'] += nodes
450 hbisect.save_state(repo, state)
450 hbisect.save_state(repo, state)
451
451
452 if not check_state(state):
452 if not check_state(state):
453 return
453 return
454
454
455 # actually bisect
455 # actually bisect
456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
457 if extend:
457 if extend:
458 if not changesets:
458 if not changesets:
459 extendnode = extendbisectrange(nodes, good)
459 extendnode = extendbisectrange(nodes, good)
460 if extendnode is not None:
460 if extendnode is not None:
461 ui.write(_("Extending search to changeset %d:%s\n"
461 ui.write(_("Extending search to changeset %d:%s\n"
462 % (extendnode.rev(), short(extendnode.node()))))
462 % (extendnode.rev(), short(extendnode.node()))))
463 if noupdate:
463 if noupdate:
464 return
464 return
465 cmdutil.bail_if_changed(repo)
465 cmdutil.bail_if_changed(repo)
466 return hg.clean(repo, extendnode.node())
466 return hg.clean(repo, extendnode.node())
467 raise util.Abort(_("nothing to extend"))
467 raise util.Abort(_("nothing to extend"))
468
468
469 if changesets == 0:
469 if changesets == 0:
470 print_result(nodes, good)
470 print_result(nodes, good)
471 else:
471 else:
472 assert len(nodes) == 1 # only a single node can be tested next
472 assert len(nodes) == 1 # only a single node can be tested next
473 node = nodes[0]
473 node = nodes[0]
474 # compute the approximate number of remaining tests
474 # compute the approximate number of remaining tests
475 tests, size = 0, 2
475 tests, size = 0, 2
476 while size <= changesets:
476 while size <= changesets:
477 tests, size = tests + 1, size * 2
477 tests, size = tests + 1, size * 2
478 rev = repo.changelog.rev(node)
478 rev = repo.changelog.rev(node)
479 ui.write(_("Testing changeset %d:%s "
479 ui.write(_("Testing changeset %d:%s "
480 "(%d changesets remaining, ~%d tests)\n")
480 "(%d changesets remaining, ~%d tests)\n")
481 % (rev, short(node), changesets, tests))
481 % (rev, short(node), changesets, tests))
482 if not noupdate:
482 if not noupdate:
483 cmdutil.bail_if_changed(repo)
483 cmdutil.bail_if_changed(repo)
484 return hg.clean(repo, node)
484 return hg.clean(repo, node)
485
485
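As the docstring above spells out, a command passed to bisect is judged purely by its exit status: 0 marks the revision good, 125 skips it, 127 aborts the bisection, and any other non-zero status marks it bad. A hypothetical check script honoring that contract (check.py and the test command it runs are illustrative only):

# check.py - hypothetical script for: hg bisect --command "python check.py"
import os
import sys

if not os.path.exists('setup.py'):
    sys.exit(125)                     # revision not testable here: tell bisect to skip
status = os.system('python setup.py test > /dev/null 2>&1')
sys.exit(0 if status == 0 else 1)     # 0 -> good, anything else -> bad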
486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
487 '''track a line of development with movable markers
487 '''track a line of development with movable markers
488
488
489 Bookmarks are pointers to certain commits that move when
489 Bookmarks are pointers to certain commits that move when
490 committing. Bookmarks are local. They can be renamed, copied and
490 committing. Bookmarks are local. They can be renamed, copied and
491 deleted. It is possible to use bookmark names in :hg:`merge` and
491 deleted. It is possible to use bookmark names in :hg:`merge` and
492 :hg:`update` to merge and update respectively to a given bookmark.
492 :hg:`update` to merge and update respectively to a given bookmark.
493
493
494 You can use :hg:`bookmark NAME` to set a bookmark on the working
494 You can use :hg:`bookmark NAME` to set a bookmark on the working
495 directory's parent revision with the given name. If you specify
495 directory's parent revision with the given name. If you specify
496 a revision using -r REV (where REV may be an existing bookmark),
496 a revision using -r REV (where REV may be an existing bookmark),
497 the bookmark is assigned to that revision.
497 the bookmark is assigned to that revision.
498
498
499 Bookmarks can be pushed and pulled between repositories (see :hg:`help
499 Bookmarks can be pushed and pulled between repositories (see :hg:`help
500 push` and :hg:`help pull`). This requires both the local and remote
500 push` and :hg:`help pull`). This requires both the local and remote
501 repositories to support bookmarks. For versions prior to 1.8, this means
501 repositories to support bookmarks. For versions prior to 1.8, this means
502 the bookmarks extension must be enabled.
502 the bookmarks extension must be enabled.
503 '''
503 '''
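# Illustrative usage only (bookmark names are hypothetical):
# 'hg bookmark feature-x' marks the working directory's parent,
# 'hg bookmark -f -r 4 feature-x' moves the existing bookmark to revision 4,
# 'hg bookmark -m feature-x feature-y' renames it, and
# 'hg bookmark -d feature-y' deletes it.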
504 hexfn = ui.debugflag and hex or short
504 hexfn = ui.debugflag and hex or short
505 marks = repo._bookmarks
505 marks = repo._bookmarks
506 cur = repo.changectx('.').node()
506 cur = repo.changectx('.').node()
507
507
508 if rename:
508 if rename:
509 if rename not in marks:
509 if rename not in marks:
510 raise util.Abort(_("bookmark '%s' does not exist") % rename)
510 raise util.Abort(_("bookmark '%s' does not exist") % rename)
511 if mark in marks and not force:
511 if mark in marks and not force:
512 raise util.Abort(_("bookmark '%s' already exists "
512 raise util.Abort(_("bookmark '%s' already exists "
513 "(use -f to force)") % mark)
513 "(use -f to force)") % mark)
514 if mark is None:
514 if mark is None:
515 raise util.Abort(_("new bookmark name required"))
515 raise util.Abort(_("new bookmark name required"))
516 marks[mark] = marks[rename]
516 marks[mark] = marks[rename]
517 if repo._bookmarkcurrent == rename:
517 if repo._bookmarkcurrent == rename:
518 bookmarks.setcurrent(repo, mark)
518 bookmarks.setcurrent(repo, mark)
519 del marks[rename]
519 del marks[rename]
520 bookmarks.write(repo)
520 bookmarks.write(repo)
521 return
521 return
522
522
523 if delete:
523 if delete:
524 if mark is None:
524 if mark is None:
525 raise util.Abort(_("bookmark name required"))
525 raise util.Abort(_("bookmark name required"))
526 if mark not in marks:
526 if mark not in marks:
527 raise util.Abort(_("bookmark '%s' does not exist") % mark)
527 raise util.Abort(_("bookmark '%s' does not exist") % mark)
528 if mark == repo._bookmarkcurrent:
528 if mark == repo._bookmarkcurrent:
529 bookmarks.setcurrent(repo, None)
529 bookmarks.setcurrent(repo, None)
530 del marks[mark]
530 del marks[mark]
531 bookmarks.write(repo)
531 bookmarks.write(repo)
532 return
532 return
533
533
534 if mark is not None:
534 if mark is not None:
535 if "\n" in mark:
535 if "\n" in mark:
536 raise util.Abort(_("bookmark name cannot contain newlines"))
536 raise util.Abort(_("bookmark name cannot contain newlines"))
537 mark = mark.strip()
537 mark = mark.strip()
538 if not mark:
538 if not mark:
539 raise util.Abort(_("bookmark names cannot consist entirely of "
539 raise util.Abort(_("bookmark names cannot consist entirely of "
540 "whitespace"))
540 "whitespace"))
541 if mark in marks and not force:
541 if mark in marks and not force:
542 raise util.Abort(_("bookmark '%s' already exists "
542 raise util.Abort(_("bookmark '%s' already exists "
543 "(use -f to force)") % mark)
543 "(use -f to force)") % mark)
544 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
544 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
545 and not force):
545 and not force):
546 raise util.Abort(
546 raise util.Abort(
547 _("a bookmark cannot have the name of an existing branch"))
547 _("a bookmark cannot have the name of an existing branch"))
548 if rev:
548 if rev:
549 marks[mark] = repo.lookup(rev)
549 marks[mark] = repo.lookup(rev)
550 else:
550 else:
551 marks[mark] = repo.changectx('.').node()
551 marks[mark] = repo.changectx('.').node()
552 if repo.changectx('.').node() == marks[mark]:
552 if repo.changectx('.').node() == marks[mark]:
553 bookmarks.setcurrent(repo, mark)
553 bookmarks.setcurrent(repo, mark)
554 bookmarks.write(repo)
554 bookmarks.write(repo)
555 return
555 return
556
556
557 if mark is None:
557 if mark is None:
558 if rev:
558 if rev:
559 raise util.Abort(_("bookmark name required"))
559 raise util.Abort(_("bookmark name required"))
560 if len(marks) == 0:
560 if len(marks) == 0:
561 ui.status(_("no bookmarks set\n"))
561 ui.status(_("no bookmarks set\n"))
562 else:
562 else:
563 for bmark, n in sorted(marks.iteritems()):
563 for bmark, n in sorted(marks.iteritems()):
564 current = repo._bookmarkcurrent
564 current = repo._bookmarkcurrent
565 if bmark == current and n == cur:
565 if bmark == current and n == cur:
566 prefix, label = '*', 'bookmarks.current'
566 prefix, label = '*', 'bookmarks.current'
567 else:
567 else:
568 prefix, label = ' ', ''
568 prefix, label = ' ', ''
569
569
570 if ui.quiet:
570 if ui.quiet:
571 ui.write("%s\n" % bmark, label=label)
571 ui.write("%s\n" % bmark, label=label)
572 else:
572 else:
573 ui.write(" %s %-25s %d:%s\n" % (
573 ui.write(" %s %-25s %d:%s\n" % (
574 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
574 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
575 label=label)
575 label=label)
576 return
576 return
577
577
578 def branch(ui, repo, label=None, **opts):
578 def branch(ui, repo, label=None, **opts):
579 """set or show the current branch name
579 """set or show the current branch name
580
580
581 With no argument, show the current branch name. With one argument,
581 With no argument, show the current branch name. With one argument,
582 set the working directory branch name (the branch will not exist
582 set the working directory branch name (the branch will not exist
583 in the repository until the next commit). Standard practice
583 in the repository until the next commit). Standard practice
584 recommends that primary development take place on the 'default'
584 recommends that primary development take place on the 'default'
585 branch.
585 branch.
586
586
587 Unless -f/--force is specified, branch will not let you set a
587 Unless -f/--force is specified, branch will not let you set a
588 branch name that already exists, even if it's inactive.
588 branch name that already exists, even if it's inactive.
589
589
590 Use -C/--clean to reset the working directory branch to that of
590 Use -C/--clean to reset the working directory branch to that of
591 the parent of the working directory, negating a previous branch
591 the parent of the working directory, negating a previous branch
592 change.
592 change.
593
593
594 Use the command :hg:`update` to switch to an existing branch. Use
594 Use the command :hg:`update` to switch to an existing branch. Use
595 :hg:`commit --close-branch` to mark this branch as closed.
595 :hg:`commit --close-branch` to mark this branch as closed.
596
596
597 Returns 0 on success.
597 Returns 0 on success.
598 """
598 """
599
599
600 if opts.get('clean'):
600 if opts.get('clean'):
601 label = repo[None].p1().branch()
601 label = repo[None].p1().branch()
602 repo.dirstate.setbranch(label)
602 repo.dirstate.setbranch(label)
603 ui.status(_('reset working directory to branch %s\n') % label)
603 ui.status(_('reset working directory to branch %s\n') % label)
604 elif label:
604 elif label:
605 if not opts.get('force') and label in repo.branchtags():
605 if not opts.get('force') and label in repo.branchtags():
606 if label not in [p.branch() for p in repo.parents()]:
606 if label not in [p.branch() for p in repo.parents()]:
607 raise util.Abort(_('a branch of the same name already exists'
607 raise util.Abort(_('a branch of the same name already exists'
608 " (use 'hg update' to switch to it)"))
608 " (use 'hg update' to switch to it)"))
609 repo.dirstate.setbranch(label)
609 repo.dirstate.setbranch(label)
610 ui.status(_('marked working directory as branch %s\n') % label)
610 ui.status(_('marked working directory as branch %s\n') % label)
611 else:
611 else:
612 ui.write("%s\n" % repo.dirstate.branch())
612 ui.write("%s\n" % repo.dirstate.branch())
613
613
614 def branches(ui, repo, active=False, closed=False):
614 def branches(ui, repo, active=False, closed=False):
615 """list repository named branches
615 """list repository named branches
616
616
617 List the repository's named branches, indicating which ones are
617 List the repository's named branches, indicating which ones are
618 inactive. If -c/--closed is specified, also list branches which have
618 inactive. If -c/--closed is specified, also list branches which have
619 been marked closed (see :hg:`commit --close-branch`).
619 been marked closed (see :hg:`commit --close-branch`).
620
620
621 If -a/--active is specified, only show active branches. A branch
621 If -a/--active is specified, only show active branches. A branch
622 is considered active if it contains repository heads.
622 is considered active if it contains repository heads.
623
623
624 Use the command :hg:`update` to switch to an existing branch.
624 Use the command :hg:`update` to switch to an existing branch.
625
625
626 Returns 0.
626 Returns 0.
627 """
627 """
628
628
629 hexfunc = ui.debugflag and hex or short
629 hexfunc = ui.debugflag and hex or short
630 activebranches = [repo[n].branch() for n in repo.heads()]
630 activebranches = [repo[n].branch() for n in repo.heads()]
631 def testactive(tag, node):
631 def testactive(tag, node):
632 realhead = tag in activebranches
632 realhead = tag in activebranches
633 open = node in repo.branchheads(tag, closed=False)
633 open = node in repo.branchheads(tag, closed=False)
634 return realhead and open
634 return realhead and open
635 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
635 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
636 for tag, node in repo.branchtags().items()],
636 for tag, node in repo.branchtags().items()],
637 reverse=True)
637 reverse=True)
638
638
639 for isactive, node, tag in branches:
639 for isactive, node, tag in branches:
640 if (not active) or isactive:
640 if (not active) or isactive:
641 if ui.quiet:
641 if ui.quiet:
642 ui.write("%s\n" % tag)
642 ui.write("%s\n" % tag)
643 else:
643 else:
644 hn = repo.lookup(node)
644 hn = repo.lookup(node)
645 if isactive:
645 if isactive:
646 label = 'branches.active'
646 label = 'branches.active'
647 notice = ''
647 notice = ''
648 elif hn not in repo.branchheads(tag, closed=False):
648 elif hn not in repo.branchheads(tag, closed=False):
649 if not closed:
649 if not closed:
650 continue
650 continue
651 label = 'branches.closed'
651 label = 'branches.closed'
652 notice = _(' (closed)')
652 notice = _(' (closed)')
653 else:
653 else:
654 label = 'branches.inactive'
654 label = 'branches.inactive'
655 notice = _(' (inactive)')
655 notice = _(' (inactive)')
656 if tag == repo.dirstate.branch():
656 if tag == repo.dirstate.branch():
657 label = 'branches.current'
657 label = 'branches.current'
658 rev = str(node).rjust(31 - encoding.colwidth(tag))
658 rev = str(node).rjust(31 - encoding.colwidth(tag))
659 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
659 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
660 tag = ui.label(tag, label)
660 tag = ui.label(tag, label)
661 ui.write("%s %s%s\n" % (tag, rev, notice))
661 ui.write("%s %s%s\n" % (tag, rev, notice))
662
662
663 def bundle(ui, repo, fname, dest=None, **opts):
663 def bundle(ui, repo, fname, dest=None, **opts):
664 """create a changegroup file
664 """create a changegroup file
665
665
666 Generate a compressed changegroup file collecting changesets not
666 Generate a compressed changegroup file collecting changesets not
667 known to be in another repository.
667 known to be in another repository.
668
668
669 If you omit the destination repository, then hg assumes the
669 If you omit the destination repository, then hg assumes the
670 destination will have all the nodes you specify with --base
670 destination will have all the nodes you specify with --base
671 parameters. To create a bundle containing all changesets, use
671 parameters. To create a bundle containing all changesets, use
672 -a/--all (or --base null).
672 -a/--all (or --base null).
673
673
674 You can change compression method with the -t/--type option.
674 You can change compression method with the -t/--type option.
675 The available compression methods are: none, bzip2, and
675 The available compression methods are: none, bzip2, and
676 gzip (by default, bundles are compressed using bzip2).
676 gzip (by default, bundles are compressed using bzip2).
677
677
678 The bundle file can then be transferred using conventional means
678 The bundle file can then be transferred using conventional means
679 and applied to another repository with the unbundle or pull
679 and applied to another repository with the unbundle or pull
680 command. This is useful when direct push and pull are not
680 command. This is useful when direct push and pull are not
681 available or when exporting an entire repository is undesirable.
681 available or when exporting an entire repository is undesirable.
682
682
683 Applying bundles preserves all changeset contents including
683 Applying bundles preserves all changeset contents including
684 permissions, copy/rename information, and revision history.
684 permissions, copy/rename information, and revision history.
685
685
686 Returns 0 on success, 1 if no changes found.
686 Returns 0 on success, 1 if no changes found.
687 """
687 """
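# Illustrative usage only (file name and URL are hypothetical):
#   hg bundle --all everything.hg                     # bundle every changeset
#   hg bundle changes.hg https://hg.example.com/repo  # only what the remote lacks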
688 revs = None
688 revs = None
689 if 'rev' in opts:
689 if 'rev' in opts:
690 revs = cmdutil.revrange(repo, opts['rev'])
690 revs = cmdutil.revrange(repo, opts['rev'])
691
691
692 if opts.get('all'):
692 if opts.get('all'):
693 base = ['null']
693 base = ['null']
694 else:
694 else:
695 base = cmdutil.revrange(repo, opts.get('base'))
695 base = cmdutil.revrange(repo, opts.get('base'))
696 if base:
696 if base:
697 if dest:
697 if dest:
698 raise util.Abort(_("--base is incompatible with specifying "
698 raise util.Abort(_("--base is incompatible with specifying "
699 "a destination"))
699 "a destination"))
700 base = [repo.lookup(rev) for rev in base]
700 base = [repo.lookup(rev) for rev in base]
701 # create the right base
701 # create the right base
702 # XXX: nodesbetween / changegroup* should be "fixed" instead
702 # XXX: nodesbetween / changegroup* should be "fixed" instead
703 o = []
703 o = []
704 has = set((nullid,))
704 has = set((nullid,))
705 for n in base:
705 for n in base:
706 has.update(repo.changelog.reachable(n))
706 has.update(repo.changelog.reachable(n))
707 if revs:
707 if revs:
708 revs = [repo.lookup(rev) for rev in revs]
708 revs = [repo.lookup(rev) for rev in revs]
709 visit = revs[:]
709 visit = revs[:]
710 has.difference_update(visit)
710 has.difference_update(visit)
711 else:
711 else:
712 visit = repo.changelog.heads()
712 visit = repo.changelog.heads()
713 seen = {}
713 seen = {}
714 while visit:
714 while visit:
715 n = visit.pop(0)
715 n = visit.pop(0)
716 parents = [p for p in repo.changelog.parents(n) if p not in has]
716 parents = [p for p in repo.changelog.parents(n) if p not in has]
717 if len(parents) == 0:
717 if len(parents) == 0:
718 if n not in has:
718 if n not in has:
719 o.append(n)
719 o.append(n)
720 else:
720 else:
721 for p in parents:
721 for p in parents:
722 if p not in seen:
722 if p not in seen:
723 seen[p] = 1
723 seen[p] = 1
724 visit.append(p)
724 visit.append(p)
725 else:
725 else:
726 dest = ui.expandpath(dest or 'default-push', dest or 'default')
726 dest = ui.expandpath(dest or 'default-push', dest or 'default')
727 dest, branches = hg.parseurl(dest, opts.get('branch'))
727 dest, branches = hg.parseurl(dest, opts.get('branch'))
728 other = hg.repository(hg.remoteui(repo, opts), dest)
728 other = hg.repository(hg.remoteui(repo, opts), dest)
729 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
729 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
730 if revs:
730 if revs:
731 revs = [repo.lookup(rev) for rev in revs]
731 revs = [repo.lookup(rev) for rev in revs]
732 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
732 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
733
733
734 if not o:
734 if not o:
735 ui.status(_("no changes found\n"))
735 ui.status(_("no changes found\n"))
736 return 1
736 return 1
737
737
738 if revs:
738 if revs:
739 cg = repo.changegroupsubset(o, revs, 'bundle')
739 cg = repo.changegroupsubset(o, revs, 'bundle')
740 else:
740 else:
741 cg = repo.changegroup(o, 'bundle')
741 cg = repo.changegroup(o, 'bundle')
742
742
743 bundletype = opts.get('type', 'bzip2').lower()
743 bundletype = opts.get('type', 'bzip2').lower()
744 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
744 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
745 bundletype = btypes.get(bundletype)
745 bundletype = btypes.get(bundletype)
746 if bundletype not in changegroup.bundletypes:
746 if bundletype not in changegroup.bundletypes:
747 raise util.Abort(_('unknown bundle type specified with --type'))
747 raise util.Abort(_('unknown bundle type specified with --type'))
748
748
749 changegroup.writebundle(cg, fname, bundletype)
749 changegroup.writebundle(cg, fname, bundletype)
750
750
751 def cat(ui, repo, file1, *pats, **opts):
751 def cat(ui, repo, file1, *pats, **opts):
752 """output the current or given revision of files
752 """output the current or given revision of files
753
753
754 Print the specified files as they were at the given revision. If
754 Print the specified files as they were at the given revision. If
755 no revision is given, the parent of the working directory is used,
755 no revision is given, the parent of the working directory is used,
756 or tip if no revision is checked out.
756 or tip if no revision is checked out.
757
757
758 Output may be to a file, in which case the name of the file is
758 Output may be to a file, in which case the name of the file is
759 given using a format string. The formatting rules are the same as
759 given using a format string. The formatting rules are the same as
760 for the export command, with the following additions:
760 for the export command, with the following additions:
761
761
762 :``%s``: basename of file being printed
762 :``%s``: basename of file being printed
763 :``%d``: dirname of file being printed, or '.' if in repository root
763 :``%d``: dirname of file being printed, or '.' if in repository root
764 :``%p``: root-relative path name of file being printed
764 :``%p``: root-relative path name of file being printed
765
765
766 Returns 0 on success.
766 Returns 0 on success.
767 """
767 """
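# Illustrative usage only (paths are hypothetical): with the format string
# keywords above, 'hg cat -r 2 -o "out/%d/%s" src/foo.c' writes the
# revision-2 contents of src/foo.c to out/src/foo.c (%d -> 'src', %s -> 'foo.c').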
768 ctx = cmdutil.revsingle(repo, opts.get('rev'))
768 ctx = cmdutil.revsingle(repo, opts.get('rev'))
769 err = 1
769 err = 1
770 m = cmdutil.match(repo, (file1,) + pats, opts)
770 m = cmdutil.match(repo, (file1,) + pats, opts)
771 for abs in ctx.walk(m):
771 for abs in ctx.walk(m):
772 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
772 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
773 data = ctx[abs].data()
773 data = ctx[abs].data()
774 if opts.get('decode'):
774 if opts.get('decode'):
775 data = repo.wwritedata(abs, data)
775 data = repo.wwritedata(abs, data)
776 fp.write(data)
776 fp.write(data)
777 fp.close()
777 fp.close()
778 err = 0
778 err = 0
779 return err
779 return err
780
780
781 def clone(ui, source, dest=None, **opts):
781 def clone(ui, source, dest=None, **opts):
782 """make a copy of an existing repository
782 """make a copy of an existing repository
783
783
784 Create a copy of an existing repository in a new directory.
784 Create a copy of an existing repository in a new directory.
785
785
786 If no destination directory name is specified, it defaults to the
786 If no destination directory name is specified, it defaults to the
787 basename of the source.
787 basename of the source.
788
788
789 The location of the source is added to the new repository's
789 The location of the source is added to the new repository's
790 ``.hg/hgrc`` file, as the default to be used for future pulls.
790 ``.hg/hgrc`` file, as the default to be used for future pulls.
791
791
792 See :hg:`help urls` for valid source format details.
792 See :hg:`help urls` for valid source format details.
793
793
794 It is possible to specify an ``ssh://`` URL as the destination, but no
794 It is possible to specify an ``ssh://`` URL as the destination, but no
795 ``.hg/hgrc`` and working directory will be created on the remote side.
795 ``.hg/hgrc`` and working directory will be created on the remote side.
796 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
796 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
797
797
798 A set of changesets (tags, or branch names) to pull may be specified
798 A set of changesets (tags, or branch names) to pull may be specified
799 by listing each changeset (tag, or branch name) with -r/--rev.
799 by listing each changeset (tag, or branch name) with -r/--rev.
800 If -r/--rev is used, the cloned repository will contain only a subset
800 If -r/--rev is used, the cloned repository will contain only a subset
801 of the changesets of the source repository. Only the set of changesets
801 of the changesets of the source repository. Only the set of changesets
802 defined by all -r/--rev options (including all their ancestors)
802 defined by all -r/--rev options (including all their ancestors)
803 will be pulled into the destination repository.
803 will be pulled into the destination repository.
804 No subsequent changesets (including subsequent tags) will be present
804 No subsequent changesets (including subsequent tags) will be present
805 in the destination.
805 in the destination.
806
806
807 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
807 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
808 local source repositories.
808 local source repositories.
809
809
810 For efficiency, hardlinks are used for cloning whenever the source
810 For efficiency, hardlinks are used for cloning whenever the source
811 and destination are on the same filesystem (note this applies only
811 and destination are on the same filesystem (note this applies only
812 to the repository data, not to the working directory). Some
812 to the repository data, not to the working directory). Some
813 filesystems, such as AFS, implement hardlinking incorrectly, but
813 filesystems, such as AFS, implement hardlinking incorrectly, but
814 do not report errors. In these cases, use the --pull option to
814 do not report errors. In these cases, use the --pull option to
815 avoid hardlinking.
815 avoid hardlinking.
816
816
817 In some cases, you can clone repositories and the working directory
817 In some cases, you can clone repositories and the working directory
818 using full hardlinks with ::
818 using full hardlinks with ::
819
819
820 $ cp -al REPO REPOCLONE
820 $ cp -al REPO REPOCLONE
821
821
822 This is the fastest way to clone, but it is not always safe. The
822 This is the fastest way to clone, but it is not always safe. The
823 operation is not atomic (making sure REPO is not modified during
823 operation is not atomic (making sure REPO is not modified during
824 the operation is up to you) and you have to make sure your editor
824 the operation is up to you) and you have to make sure your editor
825 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
825 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
826 this is not compatible with certain extensions that place their
826 this is not compatible with certain extensions that place their
827 metadata under the .hg directory, such as mq.
827 metadata under the .hg directory, such as mq.
828
828
829 Mercurial will update the working directory to the first applicable
829 Mercurial will update the working directory to the first applicable
830 revision from this list:
830 revision from this list:
831
831
832 a) null if -U or the source repository has no changesets
832 a) null if -U or the source repository has no changesets
833 b) if -u . and the source repository is local, the first parent of
833 b) if -u . and the source repository is local, the first parent of
834 the source repository's working directory
834 the source repository's working directory
835 c) the changeset specified with -u (if a branch name, this means the
835 c) the changeset specified with -u (if a branch name, this means the
836 latest head of that branch)
836 latest head of that branch)
837 d) the changeset specified with -r
837 d) the changeset specified with -r
838 e) the tipmost head specified with -b
838 e) the tipmost head specified with -b
839 f) the tipmost head specified with the url#branch source syntax
839 f) the tipmost head specified with the url#branch source syntax
840 g) the tipmost head of the default branch
840 g) the tipmost head of the default branch
841 h) tip
841 h) tip
842
842
843 Returns 0 on success.
843 Returns 0 on success.
844 """
844 """
845 if opts.get('noupdate') and opts.get('updaterev'):
845 if opts.get('noupdate') and opts.get('updaterev'):
846 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
846 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
847
847
848 r = hg.clone(hg.remoteui(ui, opts), source, dest,
848 r = hg.clone(hg.remoteui(ui, opts), source, dest,
849 pull=opts.get('pull'),
849 pull=opts.get('pull'),
850 stream=opts.get('uncompressed'),
850 stream=opts.get('uncompressed'),
851 rev=opts.get('rev'),
851 rev=opts.get('rev'),
852 update=opts.get('updaterev') or not opts.get('noupdate'),
852 update=opts.get('updaterev') or not opts.get('noupdate'),
853 branch=opts.get('branch'))
853 branch=opts.get('branch'))
854
854
855 return r is None
855 return r is None
856
856
857 def commit(ui, repo, *pats, **opts):
857 def commit(ui, repo, *pats, **opts):
858 """commit the specified files or all outstanding changes
858 """commit the specified files or all outstanding changes
859
859
860 Commit changes to the given files into the repository. Unlike a
860 Commit changes to the given files into the repository. Unlike a
861 centralized SCM, this operation is a local operation. See
861 centralized SCM, this operation is a local operation. See
862 :hg:`push` for a way to actively distribute your changes.
862 :hg:`push` for a way to actively distribute your changes.
863
863
864 If a list of files is omitted, all changes reported by :hg:`status`
864 If a list of files is omitted, all changes reported by :hg:`status`
865 will be committed.
865 will be committed.
866
866
867 If you are committing the result of a merge, do not provide any
867 If you are committing the result of a merge, do not provide any
868 filenames or -I/-X filters.
868 filenames or -I/-X filters.
869
869
870 If no commit message is specified, Mercurial starts your
870 If no commit message is specified, Mercurial starts your
871 configured editor where you can enter a message. In case your
871 configured editor where you can enter a message. In case your
872 commit fails, you will find a backup of your message in
872 commit fails, you will find a backup of your message in
873 ``.hg/last-message.txt``.
873 ``.hg/last-message.txt``.
874
874
875 See :hg:`help dates` for a list of formats valid for -d/--date.
875 See :hg:`help dates` for a list of formats valid for -d/--date.
876
876
877 Returns 0 on success, 1 if nothing changed.
877 Returns 0 on success, 1 if nothing changed.
878 """
878 """
879 extra = {}
879 extra = {}
880 if opts.get('close_branch'):
880 if opts.get('close_branch'):
881 if repo['.'].node() not in repo.branchheads():
881 if repo['.'].node() not in repo.branchheads():
882 # The topo heads set is included in the branch heads set of the
882 # The topo heads set is included in the branch heads set of the
883 # current branch, so it's sufficient to test branchheads
883 # current branch, so it's sufficient to test branchheads
884 raise util.Abort(_('can only close branch heads'))
884 raise util.Abort(_('can only close branch heads'))
885 extra['close'] = 1
885 extra['close'] = 1
886 e = cmdutil.commiteditor
886 e = cmdutil.commiteditor
887 if opts.get('force_editor'):
887 if opts.get('force_editor'):
888 e = cmdutil.commitforceeditor
888 e = cmdutil.commitforceeditor
889
889
890 def commitfunc(ui, repo, message, match, opts):
890 def commitfunc(ui, repo, message, match, opts):
891 return repo.commit(message, opts.get('user'), opts.get('date'), match,
891 return repo.commit(message, opts.get('user'), opts.get('date'), match,
892 editor=e, extra=extra)
892 editor=e, extra=extra)
893
893
894 branch = repo[None].branch()
894 branch = repo[None].branch()
895 bheads = repo.branchheads(branch)
895 bheads = repo.branchheads(branch)
896
896
897 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
897 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
898 if not node:
898 if not node:
899 stat = repo.status(match=cmdutil.match(repo, pats, opts))
899 stat = repo.status(match=cmdutil.match(repo, pats, opts))
900 if stat[3]:
900 if stat[3]:
901 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
901 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
902 % len(stat[3]))
902 % len(stat[3]))
903 else:
903 else:
904 ui.status(_("nothing changed\n"))
904 ui.status(_("nothing changed\n"))
905 return 1
905 return 1
906
906
907 ctx = repo[node]
907 ctx = repo[node]
908 parents = ctx.parents()
908 parents = ctx.parents()
909
909
910 if bheads and not [x for x in parents
910 if bheads and not [x for x in parents
911 if x.node() in bheads and x.branch() == branch]:
911 if x.node() in bheads and x.branch() == branch]:
912 ui.status(_('created new head\n'))
912 ui.status(_('created new head\n'))
913 # The message is not printed for initial roots. For the other
913 # The message is not printed for initial roots. For the other
914 # changesets, it is printed in the following situations:
914 # changesets, it is printed in the following situations:
915 #
915 #
916 # Par column: for the 2 parents with ...
916 # Par column: for the 2 parents with ...
917 # N: null or no parent
917 # N: null or no parent
918 # B: parent is on another named branch
918 # B: parent is on another named branch
919 # C: parent is a regular non head changeset
919 # C: parent is a regular non head changeset
920 # H: parent was a branch head of the current branch
920 # H: parent was a branch head of the current branch
921 # Msg column: whether we print "created new head" message
921 # Msg column: whether we print "created new head" message
922 # In the following, it is assumed that there already exists some
922 # In the following, it is assumed that there already exists some
923 # initial branch heads of the current branch, otherwise nothing is
923 # initial branch heads of the current branch, otherwise nothing is
924 # printed anyway.
924 # printed anyway.
925 #
925 #
926 # Par Msg Comment
926 # Par Msg Comment
927 # NN y additional topo root
927 # NN y additional topo root
928 #
928 #
929 # BN y additional branch root
929 # BN y additional branch root
930 # CN y additional topo head
930 # CN y additional topo head
931 # HN n usual case
931 # HN n usual case
932 #
932 #
933 # BB y weird additional branch root
933 # BB y weird additional branch root
934 # CB y branch merge
934 # CB y branch merge
935 # HB n merge with named branch
935 # HB n merge with named branch
936 #
936 #
937 # CC y additional head from merge
937 # CC y additional head from merge
938 # CH n merge with a head
938 # CH n merge with a head
939 #
939 #
940 # HH n head merge: head count decreases
940 # HH n head merge: head count decreases
941
941
942 if not opts.get('close_branch'):
942 if not opts.get('close_branch'):
943 for r in parents:
943 for r in parents:
944 if r.extra().get('close') and r.branch() == branch:
944 if r.extra().get('close') and r.branch() == branch:
945 ui.status(_('reopening closed branch head %d\n') % r)
945 ui.status(_('reopening closed branch head %d\n') % r)
946
946
947 if ui.debugflag:
947 if ui.debugflag:
948 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
948 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
949 elif ui.verbose:
949 elif ui.verbose:
950 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
950 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
951
951
952 def copy(ui, repo, *pats, **opts):
952 def copy(ui, repo, *pats, **opts):
953 """mark files as copied for the next commit
953 """mark files as copied for the next commit
954
954
955 Mark dest as having copies of source files. If dest is a
955 Mark dest as having copies of source files. If dest is a
956 directory, copies are put in that directory. If dest is a file,
956 directory, copies are put in that directory. If dest is a file,
957 the source must be a single file.
957 the source must be a single file.
958
958
959 By default, this command copies the contents of files as they
959 By default, this command copies the contents of files as they
960 exist in the working directory. If invoked with -A/--after, the
960 exist in the working directory. If invoked with -A/--after, the
961 operation is recorded, but no copying is performed.
961 operation is recorded, but no copying is performed.
962
962
963 This command takes effect with the next commit. To undo a copy
963 This command takes effect with the next commit. To undo a copy
964 before that, see :hg:`revert`.
964 before that, see :hg:`revert`.
965
965
966 Returns 0 on success, 1 if errors are encountered.
966 Returns 0 on success, 1 if errors are encountered.
967 """
967 """
968 wlock = repo.wlock(False)
968 wlock = repo.wlock(False)
969 try:
969 try:
970 return cmdutil.copy(ui, repo, pats, opts)
970 return cmdutil.copy(ui, repo, pats, opts)
971 finally:
971 finally:
972 wlock.release()
972 wlock.release()
973
973
974 def debugancestor(ui, repo, *args):
974 def debugancestor(ui, repo, *args):
975 """find the ancestor revision of two revisions in a given index"""
975 """find the ancestor revision of two revisions in a given index"""
976 if len(args) == 3:
976 if len(args) == 3:
977 index, rev1, rev2 = args
977 index, rev1, rev2 = args
978 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
978 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
979 lookup = r.lookup
979 lookup = r.lookup
980 elif len(args) == 2:
980 elif len(args) == 2:
981 if not repo:
981 if not repo:
982 raise util.Abort(_("there is no Mercurial repository here "
982 raise util.Abort(_("there is no Mercurial repository here "
983 "(.hg not found)"))
983 "(.hg not found)"))
984 rev1, rev2 = args
984 rev1, rev2 = args
985 r = repo.changelog
985 r = repo.changelog
986 lookup = repo.lookup
986 lookup = repo.lookup
987 else:
987 else:
988 raise util.Abort(_('either two or three arguments required'))
988 raise util.Abort(_('either two or three arguments required'))
989 a = r.ancestor(lookup(rev1), lookup(rev2))
989 a = r.ancestor(lookup(rev1), lookup(rev2))
990 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
990 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
991
991
992 def debugbuilddag(ui, repo, text,
992 def debugbuilddag(ui, repo, text,
993 mergeable_file=False,
993 mergeable_file=False,
994 appended_file=False,
994 appended_file=False,
995 overwritten_file=False,
995 overwritten_file=False,
996 new_file=False):
996 new_file=False):
997 """builds a repo with a given dag from scratch in the current empty repo
997 """builds a repo with a given dag from scratch in the current empty repo
998
998
999 Elements:
999 Elements:
1000
1000
1001 - "+n" is a linear run of n nodes based on the current default parent
1001 - "+n" is a linear run of n nodes based on the current default parent
1002 - "." is a single node based on the current default parent
1002 - "." is a single node based on the current default parent
1003 - "$" resets the default parent to null (implied at the start);
1003 - "$" resets the default parent to null (implied at the start);
1004 otherwise the default parent is always the last node created
1004 otherwise the default parent is always the last node created
1005 - "<p" sets the default parent to the backref p
1005 - "<p" sets the default parent to the backref p
1006 - "*p" is a fork at parent p, which is a backref
1006 - "*p" is a fork at parent p, which is a backref
1007 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1007 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1008 - "/p2" is a merge of the preceding node and p2
1008 - "/p2" is a merge of the preceding node and p2
1009 - ":tag" defines a local tag for the preceding node
1009 - ":tag" defines a local tag for the preceding node
1010 - "@branch" sets the named branch for subsequent nodes
1010 - "@branch" sets the named branch for subsequent nodes
1011 - "!command" runs the command using your shell
1011 - "!command" runs the command using your shell
1012 - "!!my command\\n" is like "!", but to the end of the line
1012 - "!!my command\\n" is like "!", but to the end of the line
1013 - "#...\\n" is a comment up to the end of the line
1013 - "#...\\n" is a comment up to the end of the line
1014
1014
1015 Whitespace between the above elements is ignored.
1015 Whitespace between the above elements is ignored.
1016
1016
1017 A backref is either
1017 A backref is either
1018
1018
1019 - a number n, which references the node curr-n, where curr is the current
1019 - a number n, which references the node curr-n, where curr is the current
1020 node, or
1020 node, or
1021 - the name of a local tag you placed earlier using ":tag", or
1021 - the name of a local tag you placed earlier using ":tag", or
1022 - empty to denote the default parent.
1022 - empty to denote the default parent.
1023
1023
1024 All string-valued elements are either strictly alphanumeric, or must
1024 All string-valued elements are either strictly alphanumeric, or must
1025 be enclosed in double quotes ("..."), with "\\" as escape character.
1025 be enclosed in double quotes ("..."), with "\\" as escape character.
1026
1026
1027 Note that the --overwritten-file and --appended-file options imply the
1027 Note that the --overwritten-file and --appended-file options imply the
1028 use of "HGMERGE=internal:local" during DAG buildup.
1028 use of "HGMERGE=internal:local" during DAG buildup.
1029 """
1029 """
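# Illustrative only -- one way to combine the elements described above:
#   "+3 :mytag $ +2 <mytag +1"
# i.e. three linear nodes, tag the last one 'mytag', start a fresh root of
# two nodes, then point the default parent back at 'mytag' and add one child.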
1030
1030
1031 if not (mergeable_file or appended_file or overwritten_file or new_file):
1031 if not (mergeable_file or appended_file or overwritten_file or new_file):
1032 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1032 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1033
1033
1034 if len(repo.changelog) > 0:
1034 if len(repo.changelog) > 0:
1035 raise util.Abort(_('repository is not empty'))
1035 raise util.Abort(_('repository is not empty'))
1036
1036
1037 if overwritten_file or appended_file:
1037 if overwritten_file or appended_file:
1038 # we don't want to fail in merges during buildup
1038 # we don't want to fail in merges during buildup
1039 os.environ['HGMERGE'] = 'internal:local'
1039 os.environ['HGMERGE'] = 'internal:local'
1040
1040
1041 def writefile(fname, text, fmode="wb"):
1041 def writefile(fname, text, fmode="wb"):
1042 f = open(fname, fmode)
1042 f = open(fname, fmode)
1043 try:
1043 try:
1044 f.write(text)
1044 f.write(text)
1045 finally:
1045 finally:
1046 f.close()
1046 f.close()
1047
1047
1048 if mergeable_file:
1048 if mergeable_file:
1049 linesperrev = 2
1049 linesperrev = 2
1050 # determine number of revs in DAG
1050 # determine number of revs in DAG
1051 n = 0
1051 n = 0
1052 for type, data in dagparser.parsedag(text):
1052 for type, data in dagparser.parsedag(text):
1053 if type == 'n':
1053 if type == 'n':
1054 n += 1
1054 n += 1
1055 # make a file with k lines per rev
1055 # make a file with k lines per rev
1056 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1056 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1057 + "\n")
1057 + "\n")
1058
1058
1059 at = -1
1059 at = -1
1060 atbranch = 'default'
1060 atbranch = 'default'
1061 for type, data in dagparser.parsedag(text):
1061 for type, data in dagparser.parsedag(text):
1062 if type == 'n':
1062 if type == 'n':
1063 ui.status('node %s\n' % str(data))
1063 ui.status('node %s\n' % str(data))
1064 id, ps = data
1064 id, ps = data
1065 p1 = ps[0]
1065 p1 = ps[0]
1066 if p1 != at:
1066 if p1 != at:
1067 update(ui, repo, node=str(p1), clean=True)
1067 update(ui, repo, node=str(p1), clean=True)
1068 at = p1
1068 at = p1
1069 if repo.dirstate.branch() != atbranch:
1069 if repo.dirstate.branch() != atbranch:
1070 branch(ui, repo, atbranch, force=True)
1070 branch(ui, repo, atbranch, force=True)
1071 if len(ps) > 1:
1071 if len(ps) > 1:
1072 p2 = ps[1]
1072 p2 = ps[1]
1073 merge(ui, repo, node=p2)
1073 merge(ui, repo, node=p2)
1074
1074
1075 if mergeable_file:
1075 if mergeable_file:
1076 f = open("mf", "rb+")
1076 f = open("mf", "rb+")
1077 try:
1077 try:
1078 lines = f.read().split("\n")
1078 lines = f.read().split("\n")
1079 lines[id * linesperrev] += " r%i" % id
1079 lines[id * linesperrev] += " r%i" % id
1080 f.seek(0)
1080 f.seek(0)
1081 f.write("\n".join(lines))
1081 f.write("\n".join(lines))
1082 finally:
1082 finally:
1083 f.close()
1083 f.close()
1084
1084
1085 if appended_file:
1085 if appended_file:
1086 writefile("af", "r%i\n" % id, "ab")
1086 writefile("af", "r%i\n" % id, "ab")
1087
1087
1088 if overwritten_file:
1088 if overwritten_file:
1089 writefile("of", "r%i\n" % id)
1089 writefile("of", "r%i\n" % id)
1090
1090
1091 if new_file:
1091 if new_file:
1092 writefile("nf%i" % id, "r%i\n" % id)
1092 writefile("nf%i" % id, "r%i\n" % id)
1093
1093
1094 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1094 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1095 at = id
1095 at = id
1096 elif type == 'l':
1096 elif type == 'l':
1097 id, name = data
1097 id, name = data
1098 ui.status('tag %s\n' % name)
1098 ui.status('tag %s\n' % name)
1099 tag(ui, repo, name, local=True)
1099 tag(ui, repo, name, local=True)
1100 elif type == 'a':
1100 elif type == 'a':
1101 ui.status('branch %s\n' % data)
1101 ui.status('branch %s\n' % data)
1102 atbranch = data
1102 atbranch = data
1103 elif type in 'cC':
1103 elif type in 'cC':
1104 r = util.system(data, cwd=repo.root)
1104 r = util.system(data, cwd=repo.root)
1105 if r:
1105 if r:
1106 desc, r = util.explain_exit(r)
1106 desc, r = util.explain_exit(r)
1107 raise util.Abort(_('%s command %s') % (data, desc))
1107 raise util.Abort(_('%s command %s') % (data, desc))
1108
1108
1109 def debugcommands(ui, cmd='', *args):
1109 def debugcommands(ui, cmd='', *args):
1110 """list all available commands and options"""
1110 """list all available commands and options"""
1111 for cmd, vals in sorted(table.iteritems()):
1111 for cmd, vals in sorted(table.iteritems()):
1112 cmd = cmd.split('|')[0].strip('^')
1112 cmd = cmd.split('|')[0].strip('^')
1113 opts = ', '.join([i[1] for i in vals[1]])
1113 opts = ', '.join([i[1] for i in vals[1]])
1114 ui.write('%s: %s\n' % (cmd, opts))
1114 ui.write('%s: %s\n' % (cmd, opts))
1115
1115
1116 def debugcomplete(ui, cmd='', **opts):
1116 def debugcomplete(ui, cmd='', **opts):
1117 """returns the completion list associated with the given command"""
1117 """returns the completion list associated with the given command"""
1118
1118
1119 if opts.get('options'):
1119 if opts.get('options'):
1120 options = []
1120 options = []
1121 otables = [globalopts]
1121 otables = [globalopts]
1122 if cmd:
1122 if cmd:
1123 aliases, entry = cmdutil.findcmd(cmd, table, False)
1123 aliases, entry = cmdutil.findcmd(cmd, table, False)
1124 otables.append(entry[1])
1124 otables.append(entry[1])
1125 for t in otables:
1125 for t in otables:
1126 for o in t:
1126 for o in t:
1127 if "(DEPRECATED)" in o[3]:
1127 if "(DEPRECATED)" in o[3]:
1128 continue
1128 continue
1129 if o[0]:
1129 if o[0]:
1130 options.append('-%s' % o[0])
1130 options.append('-%s' % o[0])
1131 options.append('--%s' % o[1])
1131 options.append('--%s' % o[1])
1132 ui.write("%s\n" % "\n".join(options))
1132 ui.write("%s\n" % "\n".join(options))
1133 return
1133 return
1134
1134
1135 cmdlist = cmdutil.findpossible(cmd, table)
1135 cmdlist = cmdutil.findpossible(cmd, table)
1136 if ui.verbose:
1136 if ui.verbose:
1137 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1137 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1138 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1138 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1139
1139
1140 def debugfsinfo(ui, path = "."):
1140 def debugfsinfo(ui, path = "."):
1141 """show information detected about current filesystem"""
1141 """show information detected about current filesystem"""
1142 open('.debugfsinfo', 'w').write('')
1142 open('.debugfsinfo', 'w').write('')
1143 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1143 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1144 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1144 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1145 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1145 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1146 and 'yes' or 'no'))
1146 and 'yes' or 'no'))
1147 os.unlink('.debugfsinfo')
1147 os.unlink('.debugfsinfo')
1148
1148
1149 def debugrebuildstate(ui, repo, rev="tip"):
1149 def debugrebuildstate(ui, repo, rev="tip"):
1150 """rebuild the dirstate as it would look like for the given revision"""
1150 """rebuild the dirstate as it would look like for the given revision"""
1151 ctx = cmdutil.revsingle(repo, rev)
1151 ctx = cmdutil.revsingle(repo, rev)
1152 wlock = repo.wlock()
1152 wlock = repo.wlock()
1153 try:
1153 try:
1154 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1154 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1155 finally:
1155 finally:
1156 wlock.release()
1156 wlock.release()
1157
1157
1158 def debugcheckstate(ui, repo):
1158 def debugcheckstate(ui, repo):
1159 """validate the correctness of the current dirstate"""
1159 """validate the correctness of the current dirstate"""
1160 parent1, parent2 = repo.dirstate.parents()
1160 parent1, parent2 = repo.dirstate.parents()
1161 m1 = repo[parent1].manifest()
1161 m1 = repo[parent1].manifest()
1162 m2 = repo[parent2].manifest()
1162 m2 = repo[parent2].manifest()
1163 errors = 0
1163 errors = 0
1164 for f in repo.dirstate:
1164 for f in repo.dirstate:
1165 state = repo.dirstate[f]
1165 state = repo.dirstate[f]
1166 if state in "nr" and f not in m1:
1166 if state in "nr" and f not in m1:
1167 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1167 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1168 errors += 1
1168 errors += 1
1169 if state in "a" and f in m1:
1169 if state in "a" and f in m1:
1170 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1170 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1171 errors += 1
1171 errors += 1
1172 if state in "m" and f not in m1 and f not in m2:
1172 if state in "m" and f not in m1 and f not in m2:
1173 ui.warn(_("%s in state %s, but not in either manifest\n") %
1173 ui.warn(_("%s in state %s, but not in either manifest\n") %
1174 (f, state))
1174 (f, state))
1175 errors += 1
1175 errors += 1
1176 for f in m1:
1176 for f in m1:
1177 state = repo.dirstate[f]
1177 state = repo.dirstate[f]
1178 if state not in "nrm":
1178 if state not in "nrm":
1179 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1179 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1180 errors += 1
1180 errors += 1
1181 if errors:
1181 if errors:
1182 error = _(".hg/dirstate inconsistent with current parent's manifest")
1182 error = _(".hg/dirstate inconsistent with current parent's manifest")
1183 raise util.Abort(error)
1183 raise util.Abort(error)
1184
1184
1185 def showconfig(ui, repo, *values, **opts):
1185 def showconfig(ui, repo, *values, **opts):
1186 """show combined config settings from all hgrc files
1186 """show combined config settings from all hgrc files
1187
1187
1188 With no arguments, print names and values of all config items.
1188 With no arguments, print names and values of all config items.
1189
1189
1190 With one argument of the form section.name, print just the value
1190 With one argument of the form section.name, print just the value
1191 of that config item.
1191 of that config item.
1192
1192
1193 With multiple arguments, print names and values of all config
1193 With multiple arguments, print names and values of all config
1194 items with matching section names.
1194 items with matching section names.
1195
1195
1196 With --debug, the source (filename and line number) is printed
1196 With --debug, the source (filename and line number) is printed
1197 for each config item.
1197 for each config item.
1198
1198
1199 Returns 0 on success.
1199 Returns 0 on success.
1200 """
1200 """
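# Illustrative usage only: 'hg showconfig ui.username' prints that single
# value, while 'hg showconfig ui' prints every item in the [ui] section.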
1201
1201
1202 for f in util.rcpath():
1202 for f in util.rcpath():
1203 ui.debug(_('read config from: %s\n') % f)
1203 ui.debug(_('read config from: %s\n') % f)
1204 untrusted = bool(opts.get('untrusted'))
1204 untrusted = bool(opts.get('untrusted'))
1205 if values:
1205 if values:
1206 sections = [v for v in values if '.' not in v]
1206 sections = [v for v in values if '.' not in v]
1207 items = [v for v in values if '.' in v]
1207 items = [v for v in values if '.' in v]
1208 if len(items) > 1 or items and sections:
1208 if len(items) > 1 or items and sections:
1209 raise util.Abort(_('only one config item permitted'))
1209 raise util.Abort(_('only one config item permitted'))
1210 for section, name, value in ui.walkconfig(untrusted=untrusted):
1210 for section, name, value in ui.walkconfig(untrusted=untrusted):
1211 value = str(value).replace('\n', '\\n')
1211 value = str(value).replace('\n', '\\n')
1212 sectname = section + '.' + name
1212 sectname = section + '.' + name
1213 if values:
1213 if values:
1214 for v in values:
1214 for v in values:
1215 if v == section:
1215 if v == section:
1216 ui.debug('%s: ' %
1216 ui.debug('%s: ' %
1217 ui.configsource(section, name, untrusted))
1217 ui.configsource(section, name, untrusted))
1218 ui.write('%s=%s\n' % (sectname, value))
1218 ui.write('%s=%s\n' % (sectname, value))
1219 elif v == sectname:
1219 elif v == sectname:
1220 ui.debug('%s: ' %
1220 ui.debug('%s: ' %
1221 ui.configsource(section, name, untrusted))
1221 ui.configsource(section, name, untrusted))
1222 ui.write(value, '\n')
1222 ui.write(value, '\n')
1223 else:
1223 else:
1224 ui.debug('%s: ' %
1224 ui.debug('%s: ' %
1225 ui.configsource(section, name, untrusted))
1225 ui.configsource(section, name, untrusted))
1226 ui.write('%s=%s\n' % (sectname, value))
1226 ui.write('%s=%s\n' % (sectname, value))
1227
1227
1228 def debugknown(ui, repopath, *ids, **opts):
1228 def debugknown(ui, repopath, *ids, **opts):
1229 """test whether node ids are known to a repo
1229 """test whether node ids are known to a repo
1230
1230
1231 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1231 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1232 indicating unknown/known.
1232 indicating unknown/known.
1233 """
1233 """
1234 repo = hg.repository(ui, repopath)
1234 repo = hg.repository(ui, repopath)
1235 if not repo.capable('known'):
1235 if not repo.capable('known'):
1236 raise util.Abort("known() not supported by target repository")
1236 raise util.Abort("known() not supported by target repository")
1237 flags = repo.known([bin(s) for s in ids])
1237 flags = repo.known([bin(s) for s in ids])
1238 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1238 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1239
1239
1240 def debugbundle(ui, bundlepath, all=None, **opts):
1240 def debugbundle(ui, bundlepath, all=None, **opts):
1241 """lists the contents of a bundle"""
1241 """lists the contents of a bundle"""
1242 f = url.open(ui, bundlepath)
1242 f = url.open(ui, bundlepath)
1243 try:
1243 try:
1244 gen = changegroup.readbundle(f, bundlepath)
1244 gen = changegroup.readbundle(f, bundlepath)
1245 if all:
1245 if all:
1246 ui.write("format: id, p1, p2, cset, len(delta)\n")
1246 ui.write("format: id, p1, p2, cset, len(delta)\n")
1247
1247
1248 def showchunks(named):
1248 def showchunks(named):
1249 ui.write("\n%s\n" % named)
1249 ui.write("\n%s\n" % named)
1250 while 1:
1250 while 1:
1251 chunkdata = gen.parsechunk()
1251 chunkdata = gen.parsechunk()
1252 if not chunkdata:
1252 if not chunkdata:
1253 break
1253 break
1254 node = chunkdata['node']
1254 node = chunkdata['node']
1255 p1 = chunkdata['p1']
1255 p1 = chunkdata['p1']
1256 p2 = chunkdata['p2']
1256 p2 = chunkdata['p2']
1257 cs = chunkdata['cs']
1257 cs = chunkdata['cs']
1258 delta = chunkdata['data']
1258 delta = chunkdata['data']
1259 ui.write("%s %s %s %s %s\n" %
1259 ui.write("%s %s %s %s %s\n" %
1260 (hex(node), hex(p1), hex(p2),
1260 (hex(node), hex(p1), hex(p2),
1261 hex(cs), len(delta)))
1261 hex(cs), len(delta)))
1262
1262
1263 showchunks("changelog")
1263 showchunks("changelog")
1264 showchunks("manifest")
1264 showchunks("manifest")
1265 while 1:
1265 while 1:
1266 fname = gen.chunk()
1266 fname = gen.chunk()
1267 if not fname:
1267 if not fname:
1268 break
1268 break
1269 showchunks(fname)
1269 showchunks(fname)
1270 else:
1270 else:
1271 while 1:
1271 while 1:
1272 chunkdata = gen.parsechunk()
1272 chunkdata = gen.parsechunk()
1273 if not chunkdata:
1273 if not chunkdata:
1274 break
1274 break
1275 node = chunkdata['node']
1275 node = chunkdata['node']
1276 ui.write("%s\n" % hex(node))
1276 ui.write("%s\n" % hex(node))
1277 finally:
1277 finally:
1278 f.close()
1278 f.close()
1279
1279
1280 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1280 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1281 """retrieves a bundle from a repo
1281 """retrieves a bundle from a repo
1282
1282
1283 Every ID must be a full-length hex node id string. Saves the bundle to the
1283 Every ID must be a full-length hex node id string. Saves the bundle to the
1284 given file.
1284 given file.
1285 """
1285 """
1286 repo = hg.repository(ui, repopath)
1286 repo = hg.repository(ui, repopath)
1287 if not repo.capable('getbundle'):
1287 if not repo.capable('getbundle'):
1288 raise util.Abort("getbundle() not supported by target repository")
1288 raise util.Abort("getbundle() not supported by target repository")
1289 args = {}
1289 args = {}
1290 if common:
1290 if common:
1291 args['common'] = [bin(s) for s in common]
1291 args['common'] = [bin(s) for s in common]
1292 if head:
1292 if head:
1293 args['heads'] = [bin(s) for s in head]
1293 args['heads'] = [bin(s) for s in head]
1294 bundle = repo.getbundle('debug', **args)
1294 bundle = repo.getbundle('debug', **args)
1295
1295
1296 bundletype = opts.get('type', 'bzip2').lower()
1296 bundletype = opts.get('type', 'bzip2').lower()
1297 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1297 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1298 bundletype = btypes.get(bundletype)
1298 bundletype = btypes.get(bundletype)
1299 if bundletype not in changegroup.bundletypes:
1299 if bundletype not in changegroup.bundletypes:
1300 raise util.Abort(_('unknown bundle type specified with --type'))
1300 raise util.Abort(_('unknown bundle type specified with --type'))
1301 changegroup.writebundle(bundle, bundlepath, bundletype)
1301 changegroup.writebundle(bundle, bundlepath, bundletype)
1302
1302
1303 def debugpushkey(ui, repopath, namespace, *keyinfo):
1303 def debugpushkey(ui, repopath, namespace, *keyinfo):
1304 '''access the pushkey key/value protocol
1304 '''access the pushkey key/value protocol
1305
1305
1306 With two args, list the keys in the given namespace.
1306 With two args, list the keys in the given namespace.
1307
1307
1308 With five args, set a key to new if it currently is set to old.
1308 With five args, set a key to new if it currently is set to old.
1309 Reports success or failure.
1309 Reports success or failure.
1310 '''
1310 '''
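# Illustrative only (path, key and node are hypothetical placeholders):
#   hg debugpushkey /path/to/repo namespaces                 # list keys
#   hg debugpushkey /path/to/repo bookmarks X '' <hexnode>   # set a key
# the second form tries to set bookmark X to <hexnode>, provided its
# current value matches the (empty) old value given.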
1311
1311
1312 target = hg.repository(ui, repopath)
1312 target = hg.repository(ui, repopath)
1313 if keyinfo:
1313 if keyinfo:
1314 key, old, new = keyinfo
1314 key, old, new = keyinfo
1315 r = target.pushkey(namespace, key, old, new)
1315 r = target.pushkey(namespace, key, old, new)
1316 ui.status(str(r) + '\n')
1316 ui.status(str(r) + '\n')
1317 return not r
1317 return not r
1318 else:
1318 else:
1319 for k, v in target.listkeys(namespace).iteritems():
1319 for k, v in target.listkeys(namespace).iteritems():
1320 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1320 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1321 v.encode('string-escape')))
1321 v.encode('string-escape')))
1322
1322
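Illustrative sketch (editorial): the listing branch above prints one key<TAB>value line per entry, escaping both sides so non-printable bytes stay readable; 'string-escape' is a Python 2-only codec, which is what this code targets.

def formatkeys(keys):
    # keys: assumed to be a plain str -> str dict, as listkeys() above yields;
    # sorted here only for stable output
    lines = []
    for k, v in sorted(keys.items()):
        lines.append("%s\t%s" % (k.encode('string-escape'),
                                 v.encode('string-escape')))
    return "\n".join(lines) + "\n"

# formatkeys({'bookmarks': '\x00abc'}) -> 'bookmarks\t\\x00abc\n'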
1323 def debugrevspec(ui, repo, expr):
1323 def debugrevspec(ui, repo, expr):
1324 '''parse and apply a revision specification'''
1324 '''parse and apply a revision specification'''
1325 if ui.verbose:
1325 if ui.verbose:
1326 tree = revset.parse(expr)[0]
1326 tree = revset.parse(expr)[0]
1327 ui.note(tree, "\n")
1327 ui.note(tree, "\n")
1328 func = revset.match(expr)
1328 func = revset.match(expr)
1329 for c in func(repo, range(len(repo))):
1329 for c in func(repo, range(len(repo))):
1330 ui.write("%s\n" % c)
1330 ui.write("%s\n" % c)
1331
1331
1332 def debugsetparents(ui, repo, rev1, rev2=None):
1332 def debugsetparents(ui, repo, rev1, rev2=None):
1333 """manually set the parents of the current working directory
1333 """manually set the parents of the current working directory
1334
1334
1335 This is useful for writing repository conversion tools, but should
1335 This is useful for writing repository conversion tools, but should
1336 be used with care.
1336 be used with care.
1337
1337
1338 Returns 0 on success.
1338 Returns 0 on success.
1339 """
1339 """
1340
1340
1341 r1 = cmdutil.revsingle(repo, rev1).node()
1341 r1 = cmdutil.revsingle(repo, rev1).node()
1342 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1342 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1343
1343
1344 wlock = repo.wlock()
1344 wlock = repo.wlock()
1345 try:
1345 try:
1346 repo.dirstate.setparents(r1, r2)
1346 repo.dirstate.setparents(r1, r2)
1347 finally:
1347 finally:
1348 wlock.release()
1348 wlock.release()
1349
1349
1350 def debugstate(ui, repo, nodates=None, datesort=None):
1350 def debugstate(ui, repo, nodates=None, datesort=None):
1351 """show the contents of the current dirstate"""
1351 """show the contents of the current dirstate"""
1352 timestr = ""
1352 timestr = ""
1353 showdate = not nodates
1353 showdate = not nodates
1354 if datesort:
1354 if datesort:
1355 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1355 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1356 else:
1356 else:
1357 keyfunc = None # sort by filename
1357 keyfunc = None # sort by filename
1358 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1358 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1359 if showdate:
1359 if showdate:
1360 if ent[3] == -1:
1360 if ent[3] == -1:
1361 # Pad or slice to locale representation
1361 # Pad or slice to locale representation
1362 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1362 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1363 time.localtime(0)))
1363 time.localtime(0)))
1364 timestr = 'unset'
1364 timestr = 'unset'
1365 timestr = (timestr[:locale_len] +
1365 timestr = (timestr[:locale_len] +
1366 ' ' * (locale_len - len(timestr)))
1366 ' ' * (locale_len - len(timestr)))
1367 else:
1367 else:
1368 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1368 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1369 time.localtime(ent[3]))
1369 time.localtime(ent[3]))
1370 if ent[1] & 020000:
1370 if ent[1] & 020000:
1371 mode = 'lnk'
1371 mode = 'lnk'
1372 else:
1372 else:
1373 mode = '%3o' % (ent[1] & 0777)
1373 mode = '%3o' % (ent[1] & 0777)
1374 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1374 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1375 for f in repo.dirstate.copies():
1375 for f in repo.dirstate.copies():
1376 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1376 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1377
1377
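Illustrative sketch (editorial): the mode column above prints 'lnk' whenever the stored st_mode has the 020000 bit set and otherwise the low nine permission bits in octal, exactly the check used in the loop. Standalone Python 2 equivalent:

def fmtmode(st_mode):
    # S_IFLNK (0120000) has the 020000 bit set, S_IFREG (0100000) does not,
    # so this single bit separates symlinks from regular files here.
    if st_mode & 020000:
        return 'lnk'
    return '%3o' % (st_mode & 0777)

# fmtmode(0100644) -> '644'; fmtmode(0120777) -> 'lnk'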
1378 def debugsub(ui, repo, rev=None):
1378 def debugsub(ui, repo, rev=None):
1379 ctx = cmdutil.revsingle(repo, rev, None)
1379 ctx = cmdutil.revsingle(repo, rev, None)
1380 for k, v in sorted(ctx.substate.items()):
1380 for k, v in sorted(ctx.substate.items()):
1381 ui.write('path %s\n' % k)
1381 ui.write('path %s\n' % k)
1382 ui.write(' source %s\n' % v[0])
1382 ui.write(' source %s\n' % v[0])
1383 ui.write(' revision %s\n' % v[1])
1383 ui.write(' revision %s\n' % v[1])
1384
1384
1385 def debugdag(ui, repo, file_=None, *revs, **opts):
1385 def debugdag(ui, repo, file_=None, *revs, **opts):
1386 """format the changelog or an index DAG as a concise textual description
1386 """format the changelog or an index DAG as a concise textual description
1387
1387
1388 If you pass a revlog index, the revlog's DAG is emitted. If you list
1388 If you pass a revlog index, the revlog's DAG is emitted. If you list
1389 revision numbers, they get labelled in the output as rN.
1389 revision numbers, they get labelled in the output as rN.
1390
1390
1391 Otherwise, the changelog DAG of the current repo is emitted.
1391 Otherwise, the changelog DAG of the current repo is emitted.
1392 """
1392 """
1393 spaces = opts.get('spaces')
1393 spaces = opts.get('spaces')
1394 dots = opts.get('dots')
1394 dots = opts.get('dots')
1395 if file_:
1395 if file_:
1396 rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1396 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1397 revs = set((int(r) for r in revs))
1397 revs = set((int(r) for r in revs))
1398 def events():
1398 def events():
1399 for r in rlog:
1399 for r in rlog:
1400 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1400 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1401 if r in revs:
1401 if r in revs:
1402 yield 'l', (r, "r%i" % r)
1402 yield 'l', (r, "r%i" % r)
1403 elif repo:
1403 elif repo:
1404 cl = repo.changelog
1404 cl = repo.changelog
1405 tags = opts.get('tags')
1405 tags = opts.get('tags')
1406 branches = opts.get('branches')
1406 branches = opts.get('branches')
1407 if tags:
1407 if tags:
1408 labels = {}
1408 labels = {}
1409 for l, n in repo.tags().items():
1409 for l, n in repo.tags().items():
1410 labels.setdefault(cl.rev(n), []).append(l)
1410 labels.setdefault(cl.rev(n), []).append(l)
1411 def events():
1411 def events():
1412 b = "default"
1412 b = "default"
1413 for r in cl:
1413 for r in cl:
1414 if branches:
1414 if branches:
1415 newb = cl.read(cl.node(r))[5]['branch']
1415 newb = cl.read(cl.node(r))[5]['branch']
1416 if newb != b:
1416 if newb != b:
1417 yield 'a', newb
1417 yield 'a', newb
1418 b = newb
1418 b = newb
1419 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1419 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1420 if tags:
1420 if tags:
1421 ls = labels.get(r)
1421 ls = labels.get(r)
1422 if ls:
1422 if ls:
1423 for l in ls:
1423 for l in ls:
1424 yield 'l', (r, l)
1424 yield 'l', (r, l)
1425 else:
1425 else:
1426 raise util.Abort(_('need repo for changelog dag'))
1426 raise util.Abort(_('need repo for changelog dag'))
1427
1427
1428 for line in dagparser.dagtextlines(events(),
1428 for line in dagparser.dagtextlines(events(),
1429 addspaces=spaces,
1429 addspaces=spaces,
1430 wraplabels=True,
1430 wraplabels=True,
1431 wrapannotations=True,
1431 wrapannotations=True,
1432 wrapnonlinear=dots,
1432 wrapnonlinear=dots,
1433 usedots=dots,
1433 usedots=dots,
1434 maxlinewidth=70):
1434 maxlinewidth=70):
1435 ui.write(line)
1435 ui.write(line)
1436 ui.write("\n")
1436 ui.write("\n")
1437
1437
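Illustrative sketch (editorial, assumes this changeset's tree where opener now lives in mercurial.scmutil, Python 2): opening a revlog directly by path, the pattern used by debugdag above and by debugdata, debugindex and debugindexdot below.

import os
from mercurial import revlog, scmutil

def openrevlog(indexfile):
    # audit=False because the debug commands address store files by their
    # literal paths, as in the calls above
    op = scmutil.opener(os.getcwd(), audit=False)
    return revlog.revlog(op, indexfile)

# r = openrevlog('.hg/store/00changelog.i')
# for rev in r:
#     print rev, r.parentrevs(rev)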
1438 def debugdata(ui, repo, file_, rev):
1438 def debugdata(ui, repo, file_, rev):
1439 """dump the contents of a data file revision"""
1439 """dump the contents of a data file revision"""
1440 r = None
1440 r = None
1441 if repo:
1441 if repo:
1442 filelog = repo.file(file_)
1442 filelog = repo.file(file_)
1443 if len(filelog):
1443 if len(filelog):
1444 r = filelog
1444 r = filelog
1445 if not r:
1445 if not r:
1446 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
1446 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
1447 file_[:-2] + ".i")
1447 try:
1448 try:
1448 ui.write(r.revision(r.lookup(rev)))
1449 ui.write(r.revision(r.lookup(rev)))
1449 except KeyError:
1450 except KeyError:
1450 raise util.Abort(_('invalid revision identifier %s') % rev)
1451 raise util.Abort(_('invalid revision identifier %s') % rev)
1451
1452
1452 def debugdate(ui, date, range=None, **opts):
1453 def debugdate(ui, date, range=None, **opts):
1453 """parse and display a date"""
1454 """parse and display a date"""
1454 if opts["extended"]:
1455 if opts["extended"]:
1455 d = util.parsedate(date, util.extendeddateformats)
1456 d = util.parsedate(date, util.extendeddateformats)
1456 else:
1457 else:
1457 d = util.parsedate(date)
1458 d = util.parsedate(date)
1458 ui.write("internal: %s %s\n" % d)
1459 ui.write("internal: %s %s\n" % d)
1459 ui.write("standard: %s\n" % util.datestr(d))
1460 ui.write("standard: %s\n" % util.datestr(d))
1460 if range:
1461 if range:
1461 m = util.matchdate(range)
1462 m = util.matchdate(range)
1462 ui.write("match: %s\n" % m(d[0]))
1463 ui.write("match: %s\n" % m(d[0]))
1463
1464
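Illustrative sketch (editorial, same pre-2.0 mercurial.util API as used above): debugdate's core is a round trip through Mercurial's internal date pair.

from mercurial import util

def roundtrip(datestring):
    # parsedate returns the internal two-element (timestamp, offset) pair,
    # which debugdate prints as "internal:" and re-renders with datestr
    d = util.parsedate(datestring)
    return "internal: %s %s" % d, "standard: %s" % util.datestr(d)

# roundtrip('2011-04-01 12:00 +0200')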
1464 def debugignore(ui, repo, *values, **opts):
1465 def debugignore(ui, repo, *values, **opts):
1465 """display the combined ignore pattern"""
1466 """display the combined ignore pattern"""
1466 ignore = repo.dirstate._ignore
1467 ignore = repo.dirstate._ignore
1467 if hasattr(ignore, 'includepat'):
1468 if hasattr(ignore, 'includepat'):
1468 ui.write("%s\n" % ignore.includepat)
1469 ui.write("%s\n" % ignore.includepat)
1469 else:
1470 else:
1470 raise util.Abort(_("no ignore patterns found"))
1471 raise util.Abort(_("no ignore patterns found"))
1471
1472
1472 def debugindex(ui, repo, file_, **opts):
1473 def debugindex(ui, repo, file_, **opts):
1473 """dump the contents of an index file"""
1474 """dump the contents of an index file"""
1474 r = None
1475 r = None
1475 if repo:
1476 if repo:
1476 filelog = repo.file(file_)
1477 filelog = repo.file(file_)
1477 if len(filelog):
1478 if len(filelog):
1478 r = filelog
1479 r = filelog
1479
1480
1480 format = opts.get('format', 0)
1481 format = opts.get('format', 0)
1481 if format not in (0, 1):
1482 if format not in (0, 1):
1482 raise util.Abort(_("unknown format %d") % format)
1483 raise util.Abort(_("unknown format %d") % format)
1483
1484
1484 if not r:
1485 if not r:
1485 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1486 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1486
1487
1487 if format == 0:
1488 if format == 0:
1488 ui.write(" rev offset length base linkrev"
1489 ui.write(" rev offset length base linkrev"
1489 " nodeid p1 p2\n")
1490 " nodeid p1 p2\n")
1490 elif format == 1:
1491 elif format == 1:
1491 ui.write(" rev flag offset length"
1492 ui.write(" rev flag offset length"
1492 " size base link p1 p2 nodeid\n")
1493 " size base link p1 p2 nodeid\n")
1493
1494
1494 for i in r:
1495 for i in r:
1495 node = r.node(i)
1496 node = r.node(i)
1496 if format == 0:
1497 if format == 0:
1497 try:
1498 try:
1498 pp = r.parents(node)
1499 pp = r.parents(node)
1499 except:
1500 except:
1500 pp = [nullid, nullid]
1501 pp = [nullid, nullid]
1501 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1502 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1502 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1503 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1503 short(node), short(pp[0]), short(pp[1])))
1504 short(node), short(pp[0]), short(pp[1])))
1504 elif format == 1:
1505 elif format == 1:
1505 pr = r.parentrevs(i)
1506 pr = r.parentrevs(i)
1506 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1507 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1507 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1508 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1508 r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))
1509 r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))
1509
1510
1510 def debugindexdot(ui, repo, file_):
1511 def debugindexdot(ui, repo, file_):
1511 """dump an index DAG as a graphviz dot file"""
1512 """dump an index DAG as a graphviz dot file"""
1512 r = None
1513 r = None
1513 if repo:
1514 if repo:
1514 filelog = repo.file(file_)
1515 filelog = repo.file(file_)
1515 if len(filelog):
1516 if len(filelog):
1516 r = filelog
1517 r = filelog
1517 if not r:
1518 if not r:
1518 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1519 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1519 ui.write("digraph G {\n")
1520 ui.write("digraph G {\n")
1520 for i in r:
1521 for i in r:
1521 node = r.node(i)
1522 node = r.node(i)
1522 pp = r.parents(node)
1523 pp = r.parents(node)
1523 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1524 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1524 if pp[1] != nullid:
1525 if pp[1] != nullid:
1525 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1526 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1526 ui.write("}\n")
1527 ui.write("}\n")
1527
1528
1528 def debuginstall(ui):
1529 def debuginstall(ui):
1529 '''test Mercurial installation
1530 '''test Mercurial installation
1530
1531
1531 Returns 0 on success.
1532 Returns 0 on success.
1532 '''
1533 '''
1533
1534
1534 def writetemp(contents):
1535 def writetemp(contents):
1535 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1536 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1536 f = os.fdopen(fd, "wb")
1537 f = os.fdopen(fd, "wb")
1537 f.write(contents)
1538 f.write(contents)
1538 f.close()
1539 f.close()
1539 return name
1540 return name
1540
1541
1541 problems = 0
1542 problems = 0
1542
1543
1543 # encoding
1544 # encoding
1544 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1545 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1545 try:
1546 try:
1546 encoding.fromlocal("test")
1547 encoding.fromlocal("test")
1547 except util.Abort, inst:
1548 except util.Abort, inst:
1548 ui.write(" %s\n" % inst)
1549 ui.write(" %s\n" % inst)
1549 ui.write(_(" (check that your locale is properly set)\n"))
1550 ui.write(_(" (check that your locale is properly set)\n"))
1550 problems += 1
1551 problems += 1
1551
1552
1552 # compiled modules
1553 # compiled modules
1553 ui.status(_("Checking installed modules (%s)...\n")
1554 ui.status(_("Checking installed modules (%s)...\n")
1554 % os.path.dirname(__file__))
1555 % os.path.dirname(__file__))
1555 try:
1556 try:
1556 import bdiff, mpatch, base85, osutil
1557 import bdiff, mpatch, base85, osutil
1557 except Exception, inst:
1558 except Exception, inst:
1558 ui.write(" %s\n" % inst)
1559 ui.write(" %s\n" % inst)
1559 ui.write(_(" One or more extensions could not be found"))
1560 ui.write(_(" One or more extensions could not be found"))
1560 ui.write(_(" (check that you compiled the extensions)\n"))
1561 ui.write(_(" (check that you compiled the extensions)\n"))
1561 problems += 1
1562 problems += 1
1562
1563
1563 # templates
1564 # templates
1564 ui.status(_("Checking templates...\n"))
1565 ui.status(_("Checking templates...\n"))
1565 try:
1566 try:
1566 import templater
1567 import templater
1567 templater.templater(templater.templatepath("map-cmdline.default"))
1568 templater.templater(templater.templatepath("map-cmdline.default"))
1568 except Exception, inst:
1569 except Exception, inst:
1569 ui.write(" %s\n" % inst)
1570 ui.write(" %s\n" % inst)
1570 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1571 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1571 problems += 1
1572 problems += 1
1572
1573
1573 # editor
1574 # editor
1574 ui.status(_("Checking commit editor...\n"))
1575 ui.status(_("Checking commit editor...\n"))
1575 editor = ui.geteditor()
1576 editor = ui.geteditor()
1576 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1577 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1577 if not cmdpath:
1578 if not cmdpath:
1578 if editor == 'vi':
1579 if editor == 'vi':
1579 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1580 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1580 ui.write(_(" (specify a commit editor in your configuration"
1581 ui.write(_(" (specify a commit editor in your configuration"
1581 " file)\n"))
1582 " file)\n"))
1582 else:
1583 else:
1583 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1584 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1584 ui.write(_(" (specify a commit editor in your configuration"
1585 ui.write(_(" (specify a commit editor in your configuration"
1585 " file)\n"))
1586 " file)\n"))
1586 problems += 1
1587 problems += 1
1587
1588
1588 # check username
1589 # check username
1589 ui.status(_("Checking username...\n"))
1590 ui.status(_("Checking username...\n"))
1590 try:
1591 try:
1591 ui.username()
1592 ui.username()
1592 except util.Abort, e:
1593 except util.Abort, e:
1593 ui.write(" %s\n" % e)
1594 ui.write(" %s\n" % e)
1594 ui.write(_(" (specify a username in your configuration file)\n"))
1595 ui.write(_(" (specify a username in your configuration file)\n"))
1595 problems += 1
1596 problems += 1
1596
1597
1597 if not problems:
1598 if not problems:
1598 ui.status(_("No problems detected\n"))
1599 ui.status(_("No problems detected\n"))
1599 else:
1600 else:
1600 ui.write(_("%s problems detected,"
1601 ui.write(_("%s problems detected,"
1601 " please check your install!\n") % problems)
1602 " please check your install!\n") % problems)
1602
1603
1603 return problems
1604 return problems
1604
1605
1605 def debugrename(ui, repo, file1, *pats, **opts):
1606 def debugrename(ui, repo, file1, *pats, **opts):
1606 """dump rename information"""
1607 """dump rename information"""
1607
1608
1608 ctx = cmdutil.revsingle(repo, opts.get('rev'))
1609 ctx = cmdutil.revsingle(repo, opts.get('rev'))
1609 m = cmdutil.match(repo, (file1,) + pats, opts)
1610 m = cmdutil.match(repo, (file1,) + pats, opts)
1610 for abs in ctx.walk(m):
1611 for abs in ctx.walk(m):
1611 fctx = ctx[abs]
1612 fctx = ctx[abs]
1612 o = fctx.filelog().renamed(fctx.filenode())
1613 o = fctx.filelog().renamed(fctx.filenode())
1613 rel = m.rel(abs)
1614 rel = m.rel(abs)
1614 if o:
1615 if o:
1615 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1616 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1616 else:
1617 else:
1617 ui.write(_("%s not renamed\n") % rel)
1618 ui.write(_("%s not renamed\n") % rel)
1618
1619
1619 def debugwalk(ui, repo, *pats, **opts):
1620 def debugwalk(ui, repo, *pats, **opts):
1620 """show how files match on given patterns"""
1621 """show how files match on given patterns"""
1621 m = cmdutil.match(repo, pats, opts)
1622 m = cmdutil.match(repo, pats, opts)
1622 items = list(repo.walk(m))
1623 items = list(repo.walk(m))
1623 if not items:
1624 if not items:
1624 return
1625 return
1625 fmt = 'f %%-%ds %%-%ds %%s' % (
1626 fmt = 'f %%-%ds %%-%ds %%s' % (
1626 max([len(abs) for abs in items]),
1627 max([len(abs) for abs in items]),
1627 max([len(m.rel(abs)) for abs in items]))
1628 max([len(m.rel(abs)) for abs in items]))
1628 for abs in items:
1629 for abs in items:
1629 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1630 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1630 ui.write("%s\n" % line.rstrip())
1631 ui.write("%s\n" % line.rstrip())
1631
1632
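Illustrative sketch (editorial): debugwalk builds its output format in two steps; the column widths are interpolated first (hence the doubled %%), producing a template such as 'f %-7s %-5s %s' that is then filled per file.

def buildfmt(items, rel):
    # items: repo-relative paths; rel: maps each to its cwd-relative form,
    # playing the role of m.rel() above
    return 'f %%-%ds %%-%ds %%s' % (
        max(len(abs_) for abs_ in items),
        max(len(rel(abs_)) for abs_ in items))

# buildfmt(['a/b.txt', 'c.txt'], lambda p: p.split('/')[-1])
# -> 'f %-7s %-5s %s'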
1632 def debugwireargs(ui, repopath, *vals, **opts):
1633 def debugwireargs(ui, repopath, *vals, **opts):
1633 repo = hg.repository(hg.remoteui(ui, opts), repopath)
1634 repo = hg.repository(hg.remoteui(ui, opts), repopath)
1634 for opt in remoteopts:
1635 for opt in remoteopts:
1635 del opts[opt[1]]
1636 del opts[opt[1]]
1636 args = {}
1637 args = {}
1637 for k, v in opts.iteritems():
1638 for k, v in opts.iteritems():
1638 if v:
1639 if v:
1639 args[k] = v
1640 args[k] = v
1640 # run twice to check that we don't mess up the stream for the next command
1641 # run twice to check that we don't mess up the stream for the next command
1641 res1 = repo.debugwireargs(*vals, **args)
1642 res1 = repo.debugwireargs(*vals, **args)
1642 res2 = repo.debugwireargs(*vals, **args)
1643 res2 = repo.debugwireargs(*vals, **args)
1643 ui.write("%s\n" % res1)
1644 ui.write("%s\n" % res1)
1644 if res1 != res2:
1645 if res1 != res2:
1645 ui.warn("%s\n" % res2)
1646 ui.warn("%s\n" % res2)
1646
1647
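Illustrative sketch (editorial): the double call above is a stream-hygiene check; issuing the same wire command twice and warning when the second answer differs catches a handler that leaves stray data on the connection.

def checktwice(call, *args, **kwargs):
    # returns (result, True) when a repeated call is stable
    first = call(*args, **kwargs)
    second = call(*args, **kwargs)
    return first, first == second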
1647 def diff(ui, repo, *pats, **opts):
1648 def diff(ui, repo, *pats, **opts):
1648 """diff repository (or selected files)
1649 """diff repository (or selected files)
1649
1650
1650 Show differences between revisions for the specified files.
1651 Show differences between revisions for the specified files.
1651
1652
1652 Differences between files are shown using the unified diff format.
1653 Differences between files are shown using the unified diff format.
1653
1654
1654 .. note::
1655 .. note::
1655 diff may generate unexpected results for merges, as it will
1656 diff may generate unexpected results for merges, as it will
1656 default to comparing against the working directory's first
1657 default to comparing against the working directory's first
1657 parent changeset if no revisions are specified.
1658 parent changeset if no revisions are specified.
1658
1659
1659 When two revision arguments are given, then changes are shown
1660 When two revision arguments are given, then changes are shown
1660 between those revisions. If only one revision is specified then
1661 between those revisions. If only one revision is specified then
1661 that revision is compared to the working directory, and, when no
1662 that revision is compared to the working directory, and, when no
1662 revisions are specified, the working directory files are compared
1663 revisions are specified, the working directory files are compared
1663 to its parent.
1664 to its parent.
1664
1665
1665 Alternatively you can specify -c/--change with a revision to see
1666 Alternatively you can specify -c/--change with a revision to see
1666 the changes in that changeset relative to its first parent.
1667 the changes in that changeset relative to its first parent.
1667
1668
1668 Without the -a/--text option, diff will avoid generating diffs of
1669 Without the -a/--text option, diff will avoid generating diffs of
1669 files it detects as binary. With -a, diff will generate a diff
1670 files it detects as binary. With -a, diff will generate a diff
1670 anyway, probably with undesirable results.
1671 anyway, probably with undesirable results.
1671
1672
1672 Use the -g/--git option to generate diffs in the git extended diff
1673 Use the -g/--git option to generate diffs in the git extended diff
1673 format. For more information, read :hg:`help diffs`.
1674 format. For more information, read :hg:`help diffs`.
1674
1675
1675 Returns 0 on success.
1676 Returns 0 on success.
1676 """
1677 """
1677
1678
1678 revs = opts.get('rev')
1679 revs = opts.get('rev')
1679 change = opts.get('change')
1680 change = opts.get('change')
1680 stat = opts.get('stat')
1681 stat = opts.get('stat')
1681 reverse = opts.get('reverse')
1682 reverse = opts.get('reverse')
1682
1683
1683 if revs and change:
1684 if revs and change:
1684 msg = _('cannot specify --rev and --change at the same time')
1685 msg = _('cannot specify --rev and --change at the same time')
1685 raise util.Abort(msg)
1686 raise util.Abort(msg)
1686 elif change:
1687 elif change:
1687 node2 = cmdutil.revsingle(repo, change, None).node()
1688 node2 = cmdutil.revsingle(repo, change, None).node()
1688 node1 = repo[node2].p1().node()
1689 node1 = repo[node2].p1().node()
1689 else:
1690 else:
1690 node1, node2 = cmdutil.revpair(repo, revs)
1691 node1, node2 = cmdutil.revpair(repo, revs)
1691
1692
1692 if reverse:
1693 if reverse:
1693 node1, node2 = node2, node1
1694 node1, node2 = node2, node1
1694
1695
1695 diffopts = patch.diffopts(ui, opts)
1696 diffopts = patch.diffopts(ui, opts)
1696 m = cmdutil.match(repo, pats, opts)
1697 m = cmdutil.match(repo, pats, opts)
1697 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1698 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1698 listsubrepos=opts.get('subrepos'))
1699 listsubrepos=opts.get('subrepos'))
1699
1700
1700 def export(ui, repo, *changesets, **opts):
1701 def export(ui, repo, *changesets, **opts):
1701 """dump the header and diffs for one or more changesets
1702 """dump the header and diffs for one or more changesets
1702
1703
1703 Print the changeset header and diffs for one or more revisions.
1704 Print the changeset header and diffs for one or more revisions.
1704
1705
1705 The information shown in the changeset header is: author, date,
1706 The information shown in the changeset header is: author, date,
1706 branch name (if non-default), changeset hash, parent(s) and commit
1707 branch name (if non-default), changeset hash, parent(s) and commit
1707 comment.
1708 comment.
1708
1709
1709 .. note::
1710 .. note::
1710 export may generate unexpected diff output for merge
1711 export may generate unexpected diff output for merge
1711 changesets, as it will compare the merge changeset against its
1712 changesets, as it will compare the merge changeset against its
1712 first parent only.
1713 first parent only.
1713
1714
1714 Output may be to a file, in which case the name of the file is
1715 Output may be to a file, in which case the name of the file is
1715 given using a format string. The formatting rules are as follows:
1716 given using a format string. The formatting rules are as follows:
1716
1717
1717 :``%%``: literal "%" character
1718 :``%%``: literal "%" character
1718 :``%H``: changeset hash (40 hexadecimal digits)
1719 :``%H``: changeset hash (40 hexadecimal digits)
1719 :``%N``: number of patches being generated
1720 :``%N``: number of patches being generated
1720 :``%R``: changeset revision number
1721 :``%R``: changeset revision number
1721 :``%b``: basename of the exporting repository
1722 :``%b``: basename of the exporting repository
1722 :``%h``: short-form changeset hash (12 hexadecimal digits)
1723 :``%h``: short-form changeset hash (12 hexadecimal digits)
1723 :``%n``: zero-padded sequence number, starting at 1
1724 :``%n``: zero-padded sequence number, starting at 1
1724 :``%r``: zero-padded changeset revision number
1725 :``%r``: zero-padded changeset revision number
1725
1726
1726 Without the -a/--text option, export will avoid generating diffs
1727 Without the -a/--text option, export will avoid generating diffs
1727 of files it detects as binary. With -a, export will generate a
1728 of files it detects as binary. With -a, export will generate a
1728 diff anyway, probably with undesirable results.
1729 diff anyway, probably with undesirable results.
1729
1730
1730 Use the -g/--git option to generate diffs in the git extended diff
1731 Use the -g/--git option to generate diffs in the git extended diff
1731 format. See :hg:`help diffs` for more information.
1732 format. See :hg:`help diffs` for more information.
1732
1733
1733 With the --switch-parent option, the diff will be against the
1734 With the --switch-parent option, the diff will be against the
1734 second parent. It can be useful to review a merge.
1735 second parent. It can be useful to review a merge.
1735
1736
1736 Returns 0 on success.
1737 Returns 0 on success.
1737 """
1738 """
1738 changesets += tuple(opts.get('rev', []))
1739 changesets += tuple(opts.get('rev', []))
1739 if not changesets:
1740 if not changesets:
1740 raise util.Abort(_("export requires at least one changeset"))
1741 raise util.Abort(_("export requires at least one changeset"))
1741 revs = cmdutil.revrange(repo, changesets)
1742 revs = cmdutil.revrange(repo, changesets)
1742 if len(revs) > 1:
1743 if len(revs) > 1:
1743 ui.note(_('exporting patches:\n'))
1744 ui.note(_('exporting patches:\n'))
1744 else:
1745 else:
1745 ui.note(_('exporting patch:\n'))
1746 ui.note(_('exporting patch:\n'))
1746 cmdutil.export(repo, revs, template=opts.get('output'),
1747 cmdutil.export(repo, revs, template=opts.get('output'),
1747 switch_parent=opts.get('switch_parent'),
1748 switch_parent=opts.get('switch_parent'),
1748 opts=patch.diffopts(ui, opts))
1749 opts=patch.diffopts(ui, opts))
1749
1750
1750 def forget(ui, repo, *pats, **opts):
1751 def forget(ui, repo, *pats, **opts):
1751 """forget the specified files on the next commit
1752 """forget the specified files on the next commit
1752
1753
1753 Mark the specified files so they will no longer be tracked
1754 Mark the specified files so they will no longer be tracked
1754 after the next commit.
1755 after the next commit.
1755
1756
1756 This only removes files from the current branch, not from the
1757 This only removes files from the current branch, not from the
1757 entire project history, and it does not delete them from the
1758 entire project history, and it does not delete them from the
1758 working directory.
1759 working directory.
1759
1760
1760 To undo a forget before the next commit, see :hg:`add`.
1761 To undo a forget before the next commit, see :hg:`add`.
1761
1762
1762 Returns 0 on success.
1763 Returns 0 on success.
1763 """
1764 """
1764
1765
1765 if not pats:
1766 if not pats:
1766 raise util.Abort(_('no files specified'))
1767 raise util.Abort(_('no files specified'))
1767
1768
1768 m = cmdutil.match(repo, pats, opts)
1769 m = cmdutil.match(repo, pats, opts)
1769 s = repo.status(match=m, clean=True)
1770 s = repo.status(match=m, clean=True)
1770 forget = sorted(s[0] + s[1] + s[3] + s[6])
1771 forget = sorted(s[0] + s[1] + s[3] + s[6])
1771 errs = 0
1772 errs = 0
1772
1773
1773 for f in m.files():
1774 for f in m.files():
1774 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1775 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1775 ui.warn(_('not removing %s: file is already untracked\n')
1776 ui.warn(_('not removing %s: file is already untracked\n')
1776 % m.rel(f))
1777 % m.rel(f))
1777 errs = 1
1778 errs = 1
1778
1779
1779 for f in forget:
1780 for f in forget:
1780 if ui.verbose or not m.exact(f):
1781 if ui.verbose or not m.exact(f):
1781 ui.status(_('removing %s\n') % m.rel(f))
1782 ui.status(_('removing %s\n') % m.rel(f))
1782
1783
1783 repo[None].remove(forget, unlink=False)
1784 repo[None].remove(forget, unlink=False)
1784 return errs
1785 return errs
1785
1786
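Illustrative sketch (editorial; the index names below are my reading of repo.status(), which I understand to return modified, added, removed, deleted, unknown, ignored, clean in that order): the s[0] + s[1] + s[3] + s[6] selection above forgets every matched file the dirstate still tracks that is not already marked removed.

def selectforget(status):
    # status: assumed to be the 7-tuple from repo.status(match=m, clean=True)
    modified, added, removed, deleted, unknown, ignored, clean = status
    return sorted(modified + added + deleted + clean)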
1786 def grep(ui, repo, pattern, *pats, **opts):
1787 def grep(ui, repo, pattern, *pats, **opts):
1787 """search for a pattern in specified files and revisions
1788 """search for a pattern in specified files and revisions
1788
1789
1789 Search revisions of files for a regular expression.
1790 Search revisions of files for a regular expression.
1790
1791
1791 This command behaves differently than Unix grep. It only accepts
1792 This command behaves differently than Unix grep. It only accepts
1792 Python/Perl regexps. It searches repository history, not the
1793 Python/Perl regexps. It searches repository history, not the
1793 working directory. It always prints the revision number in which a
1794 working directory. It always prints the revision number in which a
1794 match appears.
1795 match appears.
1795
1796
1796 By default, grep only prints output for the first revision of a
1797 By default, grep only prints output for the first revision of a
1797 file in which it finds a match. To get it to print every revision
1798 file in which it finds a match. To get it to print every revision
1798 that contains a change in match status ("-" for a match that
1799 that contains a change in match status ("-" for a match that
1799 becomes a non-match, or "+" for a non-match that becomes a match),
1800 becomes a non-match, or "+" for a non-match that becomes a match),
1800 use the --all flag.
1801 use the --all flag.
1801
1802
1802 Returns 0 if a match is found, 1 otherwise.
1803 Returns 0 if a match is found, 1 otherwise.
1803 """
1804 """
1804 reflags = 0
1805 reflags = 0
1805 if opts.get('ignore_case'):
1806 if opts.get('ignore_case'):
1806 reflags |= re.I
1807 reflags |= re.I
1807 try:
1808 try:
1808 regexp = re.compile(pattern, reflags)
1809 regexp = re.compile(pattern, reflags)
1809 except re.error, inst:
1810 except re.error, inst:
1810 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1811 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1811 return 1
1812 return 1
1812 sep, eol = ':', '\n'
1813 sep, eol = ':', '\n'
1813 if opts.get('print0'):
1814 if opts.get('print0'):
1814 sep = eol = '\0'
1815 sep = eol = '\0'
1815
1816
1816 getfile = util.lrucachefunc(repo.file)
1817 getfile = util.lrucachefunc(repo.file)
1817
1818
1818 def matchlines(body):
1819 def matchlines(body):
1819 begin = 0
1820 begin = 0
1820 linenum = 0
1821 linenum = 0
1821 while True:
1822 while True:
1822 match = regexp.search(body, begin)
1823 match = regexp.search(body, begin)
1823 if not match:
1824 if not match:
1824 break
1825 break
1825 mstart, mend = match.span()
1826 mstart, mend = match.span()
1826 linenum += body.count('\n', begin, mstart) + 1
1827 linenum += body.count('\n', begin, mstart) + 1
1827 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1828 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1828 begin = body.find('\n', mend) + 1 or len(body)
1829 begin = body.find('\n', mend) + 1 or len(body)
1829 lend = begin - 1
1830 lend = begin - 1
1830 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1831 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1831
1832
1832 class linestate(object):
1833 class linestate(object):
1833 def __init__(self, line, linenum, colstart, colend):
1834 def __init__(self, line, linenum, colstart, colend):
1834 self.line = line
1835 self.line = line
1835 self.linenum = linenum
1836 self.linenum = linenum
1836 self.colstart = colstart
1837 self.colstart = colstart
1837 self.colend = colend
1838 self.colend = colend
1838
1839
1839 def __hash__(self):
1840 def __hash__(self):
1840 return hash((self.linenum, self.line))
1841 return hash((self.linenum, self.line))
1841
1842
1842 def __eq__(self, other):
1843 def __eq__(self, other):
1843 return self.line == other.line
1844 return self.line == other.line
1844
1845
1845 matches = {}
1846 matches = {}
1846 copies = {}
1847 copies = {}
1847 def grepbody(fn, rev, body):
1848 def grepbody(fn, rev, body):
1848 matches[rev].setdefault(fn, [])
1849 matches[rev].setdefault(fn, [])
1849 m = matches[rev][fn]
1850 m = matches[rev][fn]
1850 for lnum, cstart, cend, line in matchlines(body):
1851 for lnum, cstart, cend, line in matchlines(body):
1851 s = linestate(line, lnum, cstart, cend)
1852 s = linestate(line, lnum, cstart, cend)
1852 m.append(s)
1853 m.append(s)
1853
1854
1854 def difflinestates(a, b):
1855 def difflinestates(a, b):
1855 sm = difflib.SequenceMatcher(None, a, b)
1856 sm = difflib.SequenceMatcher(None, a, b)
1856 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1857 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1857 if tag == 'insert':
1858 if tag == 'insert':
1858 for i in xrange(blo, bhi):
1859 for i in xrange(blo, bhi):
1859 yield ('+', b[i])
1860 yield ('+', b[i])
1860 elif tag == 'delete':
1861 elif tag == 'delete':
1861 for i in xrange(alo, ahi):
1862 for i in xrange(alo, ahi):
1862 yield ('-', a[i])
1863 yield ('-', a[i])
1863 elif tag == 'replace':
1864 elif tag == 'replace':
1864 for i in xrange(alo, ahi):
1865 for i in xrange(alo, ahi):
1865 yield ('-', a[i])
1866 yield ('-', a[i])
1866 for i in xrange(blo, bhi):
1867 for i in xrange(blo, bhi):
1867 yield ('+', b[i])
1868 yield ('+', b[i])
1868
1869
1869 def display(fn, ctx, pstates, states):
1870 def display(fn, ctx, pstates, states):
1870 rev = ctx.rev()
1871 rev = ctx.rev()
1871 datefunc = ui.quiet and util.shortdate or util.datestr
1872 datefunc = ui.quiet and util.shortdate or util.datestr
1872 found = False
1873 found = False
1873 filerevmatches = {}
1874 filerevmatches = {}
1874 def binary():
1875 def binary():
1875 flog = getfile(fn)
1876 flog = getfile(fn)
1876 return util.binary(flog.read(ctx.filenode(fn)))
1877 return util.binary(flog.read(ctx.filenode(fn)))
1877
1878
1878 if opts.get('all'):
1879 if opts.get('all'):
1879 iter = difflinestates(pstates, states)
1880 iter = difflinestates(pstates, states)
1880 else:
1881 else:
1881 iter = [('', l) for l in states]
1882 iter = [('', l) for l in states]
1882 for change, l in iter:
1883 for change, l in iter:
1883 cols = [fn, str(rev)]
1884 cols = [fn, str(rev)]
1884 before, match, after = None, None, None
1885 before, match, after = None, None, None
1885 if opts.get('line_number'):
1886 if opts.get('line_number'):
1886 cols.append(str(l.linenum))
1887 cols.append(str(l.linenum))
1887 if opts.get('all'):
1888 if opts.get('all'):
1888 cols.append(change)
1889 cols.append(change)
1889 if opts.get('user'):
1890 if opts.get('user'):
1890 cols.append(ui.shortuser(ctx.user()))
1891 cols.append(ui.shortuser(ctx.user()))
1891 if opts.get('date'):
1892 if opts.get('date'):
1892 cols.append(datefunc(ctx.date()))
1893 cols.append(datefunc(ctx.date()))
1893 if opts.get('files_with_matches'):
1894 if opts.get('files_with_matches'):
1894 c = (fn, rev)
1895 c = (fn, rev)
1895 if c in filerevmatches:
1896 if c in filerevmatches:
1896 continue
1897 continue
1897 filerevmatches[c] = 1
1898 filerevmatches[c] = 1
1898 else:
1899 else:
1899 before = l.line[:l.colstart]
1900 before = l.line[:l.colstart]
1900 match = l.line[l.colstart:l.colend]
1901 match = l.line[l.colstart:l.colend]
1901 after = l.line[l.colend:]
1902 after = l.line[l.colend:]
1902 ui.write(sep.join(cols))
1903 ui.write(sep.join(cols))
1903 if before is not None:
1904 if before is not None:
1904 if not opts.get('text') and binary():
1905 if not opts.get('text') and binary():
1905 ui.write(sep + " Binary file matches")
1906 ui.write(sep + " Binary file matches")
1906 else:
1907 else:
1907 ui.write(sep + before)
1908 ui.write(sep + before)
1908 ui.write(match, label='grep.match')
1909 ui.write(match, label='grep.match')
1909 ui.write(after)
1910 ui.write(after)
1910 ui.write(eol)
1911 ui.write(eol)
1911 found = True
1912 found = True
1912 return found
1913 return found
1913
1914
1914 skip = {}
1915 skip = {}
1915 revfiles = {}
1916 revfiles = {}
1916 matchfn = cmdutil.match(repo, pats, opts)
1917 matchfn = cmdutil.match(repo, pats, opts)
1917 found = False
1918 found = False
1918 follow = opts.get('follow')
1919 follow = opts.get('follow')
1919
1920
1920 def prep(ctx, fns):
1921 def prep(ctx, fns):
1921 rev = ctx.rev()
1922 rev = ctx.rev()
1922 pctx = ctx.p1()
1923 pctx = ctx.p1()
1923 parent = pctx.rev()
1924 parent = pctx.rev()
1924 matches.setdefault(rev, {})
1925 matches.setdefault(rev, {})
1925 matches.setdefault(parent, {})
1926 matches.setdefault(parent, {})
1926 files = revfiles.setdefault(rev, [])
1927 files = revfiles.setdefault(rev, [])
1927 for fn in fns:
1928 for fn in fns:
1928 flog = getfile(fn)
1929 flog = getfile(fn)
1929 try:
1930 try:
1930 fnode = ctx.filenode(fn)
1931 fnode = ctx.filenode(fn)
1931 except error.LookupError:
1932 except error.LookupError:
1932 continue
1933 continue
1933
1934
1934 copied = flog.renamed(fnode)
1935 copied = flog.renamed(fnode)
1935 copy = follow and copied and copied[0]
1936 copy = follow and copied and copied[0]
1936 if copy:
1937 if copy:
1937 copies.setdefault(rev, {})[fn] = copy
1938 copies.setdefault(rev, {})[fn] = copy
1938 if fn in skip:
1939 if fn in skip:
1939 if copy:
1940 if copy:
1940 skip[copy] = True
1941 skip[copy] = True
1941 continue
1942 continue
1942 files.append(fn)
1943 files.append(fn)
1943
1944
1944 if fn not in matches[rev]:
1945 if fn not in matches[rev]:
1945 grepbody(fn, rev, flog.read(fnode))
1946 grepbody(fn, rev, flog.read(fnode))
1946
1947
1947 pfn = copy or fn
1948 pfn = copy or fn
1948 if pfn not in matches[parent]:
1949 if pfn not in matches[parent]:
1949 try:
1950 try:
1950 fnode = pctx.filenode(pfn)
1951 fnode = pctx.filenode(pfn)
1951 grepbody(pfn, parent, flog.read(fnode))
1952 grepbody(pfn, parent, flog.read(fnode))
1952 except error.LookupError:
1953 except error.LookupError:
1953 pass
1954 pass
1954
1955
1955 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1956 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1956 rev = ctx.rev()
1957 rev = ctx.rev()
1957 parent = ctx.p1().rev()
1958 parent = ctx.p1().rev()
1958 for fn in sorted(revfiles.get(rev, [])):
1959 for fn in sorted(revfiles.get(rev, [])):
1959 states = matches[rev][fn]
1960 states = matches[rev][fn]
1960 copy = copies.get(rev, {}).get(fn)
1961 copy = copies.get(rev, {}).get(fn)
1961 if fn in skip:
1962 if fn in skip:
1962 if copy:
1963 if copy:
1963 skip[copy] = True
1964 skip[copy] = True
1964 continue
1965 continue
1965 pstates = matches.get(parent, {}).get(copy or fn, [])
1966 pstates = matches.get(parent, {}).get(copy or fn, [])
1966 if pstates or states:
1967 if pstates or states:
1967 r = display(fn, ctx, pstates, states)
1968 r = display(fn, ctx, pstates, states)
1968 found = found or r
1969 found = found or r
1969 if r and not opts.get('all'):
1970 if r and not opts.get('all'):
1970 skip[fn] = True
1971 skip[fn] = True
1971 if copy:
1972 if copy:
1972 skip[copy] = True
1973 skip[copy] = True
1973 del matches[rev]
1974 del matches[rev]
1974 del revfiles[rev]
1975 del revfiles[rev]
1975
1976
1976 return not found
1977 return not found
1977
1978
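Illustrative sketch (editorial, simplified): per hit, matchlines above yields the 1-based line number, the match's column span within that line, and the line's text, then moves past that line so each line produces at most one hit. A standalone equivalent without the single-pass offset bookkeeping:

import re

def simplematchlines(regexp, body):
    for linenum, line in enumerate(body.splitlines(), 1):
        m = regexp.search(line)
        if m:
            # at most one hit per line, like the generator above
            yield linenum, m.start(), m.end(), line

# list(simplematchlines(re.compile('def '), "x = 1\ndef f():\n    pass\n"))
# -> [(2, 0, 4, 'def f():')]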
1978 def heads(ui, repo, *branchrevs, **opts):
1979 def heads(ui, repo, *branchrevs, **opts):
1979 """show current repository heads or show branch heads
1980 """show current repository heads or show branch heads
1980
1981
1981 With no arguments, show all repository branch heads.
1982 With no arguments, show all repository branch heads.
1982
1983
1983 Repository "heads" are changesets with no child changesets. They are
1984 Repository "heads" are changesets with no child changesets. They are
1984 where development generally takes place and are the usual targets
1985 where development generally takes place and are the usual targets
1985 for update and merge operations. Branch heads are changesets that have
1986 for update and merge operations. Branch heads are changesets that have
1986 no child changeset on the same branch.
1987 no child changeset on the same branch.
1987
1988
1988 If one or more REVs are given, only branch heads on the branches
1989 If one or more REVs are given, only branch heads on the branches
1989 associated with the specified changesets are shown.
1990 associated with the specified changesets are shown.
1990
1991
1991 If -c/--closed is specified, also show branch heads marked closed
1992 If -c/--closed is specified, also show branch heads marked closed
1992 (see :hg:`commit --close-branch`).
1993 (see :hg:`commit --close-branch`).
1993
1994
1994 If STARTREV is specified, only those heads that are descendants of
1995 If STARTREV is specified, only those heads that are descendants of
1995 STARTREV will be displayed.
1996 STARTREV will be displayed.
1996
1997
1997 If -t/--topo is specified, named branch mechanics will be ignored and only
1998 If -t/--topo is specified, named branch mechanics will be ignored and only
1998 changesets without children will be shown.
1999 changesets without children will be shown.
1999
2000
2000 Returns 0 if matching heads are found, 1 if not.
2001 Returns 0 if matching heads are found, 1 if not.
2001 """
2002 """
2002
2003
2003 start = None
2004 start = None
2004 if 'rev' in opts:
2005 if 'rev' in opts:
2005 start = cmdutil.revsingle(repo, opts['rev'], None).node()
2006 start = cmdutil.revsingle(repo, opts['rev'], None).node()
2006
2007
2007 if opts.get('topo'):
2008 if opts.get('topo'):
2008 heads = [repo[h] for h in repo.heads(start)]
2009 heads = [repo[h] for h in repo.heads(start)]
2009 else:
2010 else:
2010 heads = []
2011 heads = []
2011 for b, ls in repo.branchmap().iteritems():
2012 for b, ls in repo.branchmap().iteritems():
2012 if start is None:
2013 if start is None:
2013 heads += [repo[h] for h in ls]
2014 heads += [repo[h] for h in ls]
2014 continue
2015 continue
2015 startrev = repo.changelog.rev(start)
2016 startrev = repo.changelog.rev(start)
2016 descendants = set(repo.changelog.descendants(startrev))
2017 descendants = set(repo.changelog.descendants(startrev))
2017 descendants.add(startrev)
2018 descendants.add(startrev)
2018 rev = repo.changelog.rev
2019 rev = repo.changelog.rev
2019 heads += [repo[h] for h in ls if rev(h) in descendants]
2020 heads += [repo[h] for h in ls if rev(h) in descendants]
2020
2021
2021 if branchrevs:
2022 if branchrevs:
2022 branches = set(repo[br].branch() for br in branchrevs)
2023 branches = set(repo[br].branch() for br in branchrevs)
2023 heads = [h for h in heads if h.branch() in branches]
2024 heads = [h for h in heads if h.branch() in branches]
2024
2025
2025 if not opts.get('closed'):
2026 if not opts.get('closed'):
2026 heads = [h for h in heads if not h.extra().get('close')]
2027 heads = [h for h in heads if not h.extra().get('close')]
2027
2028
2028 if opts.get('active') and branchrevs:
2029 if opts.get('active') and branchrevs:
2029 dagheads = repo.heads(start)
2030 dagheads = repo.heads(start)
2030 heads = [h for h in heads if h.node() in dagheads]
2031 heads = [h for h in heads if h.node() in dagheads]
2031
2032
2032 if branchrevs:
2033 if branchrevs:
2033 haveheads = set(h.branch() for h in heads)
2034 haveheads = set(h.branch() for h in heads)
2034 if branches - haveheads:
2035 if branches - haveheads:
2035 headless = ', '.join(b for b in branches - haveheads)
2036 headless = ', '.join(b for b in branches - haveheads)
2036 msg = _('no open branch heads found on branches %s')
2037 msg = _('no open branch heads found on branches %s')
2037 if opts.get('rev'):
2038 if opts.get('rev'):
2038 msg += _(' (started at %s)' % opts['rev'])
2039 msg += _(' (started at %s)' % opts['rev'])
2039 ui.warn((msg + '\n') % headless)
2040 ui.warn((msg + '\n') % headless)
2040
2041
2041 if not heads:
2042 if not heads:
2042 return 1
2043 return 1
2043
2044
2044 heads = sorted(heads, key=lambda x: -x.rev())
2045 heads = sorted(heads, key=lambda x: -x.rev())
2045 displayer = cmdutil.show_changeset(ui, repo, opts)
2046 displayer = cmdutil.show_changeset(ui, repo, opts)
2046 for ctx in heads:
2047 for ctx in heads:
2047 displayer.show(ctx)
2048 displayer.show(ctx)
2048 displayer.close()
2049 displayer.close()
2049
2050
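Illustrative sketch (editorial): with STARTREV given, heads above keeps only branch heads that descend from it (the start revision counts as its own descendant) and finally sorts newest-first for display; both steps compressed into one helper here.

def filterheads(headrevs, startrev, descendants_of):
    # descendants_of plays the role of repo.changelog.descendants above
    keep = set(descendants_of(startrev))
    keep.add(startrev)
    return sorted((r for r in headrevs if r in keep), reverse=True)

# filterheads([5, 9, 12], 4, lambda r: [5, 6, 9]) -> [9, 5]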
2050 def help_(ui, name=None, with_version=False, unknowncmd=False, full=True):
2051 def help_(ui, name=None, with_version=False, unknowncmd=False, full=True):
2051 """show help for a given topic or a help overview
2052 """show help for a given topic or a help overview
2052
2053
2053 With no arguments, print a list of commands with short help messages.
2054 With no arguments, print a list of commands with short help messages.
2054
2055
2055 Given a topic, extension, or command name, print help for that
2056 Given a topic, extension, or command name, print help for that
2056 topic.
2057 topic.
2057
2058
2058 Returns 0 if successful.
2059 Returns 0 if successful.
2059 """
2060 """
2060 option_lists = []
2061 option_lists = []
2061 textwidth = min(ui.termwidth(), 80) - 2
2062 textwidth = min(ui.termwidth(), 80) - 2
2062
2063
2063 def addglobalopts(aliases):
2064 def addglobalopts(aliases):
2064 if ui.verbose:
2065 if ui.verbose:
2065 option_lists.append((_("global options:"), globalopts))
2066 option_lists.append((_("global options:"), globalopts))
2066 if name == 'shortlist':
2067 if name == 'shortlist':
2067 option_lists.append((_('use "hg help" for the full list '
2068 option_lists.append((_('use "hg help" for the full list '
2068 'of commands'), ()))
2069 'of commands'), ()))
2069 else:
2070 else:
2070 if name == 'shortlist':
2071 if name == 'shortlist':
2071 msg = _('use "hg help" for the full list of commands '
2072 msg = _('use "hg help" for the full list of commands '
2072 'or "hg -v" for details')
2073 'or "hg -v" for details')
2073 elif name and not full:
2074 elif name and not full:
2074 msg = _('use "hg help %s" to show the full help text' % name)
2075 msg = _('use "hg help %s" to show the full help text' % name)
2075 elif aliases:
2076 elif aliases:
2076 msg = _('use "hg -v help%s" to show builtin aliases and '
2077 msg = _('use "hg -v help%s" to show builtin aliases and '
2077 'global options') % (name and " " + name or "")
2078 'global options') % (name and " " + name or "")
2078 else:
2079 else:
2079 msg = _('use "hg -v help %s" to show global options') % name
2080 msg = _('use "hg -v help %s" to show global options') % name
2080 option_lists.append((msg, ()))
2081 option_lists.append((msg, ()))
2081
2082
2082 def helpcmd(name):
2083 def helpcmd(name):
2083 if with_version:
2084 if with_version:
2084 version_(ui)
2085 version_(ui)
2085 ui.write('\n')
2086 ui.write('\n')
2086
2087
2087 try:
2088 try:
2088 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2089 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2089 except error.AmbiguousCommand, inst:
2090 except error.AmbiguousCommand, inst:
2090 # py3k fix: except vars can't be used outside the scope of the
2091 # py3k fix: except vars can't be used outside the scope of the
2091 # except block, nor can be used inside a lambda. python issue4617
2092 # except block, nor can be used inside a lambda. python issue4617
2092 prefix = inst.args[0]
2093 prefix = inst.args[0]
2093 select = lambda c: c.lstrip('^').startswith(prefix)
2094 select = lambda c: c.lstrip('^').startswith(prefix)
2094 helplist(_('list of commands:\n\n'), select)
2095 helplist(_('list of commands:\n\n'), select)
2095 return
2096 return
2096
2097
2097 # check if it's an invalid alias and display its error if it is
2098 # check if it's an invalid alias and display its error if it is
2098 if getattr(entry[0], 'badalias', False):
2099 if getattr(entry[0], 'badalias', False):
2099 if not unknowncmd:
2100 if not unknowncmd:
2100 entry[0](ui)
2101 entry[0](ui)
2101 return
2102 return
2102
2103
2103 # synopsis
2104 # synopsis
2104 if len(entry) > 2:
2105 if len(entry) > 2:
2105 if entry[2].startswith('hg'):
2106 if entry[2].startswith('hg'):
2106 ui.write("%s\n" % entry[2])
2107 ui.write("%s\n" % entry[2])
2107 else:
2108 else:
2108 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2109 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2109 else:
2110 else:
2110 ui.write('hg %s\n' % aliases[0])
2111 ui.write('hg %s\n' % aliases[0])
2111
2112
2112 # aliases
2113 # aliases
2113 if full and not ui.quiet and len(aliases) > 1:
2114 if full and not ui.quiet and len(aliases) > 1:
2114 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
2115 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
2115
2116
2116 # description
2117 # description
2117 doc = gettext(entry[0].__doc__)
2118 doc = gettext(entry[0].__doc__)
2118 if not doc:
2119 if not doc:
2119 doc = _("(no help text available)")
2120 doc = _("(no help text available)")
2120 if hasattr(entry[0], 'definition'): # aliased command
2121 if hasattr(entry[0], 'definition'): # aliased command
2121 if entry[0].definition.startswith('!'): # shell alias
2122 if entry[0].definition.startswith('!'): # shell alias
2122 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2123 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2123 else:
2124 else:
2124 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2125 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2125 if ui.quiet or not full:
2126 if ui.quiet or not full:
2126 doc = doc.splitlines()[0]
2127 doc = doc.splitlines()[0]
2127 keep = ui.verbose and ['verbose'] or []
2128 keep = ui.verbose and ['verbose'] or []
2128 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2129 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2129 ui.write("\n%s\n" % formatted)
2130 ui.write("\n%s\n" % formatted)
2130 if pruned:
2131 if pruned:
2131 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2132 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2132
2133
2133 if not ui.quiet:
2134 if not ui.quiet:
2134 # options
2135 # options
2135 if entry[1]:
2136 if entry[1]:
2136 option_lists.append((_("options:\n"), entry[1]))
2137 option_lists.append((_("options:\n"), entry[1]))
2137
2138
2138 addglobalopts(False)
2139 addglobalopts(False)
2139
2140
2140 def helplist(header, select=None):
2141 def helplist(header, select=None):
2141 h = {}
2142 h = {}
2142 cmds = {}
2143 cmds = {}
2143 for c, e in table.iteritems():
2144 for c, e in table.iteritems():
2144 f = c.split("|", 1)[0]
2145 f = c.split("|", 1)[0]
2145 if select and not select(f):
2146 if select and not select(f):
2146 continue
2147 continue
2147 if (not select and name != 'shortlist' and
2148 if (not select and name != 'shortlist' and
2148 e[0].__module__ != __name__):
2149 e[0].__module__ != __name__):
2149 continue
2150 continue
2150 if name == "shortlist" and not f.startswith("^"):
2151 if name == "shortlist" and not f.startswith("^"):
2151 continue
2152 continue
2152 f = f.lstrip("^")
2153 f = f.lstrip("^")
2153 if not ui.debugflag and f.startswith("debug"):
2154 if not ui.debugflag and f.startswith("debug"):
2154 continue
2155 continue
2155 doc = e[0].__doc__
2156 doc = e[0].__doc__
2156 if doc and 'DEPRECATED' in doc and not ui.verbose:
2157 if doc and 'DEPRECATED' in doc and not ui.verbose:
2157 continue
2158 continue
2158 doc = gettext(doc)
2159 doc = gettext(doc)
2159 if not doc:
2160 if not doc:
2160 doc = _("(no help text available)")
2161 doc = _("(no help text available)")
2161 h[f] = doc.splitlines()[0].rstrip()
2162 h[f] = doc.splitlines()[0].rstrip()
2162 cmds[f] = c.lstrip("^")
2163 cmds[f] = c.lstrip("^")
2163
2164
2164 if not h:
2165 if not h:
2165 ui.status(_('no commands defined\n'))
2166 ui.status(_('no commands defined\n'))
2166 return
2167 return
2167
2168
2168 ui.status(header)
2169 ui.status(header)
2169 fns = sorted(h)
2170 fns = sorted(h)
2170 m = max(map(len, fns))
2171 m = max(map(len, fns))
2171 for f in fns:
2172 for f in fns:
2172 if ui.verbose:
2173 if ui.verbose:
2173 commands = cmds[f].replace("|",", ")
2174 commands = cmds[f].replace("|",", ")
2174 ui.write(" %s:\n %s\n"%(commands, h[f]))
2175 ui.write(" %s:\n %s\n"%(commands, h[f]))
2175 else:
2176 else:
2176 ui.write('%s\n' % (util.wrap(h[f], textwidth,
2177 ui.write('%s\n' % (util.wrap(h[f], textwidth,
2177 initindent=' %-*s ' % (m, f),
2178 initindent=' %-*s ' % (m, f),
2178 hangindent=' ' * (m + 4))))
2179 hangindent=' ' * (m + 4))))
2179
2180
2180 if not ui.quiet:
2181 if not ui.quiet:
2181 addglobalopts(True)
2182 addglobalopts(True)
2182
2183
2183 def helptopic(name):
2184 def helptopic(name):
2184 for names, header, doc in help.helptable:
2185 for names, header, doc in help.helptable:
2185 if name in names:
2186 if name in names:
2186 break
2187 break
2187 else:
2188 else:
2188 raise error.UnknownCommand(name)
2189 raise error.UnknownCommand(name)
2189
2190
2190 # description
2191 # description
2191 if not doc:
2192 if not doc:
2192 doc = _("(no help text available)")
2193 doc = _("(no help text available)")
2193 if hasattr(doc, '__call__'):
2194 if hasattr(doc, '__call__'):
2194 doc = doc()
2195 doc = doc()
2195
2196
2196 ui.write("%s\n\n" % header)
2197 ui.write("%s\n\n" % header)
2197 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
2198 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
2198
2199
2199 def helpext(name):
2200 def helpext(name):
2200 try:
2201 try:
2201 mod = extensions.find(name)
2202 mod = extensions.find(name)
2202 doc = gettext(mod.__doc__) or _('no help text available')
2203 doc = gettext(mod.__doc__) or _('no help text available')
2203 except KeyError:
2204 except KeyError:
2204 mod = None
2205 mod = None
2205 doc = extensions.disabledext(name)
2206 doc = extensions.disabledext(name)
2206 if not doc:
2207 if not doc:
2207 raise error.UnknownCommand(name)
2208 raise error.UnknownCommand(name)
2208
2209
2209 if '\n' not in doc:
2210 if '\n' not in doc:
2210 head, tail = doc, ""
2211 head, tail = doc, ""
2211 else:
2212 else:
2212 head, tail = doc.split('\n', 1)
2213 head, tail = doc.split('\n', 1)
2213 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2214 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2214 if tail:
2215 if tail:
2215 ui.write(minirst.format(tail, textwidth))
2216 ui.write(minirst.format(tail, textwidth))
2216 ui.status('\n\n')
2217 ui.status('\n\n')
2217
2218
2218 if mod:
2219 if mod:
2219 try:
2220 try:
2220 ct = mod.cmdtable
2221 ct = mod.cmdtable
2221 except AttributeError:
2222 except AttributeError:
2222 ct = {}
2223 ct = {}
2223 modcmds = set([c.split('|', 1)[0] for c in ct])
2224 modcmds = set([c.split('|', 1)[0] for c in ct])
2224 helplist(_('list of commands:\n\n'), modcmds.__contains__)
2225 helplist(_('list of commands:\n\n'), modcmds.__contains__)
2225 else:
2226 else:
2226 ui.write(_('use "hg help extensions" for information on enabling '
2227 ui.write(_('use "hg help extensions" for information on enabling '
2227 'extensions\n'))
2228 'extensions\n'))
2228
2229
2229 def helpextcmd(name):
2230 def helpextcmd(name):
2230 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
2231 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
2231 doc = gettext(mod.__doc__).splitlines()[0]
2232 doc = gettext(mod.__doc__).splitlines()[0]
2232
2233
2233 msg = help.listexts(_("'%s' is provided by the following "
2234 msg = help.listexts(_("'%s' is provided by the following "
2234 "extension:") % cmd, {ext: doc}, len(ext),
2235 "extension:") % cmd, {ext: doc}, len(ext),
2235 indent=4)
2236 indent=4)
2236 ui.write(minirst.format(msg, textwidth))
2237 ui.write(minirst.format(msg, textwidth))
2237 ui.write('\n\n')
2238 ui.write('\n\n')
2238 ui.write(_('use "hg help extensions" for information on enabling '
2239 ui.write(_('use "hg help extensions" for information on enabling '
2239 'extensions\n'))
2240 'extensions\n'))
2240
2241
2241 help.addtopichook('revsets', revset.makedoc)
2242 help.addtopichook('revsets', revset.makedoc)
2242 help.addtopichook('templates', templatekw.makedoc)
2243 help.addtopichook('templates', templatekw.makedoc)
2243 help.addtopichook('templates', templatefilters.makedoc)
2244 help.addtopichook('templates', templatefilters.makedoc)
2244
2245
2245 if name and name != 'shortlist':
2246 if name and name != 'shortlist':
2246 i = None
2247 i = None
2247 if unknowncmd:
2248 if unknowncmd:
2248 queries = (helpextcmd,)
2249 queries = (helpextcmd,)
2249 else:
2250 else:
2250 queries = (helptopic, helpcmd, helpext, helpextcmd)
2251 queries = (helptopic, helpcmd, helpext, helpextcmd)
2251 for f in queries:
2252 for f in queries:
2252 try:
2253 try:
2253 f(name)
2254 f(name)
2254 i = None
2255 i = None
2255 break
2256 break
2256 except error.UnknownCommand, inst:
2257 except error.UnknownCommand, inst:
2257 i = inst
2258 i = inst
2258 if i:
2259 if i:
2259 raise i
2260 raise i
2260
2261
2261 else:
2262 else:
2262 # program name
2263 # program name
2263 if ui.verbose or with_version:
2264 if ui.verbose or with_version:
2264 version_(ui)
2265 version_(ui)
2265 else:
2266 else:
2266 ui.status(_("Mercurial Distributed SCM\n"))
2267 ui.status(_("Mercurial Distributed SCM\n"))
2267 ui.status('\n')
2268 ui.status('\n')
2268
2269
2269 # list of commands
2270 # list of commands
2270 if name == "shortlist":
2271 if name == "shortlist":
2271 header = _('basic commands:\n\n')
2272 header = _('basic commands:\n\n')
2272 else:
2273 else:
2273 header = _('list of commands:\n\n')
2274 header = _('list of commands:\n\n')
2274
2275
2275 helplist(header)
2276 helplist(header)
2276 if name != 'shortlist':
2277 if name != 'shortlist':
2277 exts, maxlength = extensions.enabled()
2278 exts, maxlength = extensions.enabled()
2278 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2279 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2279 if text:
2280 if text:
2280 ui.write("\n%s\n" % minirst.format(text, textwidth))
2281 ui.write("\n%s\n" % minirst.format(text, textwidth))
2281
2282
2282 # list all option lists
2283 # list all option lists
2283 opt_output = []
2284 opt_output = []
2284 multioccur = False
2285 multioccur = False
2285 for title, options in option_lists:
2286 for title, options in option_lists:
2286 opt_output.append(("\n%s" % title, None))
2287 opt_output.append(("\n%s" % title, None))
2287 for option in options:
2288 for option in options:
2288 if len(option) == 5:
2289 if len(option) == 5:
2289 shortopt, longopt, default, desc, optlabel = option
2290 shortopt, longopt, default, desc, optlabel = option
2290 else:
2291 else:
2291 shortopt, longopt, default, desc = option
2292 shortopt, longopt, default, desc = option
2292 optlabel = _("VALUE") # default label
2293 optlabel = _("VALUE") # default label
2293
2294
2294 if _("DEPRECATED") in desc and not ui.verbose:
2295 if _("DEPRECATED") in desc and not ui.verbose:
2295 continue
2296 continue
2296 if isinstance(default, list):
2297 if isinstance(default, list):
2297 numqualifier = " %s [+]" % optlabel
2298 numqualifier = " %s [+]" % optlabel
2298 multioccur = True
2299 multioccur = True
2299 elif (default is not None) and not isinstance(default, bool):
2300 elif (default is not None) and not isinstance(default, bool):
2300 numqualifier = " %s" % optlabel
2301 numqualifier = " %s" % optlabel
2301 else:
2302 else:
2302 numqualifier = ""
2303 numqualifier = ""
2303 opt_output.append(("%2s%s" %
2304 opt_output.append(("%2s%s" %
2304 (shortopt and "-%s" % shortopt,
2305 (shortopt and "-%s" % shortopt,
2305 longopt and " --%s%s" %
2306 longopt and " --%s%s" %
2306 (longopt, numqualifier)),
2307 (longopt, numqualifier)),
2307 "%s%s" % (desc,
2308 "%s%s" % (desc,
2308 default
2309 default
2309 and _(" (default: %s)") % default
2310 and _(" (default: %s)") % default
2310 or "")))
2311 or "")))
2311 if multioccur:
2312 if multioccur:
2312 msg = _("\n[+] marked option can be specified multiple times")
2313 msg = _("\n[+] marked option can be specified multiple times")
2313 if ui.verbose and name != 'shortlist':
2314 if ui.verbose and name != 'shortlist':
2314 opt_output.append((msg, None))
2315 opt_output.append((msg, None))
2315 else:
2316 else:
2316 opt_output.insert(-1, (msg, None))
2317 opt_output.insert(-1, (msg, None))
2317
2318
2318 if not name:
2319 if not name:
2319 ui.write(_("\nadditional help topics:\n\n"))
2320 ui.write(_("\nadditional help topics:\n\n"))
2320 topics = []
2321 topics = []
2321 for names, header, doc in help.helptable:
2322 for names, header, doc in help.helptable:
2322 topics.append((sorted(names, key=len, reverse=True)[0], header))
2323 topics.append((sorted(names, key=len, reverse=True)[0], header))
2323 topics_len = max([len(s[0]) for s in topics])
2324 topics_len = max([len(s[0]) for s in topics])
2324 for t, desc in topics:
2325 for t, desc in topics:
2325 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2326 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2326
2327
2327 if opt_output:
2328 if opt_output:
2328 colwidth = encoding.colwidth
2329 colwidth = encoding.colwidth
2329 # normalize: (opt or message, desc or None, width of opt)
2330 # normalize: (opt or message, desc or None, width of opt)
2330 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2331 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2331 for opt, desc in opt_output]
2332 for opt, desc in opt_output]
2332 hanging = max([e[2] for e in entries])
2333 hanging = max([e[2] for e in entries])
2333 for opt, desc, width in entries:
2334 for opt, desc, width in entries:
2334 if desc:
2335 if desc:
2335 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2336 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2336 hangindent = ' ' * (hanging + 3)
2337 hangindent = ' ' * (hanging + 3)
2337 ui.write('%s\n' % (util.wrap(desc, textwidth,
2338 ui.write('%s\n' % (util.wrap(desc, textwidth,
2338 initindent=initindent,
2339 initindent=initindent,
2339 hangindent=hangindent)))
2340 hangindent=hangindent)))
2340 else:
2341 else:
2341 ui.write("%s\n" % opt)
2342 ui.write("%s\n" % opt)
2342
2343
2343 def identify(ui, repo, source=None, rev=None,
2344 def identify(ui, repo, source=None, rev=None,
2344 num=None, id=None, branch=None, tags=None, bookmarks=None):
2345 num=None, id=None, branch=None, tags=None, bookmarks=None):
2345 """identify the working copy or specified revision
2346 """identify the working copy or specified revision
2346
2347
2347 Print a summary identifying the repository state at REV using one or
2348 Print a summary identifying the repository state at REV using one or
2348 two parent hash identifiers, followed by a "+" if the working
2349 two parent hash identifiers, followed by a "+" if the working
2349 directory has uncommitted changes, the branch name (if not default),
2350 directory has uncommitted changes, the branch name (if not default),
2350 a list of tags, and a list of bookmarks.
2351 a list of tags, and a list of bookmarks.
2351
2352
2352 When REV is not given, print a summary of the current state of the
2353 When REV is not given, print a summary of the current state of the
2353 repository.
2354 repository.
2354
2355
2355 Specifying a path to a repository root or Mercurial bundle will
2356 Specifying a path to a repository root or Mercurial bundle will
2356 cause lookup to operate on that repository/bundle.
2357 cause lookup to operate on that repository/bundle.
2357
2358
2358 Returns 0 if successful.
2359 Returns 0 if successful.
2359 """
2360 """
2360
2361
2361 if not repo and not source:
2362 if not repo and not source:
2362 raise util.Abort(_("there is no Mercurial repository here "
2363 raise util.Abort(_("there is no Mercurial repository here "
2363 "(.hg not found)"))
2364 "(.hg not found)"))
2364
2365
2365 hexfunc = ui.debugflag and hex or short
2366 hexfunc = ui.debugflag and hex or short
2366 default = not (num or id or branch or tags or bookmarks)
2367 default = not (num or id or branch or tags or bookmarks)
2367 output = []
2368 output = []
2368 revs = []
2369 revs = []
2369
2370
2370 if source:
2371 if source:
2371 source, branches = hg.parseurl(ui.expandpath(source))
2372 source, branches = hg.parseurl(ui.expandpath(source))
2372 repo = hg.repository(ui, source)
2373 repo = hg.repository(ui, source)
2373 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2374 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2374
2375
2375 if not repo.local():
2376 if not repo.local():
2376 if num or branch or tags:
2377 if num or branch or tags:
2377 raise util.Abort(
2378 raise util.Abort(
2378 _("can't query remote revision number, branch, or tags"))
2379 _("can't query remote revision number, branch, or tags"))
2379 if not rev and revs:
2380 if not rev and revs:
2380 rev = revs[0]
2381 rev = revs[0]
2381 if not rev:
2382 if not rev:
2382 rev = "tip"
2383 rev = "tip"
2383
2384
2384 remoterev = repo.lookup(rev)
2385 remoterev = repo.lookup(rev)
2385 if default or id:
2386 if default or id:
2386 output = [hexfunc(remoterev)]
2387 output = [hexfunc(remoterev)]
2387
2388
2388 def getbms():
2389 def getbms():
2389 bms = []
2390 bms = []
2390
2391
2391 if 'bookmarks' in repo.listkeys('namespaces'):
2392 if 'bookmarks' in repo.listkeys('namespaces'):
2392 hexremoterev = hex(remoterev)
2393 hexremoterev = hex(remoterev)
2393 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2394 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2394 if bmr == hexremoterev]
2395 if bmr == hexremoterev]
2395
2396
2396 return bms
2397 return bms
2397
2398
2398 if bookmarks:
2399 if bookmarks:
2399 output.extend(getbms())
2400 output.extend(getbms())
2400 elif default and not ui.quiet:
2401 elif default and not ui.quiet:
2401 # multiple bookmarks for a single parent separated by '/'
2402 # multiple bookmarks for a single parent separated by '/'
2402 bm = '/'.join(getbms())
2403 bm = '/'.join(getbms())
2403 if bm:
2404 if bm:
2404 output.append(bm)
2405 output.append(bm)
2405 else:
2406 else:
2406 if not rev:
2407 if not rev:
2407 ctx = repo[None]
2408 ctx = repo[None]
2408 parents = ctx.parents()
2409 parents = ctx.parents()
2409 changed = ""
2410 changed = ""
2410 if default or id or num:
2411 if default or id or num:
2411 changed = util.any(repo.status()) and "+" or ""
2412 changed = util.any(repo.status()) and "+" or ""
2412 if default or id:
2413 if default or id:
2413 output = ["%s%s" %
2414 output = ["%s%s" %
2414 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2415 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2415 if num:
2416 if num:
2416 output.append("%s%s" %
2417 output.append("%s%s" %
2417 ('+'.join([str(p.rev()) for p in parents]), changed))
2418 ('+'.join([str(p.rev()) for p in parents]), changed))
2418 else:
2419 else:
2419 ctx = cmdutil.revsingle(repo, rev)
2420 ctx = cmdutil.revsingle(repo, rev)
2420 if default or id:
2421 if default or id:
2421 output = [hexfunc(ctx.node())]
2422 output = [hexfunc(ctx.node())]
2422 if num:
2423 if num:
2423 output.append(str(ctx.rev()))
2424 output.append(str(ctx.rev()))
2424
2425
2425 if default and not ui.quiet:
2426 if default and not ui.quiet:
2426 b = ctx.branch()
2427 b = ctx.branch()
2427 if b != 'default':
2428 if b != 'default':
2428 output.append("(%s)" % b)
2429 output.append("(%s)" % b)
2429
2430
2430 # multiple tags for a single parent separated by '/'
2431 # multiple tags for a single parent separated by '/'
2431 t = '/'.join(ctx.tags())
2432 t = '/'.join(ctx.tags())
2432 if t:
2433 if t:
2433 output.append(t)
2434 output.append(t)
2434
2435
2435 # multiple bookmarks for a single parent separated by '/'
2436 # multiple bookmarks for a single parent separated by '/'
2436 bm = '/'.join(ctx.bookmarks())
2437 bm = '/'.join(ctx.bookmarks())
2437 if bm:
2438 if bm:
2438 output.append(bm)
2439 output.append(bm)
2439 else:
2440 else:
2440 if branch:
2441 if branch:
2441 output.append(ctx.branch())
2442 output.append(ctx.branch())
2442
2443
2443 if tags:
2444 if tags:
2444 output.extend(ctx.tags())
2445 output.extend(ctx.tags())
2445
2446
2446 if bookmarks:
2447 if bookmarks:
2447 output.extend(ctx.bookmarks())
2448 output.extend(ctx.bookmarks())
2448
2449
2449 ui.write("%s\n" % ' '.join(output))
2450 ui.write("%s\n" % ' '.join(output))
2450
2451
2451 def import_(ui, repo, patch1, *patches, **opts):
2452 def import_(ui, repo, patch1, *patches, **opts):
2452 """import an ordered set of patches
2453 """import an ordered set of patches
2453
2454
2454 Import a list of patches and commit them individually (unless
2455 Import a list of patches and commit them individually (unless
2455 --no-commit is specified).
2456 --no-commit is specified).
2456
2457
2457 If there are outstanding changes in the working directory, import
2458 If there are outstanding changes in the working directory, import
2458 will abort unless given the -f/--force flag.
2459 will abort unless given the -f/--force flag.
2459
2460
2460 You can import a patch straight from a mail message. Even patches
2461 You can import a patch straight from a mail message. Even patches
2461 as attachments work (to use the body part, it must have type
2462 as attachments work (to use the body part, it must have type
2462 text/plain or text/x-patch). From and Subject headers of the email
2463 text/plain or text/x-patch). From and Subject headers of the email
2463 message are used as the default committer and commit message. All
2464 message are used as the default committer and commit message. All
2464 text/plain body parts before the first diff are added to the commit
2465 text/plain body parts before the first diff are added to the commit
2465 message.
2466 message.
2466
2467
2467 If the imported patch was generated by :hg:`export`, user and
2468 If the imported patch was generated by :hg:`export`, user and
2468 description from patch override values from message headers and
2469 description from patch override values from message headers and
2469 body. Values given on command line with -m/--message and -u/--user
2470 body. Values given on command line with -m/--message and -u/--user
2470 override these.
2471 override these.
2471
2472
2472 If --exact is specified, import will set the working directory to
2473 If --exact is specified, import will set the working directory to
2473 the parent of each patch before applying it, and will abort if the
2474 the parent of each patch before applying it, and will abort if the
2474 resulting changeset has a different ID than the one recorded in
2475 resulting changeset has a different ID than the one recorded in
2475 the patch. This may happen due to character set problems or other
2476 the patch. This may happen due to character set problems or other
2476 deficiencies in the text patch format.
2477 deficiencies in the text patch format.
2477
2478
2478 With -s/--similarity, hg will attempt to discover renames and
2479 With -s/--similarity, hg will attempt to discover renames and
2479 copies in the patch in the same way as 'addremove'.
2480 copies in the patch in the same way as 'addremove'.
2480
2481
2481 To read a patch from standard input, use "-" as the patch name. If
2482 To read a patch from standard input, use "-" as the patch name. If
2482 a URL is specified, the patch will be downloaded from it.
2483 a URL is specified, the patch will be downloaded from it.
2483 See :hg:`help dates` for a list of formats valid for -d/--date.
2484 See :hg:`help dates` for a list of formats valid for -d/--date.
2484
2485
2485 Returns 0 on success.
2486 Returns 0 on success.
2486 """
2487 """
2487 patches = (patch1,) + patches
2488 patches = (patch1,) + patches
2488
2489
2489 date = opts.get('date')
2490 date = opts.get('date')
2490 if date:
2491 if date:
2491 opts['date'] = util.parsedate(date)
2492 opts['date'] = util.parsedate(date)
2492
2493
2493 try:
2494 try:
2494 sim = float(opts.get('similarity') or 0)
2495 sim = float(opts.get('similarity') or 0)
2495 except ValueError:
2496 except ValueError:
2496 raise util.Abort(_('similarity must be a number'))
2497 raise util.Abort(_('similarity must be a number'))
2497 if sim < 0 or sim > 100:
2498 if sim < 0 or sim > 100:
2498 raise util.Abort(_('similarity must be between 0 and 100'))
2499 raise util.Abort(_('similarity must be between 0 and 100'))
2499
2500
2500 if opts.get('exact') or not opts.get('force'):
2501 if opts.get('exact') or not opts.get('force'):
2501 cmdutil.bail_if_changed(repo)
2502 cmdutil.bail_if_changed(repo)
2502
2503
2503 d = opts["base"]
2504 d = opts["base"]
2504 strip = opts["strip"]
2505 strip = opts["strip"]
2505 wlock = lock = None
2506 wlock = lock = None
2506 msgs = []
2507 msgs = []
2507
2508
2508 def tryone(ui, hunk):
2509 def tryone(ui, hunk):
2509 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2510 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2510 patch.extract(ui, hunk)
2511 patch.extract(ui, hunk)
2511
2512
2512 if not tmpname:
2513 if not tmpname:
2513 return None
2514 return None
2514 commitid = _('to working directory')
2515 commitid = _('to working directory')
2515
2516
2516 try:
2517 try:
2517 cmdline_message = cmdutil.logmessage(opts)
2518 cmdline_message = cmdutil.logmessage(opts)
2518 if cmdline_message:
2519 if cmdline_message:
2519 # pick up the cmdline msg
2520 # pick up the cmdline msg
2520 message = cmdline_message
2521 message = cmdline_message
2521 elif message:
2522 elif message:
2522 # pick up the patch msg
2523 # pick up the patch msg
2523 message = message.strip()
2524 message = message.strip()
2524 else:
2525 else:
2525 # launch the editor
2526 # launch the editor
2526 message = None
2527 message = None
2527 ui.debug('message:\n%s\n' % message)
2528 ui.debug('message:\n%s\n' % message)
2528
2529
2529 wp = repo.parents()
2530 wp = repo.parents()
2530 if opts.get('exact'):
2531 if opts.get('exact'):
2531 if not nodeid or not p1:
2532 if not nodeid or not p1:
2532 raise util.Abort(_('not a Mercurial patch'))
2533 raise util.Abort(_('not a Mercurial patch'))
2533 p1 = repo.lookup(p1)
2534 p1 = repo.lookup(p1)
2534 p2 = repo.lookup(p2 or hex(nullid))
2535 p2 = repo.lookup(p2 or hex(nullid))
2535
2536
2536 if p1 != wp[0].node():
2537 if p1 != wp[0].node():
2537 hg.clean(repo, p1)
2538 hg.clean(repo, p1)
2538 repo.dirstate.setparents(p1, p2)
2539 repo.dirstate.setparents(p1, p2)
2539 elif p2:
2540 elif p2:
2540 try:
2541 try:
2541 p1 = repo.lookup(p1)
2542 p1 = repo.lookup(p1)
2542 p2 = repo.lookup(p2)
2543 p2 = repo.lookup(p2)
2543 if p1 == wp[0].node():
2544 if p1 == wp[0].node():
2544 repo.dirstate.setparents(p1, p2)
2545 repo.dirstate.setparents(p1, p2)
2545 except error.RepoError:
2546 except error.RepoError:
2546 pass
2547 pass
2547 if opts.get('exact') or opts.get('import_branch'):
2548 if opts.get('exact') or opts.get('import_branch'):
2548 repo.dirstate.setbranch(branch or 'default')
2549 repo.dirstate.setbranch(branch or 'default')
2549
2550
2550 files = {}
2551 files = {}
2551 try:
2552 try:
2552 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2553 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2553 files=files, eolmode=None)
2554 files=files, eolmode=None)
2554 finally:
2555 finally:
2555 files = cmdutil.updatedir(ui, repo, files,
2556 files = cmdutil.updatedir(ui, repo, files,
2556 similarity=sim / 100.0)
2557 similarity=sim / 100.0)
2557 if opts.get('no_commit'):
2558 if opts.get('no_commit'):
2558 if message:
2559 if message:
2559 msgs.append(message)
2560 msgs.append(message)
2560 else:
2561 else:
2561 if opts.get('exact'):
2562 if opts.get('exact'):
2562 m = None
2563 m = None
2563 else:
2564 else:
2564 m = cmdutil.matchfiles(repo, files or [])
2565 m = cmdutil.matchfiles(repo, files or [])
2565 n = repo.commit(message, opts.get('user') or user,
2566 n = repo.commit(message, opts.get('user') or user,
2566 opts.get('date') or date, match=m,
2567 opts.get('date') or date, match=m,
2567 editor=cmdutil.commiteditor)
2568 editor=cmdutil.commiteditor)
2568 if opts.get('exact'):
2569 if opts.get('exact'):
2569 if hex(n) != nodeid:
2570 if hex(n) != nodeid:
2570 repo.rollback()
2571 repo.rollback()
2571 raise util.Abort(_('patch is damaged'
2572 raise util.Abort(_('patch is damaged'
2572 ' or loses information'))
2573 ' or loses information'))
2573 # Force a dirstate write so that the next transaction
2574 # Force a dirstate write so that the next transaction
2574 # backs up an up-to-date file.
2575 # backs up an up-to-date file.
2575 repo.dirstate.write()
2576 repo.dirstate.write()
2576 if n:
2577 if n:
2577 commitid = short(n)
2578 commitid = short(n)
2578
2579
2579 return commitid
2580 return commitid
2580 finally:
2581 finally:
2581 os.unlink(tmpname)
2582 os.unlink(tmpname)
2582
2583
2583 try:
2584 try:
2584 wlock = repo.wlock()
2585 wlock = repo.wlock()
2585 lock = repo.lock()
2586 lock = repo.lock()
2586 lastcommit = None
2587 lastcommit = None
2587 for p in patches:
2588 for p in patches:
2588 pf = os.path.join(d, p)
2589 pf = os.path.join(d, p)
2589
2590
2590 if pf == '-':
2591 if pf == '-':
2591 ui.status(_("applying patch from stdin\n"))
2592 ui.status(_("applying patch from stdin\n"))
2592 pf = sys.stdin
2593 pf = sys.stdin
2593 else:
2594 else:
2594 ui.status(_("applying %s\n") % p)
2595 ui.status(_("applying %s\n") % p)
2595 pf = url.open(ui, pf)
2596 pf = url.open(ui, pf)
2596
2597
2597 haspatch = False
2598 haspatch = False
2598 for hunk in patch.split(pf):
2599 for hunk in patch.split(pf):
2599 commitid = tryone(ui, hunk)
2600 commitid = tryone(ui, hunk)
2600 if commitid:
2601 if commitid:
2601 haspatch = True
2602 haspatch = True
2602 if lastcommit:
2603 if lastcommit:
2603 ui.status(_('applied %s\n') % lastcommit)
2604 ui.status(_('applied %s\n') % lastcommit)
2604 lastcommit = commitid
2605 lastcommit = commitid
2605
2606
2606 if not haspatch:
2607 if not haspatch:
2607 raise util.Abort(_('no diffs found'))
2608 raise util.Abort(_('no diffs found'))
2608
2609
2609 if msgs:
2610 if msgs:
2610 repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
2611 repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
2611 finally:
2612 finally:
2612 release(lock, wlock)
2613 release(lock, wlock)
2613
2614
2614 def incoming(ui, repo, source="default", **opts):
2615 def incoming(ui, repo, source="default", **opts):
2615 """show new changesets found in source
2616 """show new changesets found in source
2616
2617
2617 Show new changesets found in the specified path/URL or the default
2618 Show new changesets found in the specified path/URL or the default
2618 pull location. These are the changesets that would have been pulled
2619 pull location. These are the changesets that would have been pulled
2619 if a pull had been requested at the time you issued this command.
2620 if a pull had been requested at the time you issued this command.
2620
2621
2621 For a remote repository, using --bundle avoids downloading the
2622 For a remote repository, using --bundle avoids downloading the
2622 changesets twice if the incoming is followed by a pull.
2623 changesets twice if the incoming is followed by a pull.
2623
2624
2624 See pull for valid source format details.
2625 See pull for valid source format details.
2625
2626
2626 Returns 0 if there are incoming changes, 1 otherwise.
2627 Returns 0 if there are incoming changes, 1 otherwise.
2627 """
2628 """
2628 if opts.get('bundle') and opts.get('subrepos'):
2629 if opts.get('bundle') and opts.get('subrepos'):
2629 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2630 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2630
2631
2631 if opts.get('bookmarks'):
2632 if opts.get('bookmarks'):
2632 source, branches = hg.parseurl(ui.expandpath(source),
2633 source, branches = hg.parseurl(ui.expandpath(source),
2633 opts.get('branch'))
2634 opts.get('branch'))
2634 other = hg.repository(hg.remoteui(repo, opts), source)
2635 other = hg.repository(hg.remoteui(repo, opts), source)
2635 if 'bookmarks' not in other.listkeys('namespaces'):
2636 if 'bookmarks' not in other.listkeys('namespaces'):
2636 ui.warn(_("remote doesn't support bookmarks\n"))
2637 ui.warn(_("remote doesn't support bookmarks\n"))
2637 return 0
2638 return 0
2638 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2639 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2639 return bookmarks.diff(ui, repo, other)
2640 return bookmarks.diff(ui, repo, other)
2640
2641
2641 ret = hg.incoming(ui, repo, source, opts)
2642 ret = hg.incoming(ui, repo, source, opts)
2642 return ret
2643 return ret
2643
2644
2644 def init(ui, dest=".", **opts):
2645 def init(ui, dest=".", **opts):
2645 """create a new repository in the given directory
2646 """create a new repository in the given directory
2646
2647
2647 Initialize a new repository in the given directory. If the given
2648 Initialize a new repository in the given directory. If the given
2648 directory does not exist, it will be created.
2649 directory does not exist, it will be created.
2649
2650
2650 If no directory is given, the current directory is used.
2651 If no directory is given, the current directory is used.
2651
2652
2652 It is possible to specify an ``ssh://`` URL as the destination.
2653 It is possible to specify an ``ssh://`` URL as the destination.
2653 See :hg:`help urls` for more information.
2654 See :hg:`help urls` for more information.
2654
2655
2655 Returns 0 on success.
2656 Returns 0 on success.
2656 """
2657 """
2657 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2658 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2658
2659
2659 def locate(ui, repo, *pats, **opts):
2660 def locate(ui, repo, *pats, **opts):
2660 """locate files matching specific patterns
2661 """locate files matching specific patterns
2661
2662
2662 Print files under Mercurial control in the working directory whose
2663 Print files under Mercurial control in the working directory whose
2663 names match the given patterns.
2664 names match the given patterns.
2664
2665
2665 By default, this command searches all directories in the working
2666 By default, this command searches all directories in the working
2666 directory. To search just the current directory and its
2667 directory. To search just the current directory and its
2667 subdirectories, use "--include .".
2668 subdirectories, use "--include .".
2668
2669
2669 If no patterns are given to match, this command prints the names
2670 If no patterns are given to match, this command prints the names
2670 of all files under Mercurial control in the working directory.
2671 of all files under Mercurial control in the working directory.
2671
2672
2672 If you want to feed the output of this command into the "xargs"
2673 If you want to feed the output of this command into the "xargs"
2673 command, use the -0 option to both this command and "xargs". This
2674 command, use the -0 option to both this command and "xargs". This
2674 will avoid the problem of "xargs" treating single filenames that
2675 will avoid the problem of "xargs" treating single filenames that
2675 contain whitespace as multiple filenames.
2676 contain whitespace as multiple filenames.
2676
2677
2677 Returns 0 if a match is found, 1 otherwise.
2678 Returns 0 if a match is found, 1 otherwise.
2678 """
2679 """
2679 end = opts.get('print0') and '\0' or '\n'
2680 end = opts.get('print0') and '\0' or '\n'
2680 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2681 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2681
2682
2682 ret = 1
2683 ret = 1
2683 m = cmdutil.match(repo, pats, opts, default='relglob')
2684 m = cmdutil.match(repo, pats, opts, default='relglob')
2684 m.bad = lambda x, y: False
2685 m.bad = lambda x, y: False
2685 for abs in repo[rev].walk(m):
2686 for abs in repo[rev].walk(m):
2686 if not rev and abs not in repo.dirstate:
2687 if not rev and abs not in repo.dirstate:
2687 continue
2688 continue
2688 if opts.get('fullpath'):
2689 if opts.get('fullpath'):
2689 ui.write(repo.wjoin(abs), end)
2690 ui.write(repo.wjoin(abs), end)
2690 else:
2691 else:
2691 ui.write(((pats and m.rel(abs)) or abs), end)
2692 ui.write(((pats and m.rel(abs)) or abs), end)
2692 ret = 0
2693 ret = 0
2693
2694
2694 return ret
2695 return ret
2695
2696
2696 def log(ui, repo, *pats, **opts):
2697 def log(ui, repo, *pats, **opts):
2697 """show revision history of entire repository or files
2698 """show revision history of entire repository or files
2698
2699
2699 Print the revision history of the specified files or the entire
2700 Print the revision history of the specified files or the entire
2700 project.
2701 project.
2701
2702
2702 File history is shown without following rename or copy history of
2703 File history is shown without following rename or copy history of
2703 files. Use -f/--follow with a filename to follow history across
2704 files. Use -f/--follow with a filename to follow history across
2704 renames and copies. --follow without a filename will only show
2705 renames and copies. --follow without a filename will only show
2705 ancestors or descendants of the starting revision. --follow-first
2706 ancestors or descendants of the starting revision. --follow-first
2706 only follows the first parent of merge revisions.
2707 only follows the first parent of merge revisions.
2707
2708
2708 If no revision range is specified, the default is ``tip:0`` unless
2709 If no revision range is specified, the default is ``tip:0`` unless
2709 --follow is set, in which case the working directory parent is
2710 --follow is set, in which case the working directory parent is
2710 used as the starting revision. You can specify a revision set for
2711 used as the starting revision. You can specify a revision set for
2711 log, see :hg:`help revsets` for more information.
2712 log, see :hg:`help revsets` for more information.
2712
2713
2713 See :hg:`help dates` for a list of formats valid for -d/--date.
2714 See :hg:`help dates` for a list of formats valid for -d/--date.
2714
2715
2715 By default this command prints revision number and changeset id,
2716 By default this command prints revision number and changeset id,
2716 tags, non-trivial parents, user, date and time, and a summary for
2717 tags, non-trivial parents, user, date and time, and a summary for
2717 each commit. When the -v/--verbose switch is used, the list of
2718 each commit. When the -v/--verbose switch is used, the list of
2718 changed files and full commit message are shown.
2719 changed files and full commit message are shown.
2719
2720
2720 .. note::
2721 .. note::
2721 log -p/--patch may generate unexpected diff output for merge
2722 log -p/--patch may generate unexpected diff output for merge
2722 changesets, as it will only compare the merge changeset against
2723 changesets, as it will only compare the merge changeset against
2723 its first parent. Also, only files different from BOTH parents
2724 its first parent. Also, only files different from BOTH parents
2724 will appear in files:.
2725 will appear in files:.
2725
2726
2726 Returns 0 on success.
2727 Returns 0 on success.
2727 """
2728 """
2728
2729
2729 matchfn = cmdutil.match(repo, pats, opts)
2730 matchfn = cmdutil.match(repo, pats, opts)
2730 limit = cmdutil.loglimit(opts)
2731 limit = cmdutil.loglimit(opts)
2731 count = 0
2732 count = 0
2732
2733
2733 endrev = None
2734 endrev = None
2734 if opts.get('copies') and opts.get('rev'):
2735 if opts.get('copies') and opts.get('rev'):
2735 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2736 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2736
2737
2737 df = False
2738 df = False
2738 if opts["date"]:
2739 if opts["date"]:
2739 df = util.matchdate(opts["date"])
2740 df = util.matchdate(opts["date"])
2740
2741
2741 branches = opts.get('branch', []) + opts.get('only_branch', [])
2742 branches = opts.get('branch', []) + opts.get('only_branch', [])
2742 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2743 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2743
2744
2744 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2745 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2745 def prep(ctx, fns):
2746 def prep(ctx, fns):
2746 rev = ctx.rev()
2747 rev = ctx.rev()
2747 parents = [p for p in repo.changelog.parentrevs(rev)
2748 parents = [p for p in repo.changelog.parentrevs(rev)
2748 if p != nullrev]
2749 if p != nullrev]
2749 if opts.get('no_merges') and len(parents) == 2:
2750 if opts.get('no_merges') and len(parents) == 2:
2750 return
2751 return
2751 if opts.get('only_merges') and len(parents) != 2:
2752 if opts.get('only_merges') and len(parents) != 2:
2752 return
2753 return
2753 if opts.get('branch') and ctx.branch() not in opts['branch']:
2754 if opts.get('branch') and ctx.branch() not in opts['branch']:
2754 return
2755 return
2755 if df and not df(ctx.date()[0]):
2756 if df and not df(ctx.date()[0]):
2756 return
2757 return
2757 if opts['user'] and not [k for k in opts['user']
2758 if opts['user'] and not [k for k in opts['user']
2758 if k.lower() in ctx.user().lower()]:
2759 if k.lower() in ctx.user().lower()]:
2759 return
2760 return
2760 if opts.get('keyword'):
2761 if opts.get('keyword'):
2761 for k in [kw.lower() for kw in opts['keyword']]:
2762 for k in [kw.lower() for kw in opts['keyword']]:
2762 if (k in ctx.user().lower() or
2763 if (k in ctx.user().lower() or
2763 k in ctx.description().lower() or
2764 k in ctx.description().lower() or
2764 k in " ".join(ctx.files()).lower()):
2765 k in " ".join(ctx.files()).lower()):
2765 break
2766 break
2766 else:
2767 else:
2767 return
2768 return
2768
2769
2769 copies = None
2770 copies = None
2770 if opts.get('copies') and rev:
2771 if opts.get('copies') and rev:
2771 copies = []
2772 copies = []
2772 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2773 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2773 for fn in ctx.files():
2774 for fn in ctx.files():
2774 rename = getrenamed(fn, rev)
2775 rename = getrenamed(fn, rev)
2775 if rename:
2776 if rename:
2776 copies.append((fn, rename[0]))
2777 copies.append((fn, rename[0]))
2777
2778
2778 revmatchfn = None
2779 revmatchfn = None
2779 if opts.get('patch') or opts.get('stat'):
2780 if opts.get('patch') or opts.get('stat'):
2780 if opts.get('follow') or opts.get('follow_first'):
2781 if opts.get('follow') or opts.get('follow_first'):
2781 # note: this might be wrong when following through merges
2782 # note: this might be wrong when following through merges
2782 revmatchfn = cmdutil.match(repo, fns, default='path')
2783 revmatchfn = cmdutil.match(repo, fns, default='path')
2783 else:
2784 else:
2784 revmatchfn = matchfn
2785 revmatchfn = matchfn
2785
2786
2786 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2787 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2787
2788
2788 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2789 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2789 if count == limit:
2790 if count == limit:
2790 break
2791 break
2791 if displayer.flush(ctx.rev()):
2792 if displayer.flush(ctx.rev()):
2792 count += 1
2793 count += 1
2793 displayer.close()
2794 displayer.close()
2794
2795
2795 def manifest(ui, repo, node=None, rev=None):
2796 def manifest(ui, repo, node=None, rev=None):
2796 """output the current or given revision of the project manifest
2797 """output the current or given revision of the project manifest
2797
2798
2798 Print a list of version controlled files for the given revision.
2799 Print a list of version controlled files for the given revision.
2799 If no revision is given, the first parent of the working directory
2800 If no revision is given, the first parent of the working directory
2800 is used, or the null revision if no revision is checked out.
2801 is used, or the null revision if no revision is checked out.
2801
2802
2802 With -v, print file permissions, symlink and executable bits.
2803 With -v, print file permissions, symlink and executable bits.
2803 With --debug, print file revision hashes.
2804 With --debug, print file revision hashes.
2804
2805
2805 Returns 0 on success.
2806 Returns 0 on success.
2806 """
2807 """
2807
2808
2808 if rev and node:
2809 if rev and node:
2809 raise util.Abort(_("please specify just one revision"))
2810 raise util.Abort(_("please specify just one revision"))
2810
2811
2811 if not node:
2812 if not node:
2812 node = rev
2813 node = rev
2813
2814
2814 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2815 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2815 ctx = cmdutil.revsingle(repo, node)
2816 ctx = cmdutil.revsingle(repo, node)
2816 for f in ctx:
2817 for f in ctx:
2817 if ui.debugflag:
2818 if ui.debugflag:
2818 ui.write("%40s " % hex(ctx.manifest()[f]))
2819 ui.write("%40s " % hex(ctx.manifest()[f]))
2819 if ui.verbose:
2820 if ui.verbose:
2820 ui.write(decor[ctx.flags(f)])
2821 ui.write(decor[ctx.flags(f)])
2821 ui.write("%s\n" % f)
2822 ui.write("%s\n" % f)
2822
2823
2823 def merge(ui, repo, node=None, **opts):
2824 def merge(ui, repo, node=None, **opts):
2824 """merge working directory with another revision
2825 """merge working directory with another revision
2825
2826
2826 The current working directory is updated with all changes made in
2827 The current working directory is updated with all changes made in
2827 the requested revision since the last common predecessor revision.
2828 the requested revision since the last common predecessor revision.
2828
2829
2829 Files that changed between either parent are marked as changed for
2830 Files that changed between either parent are marked as changed for
2830 the next commit and a commit must be performed before any further
2831 the next commit and a commit must be performed before any further
2831 updates to the repository are allowed. The next commit will have
2832 updates to the repository are allowed. The next commit will have
2832 two parents.
2833 two parents.
2833
2834
2834 ``--tool`` can be used to specify the merge tool used for file
2835 ``--tool`` can be used to specify the merge tool used for file
2835 merges. It overrides the HGMERGE environment variable and your
2836 merges. It overrides the HGMERGE environment variable and your
2836 configuration files. See :hg:`help merge-tools` for options.
2837 configuration files. See :hg:`help merge-tools` for options.
2837
2838
2838 If no revision is specified, the working directory's parent is a
2839 If no revision is specified, the working directory's parent is a
2839 head revision, and the current branch contains exactly one other
2840 head revision, and the current branch contains exactly one other
2840 head, the other head is merged with by default. Otherwise, an
2841 head, the other head is merged with by default. Otherwise, an
2841 explicit revision with which to merge must be provided.
2842 explicit revision with which to merge must be provided.
2842
2843
2843 :hg:`resolve` must be used to resolve unresolved files.
2844 :hg:`resolve` must be used to resolve unresolved files.
2844
2845
2845 To undo an uncommitted merge, use :hg:`update --clean .` which
2846 To undo an uncommitted merge, use :hg:`update --clean .` which
2846 will check out a clean copy of the original merge parent, losing
2847 will check out a clean copy of the original merge parent, losing
2847 all changes.
2848 all changes.
2848
2849
2849 Returns 0 on success, 1 if there are unresolved files.
2850 Returns 0 on success, 1 if there are unresolved files.
2850 """
2851 """
2851
2852
2852 if opts.get('rev') and node:
2853 if opts.get('rev') and node:
2853 raise util.Abort(_("please specify just one revision"))
2854 raise util.Abort(_("please specify just one revision"))
2854 if not node:
2855 if not node:
2855 node = opts.get('rev')
2856 node = opts.get('rev')
2856
2857
2857 if not node:
2858 if not node:
2858 branch = repo[None].branch()
2859 branch = repo[None].branch()
2859 bheads = repo.branchheads(branch)
2860 bheads = repo.branchheads(branch)
2860 if len(bheads) > 2:
2861 if len(bheads) > 2:
2861 raise util.Abort(_(
2862 raise util.Abort(_(
2862 'branch \'%s\' has %d heads - '
2863 'branch \'%s\' has %d heads - '
2863 'please merge with an explicit rev\n'
2864 'please merge with an explicit rev\n'
2864 '(run \'hg heads .\' to see heads)')
2865 '(run \'hg heads .\' to see heads)')
2865 % (branch, len(bheads)))
2866 % (branch, len(bheads)))
2866
2867
2867 parent = repo.dirstate.p1()
2868 parent = repo.dirstate.p1()
2868 if len(bheads) == 1:
2869 if len(bheads) == 1:
2869 if len(repo.heads()) > 1:
2870 if len(repo.heads()) > 1:
2870 raise util.Abort(_(
2871 raise util.Abort(_(
2871 'branch \'%s\' has one head - '
2872 'branch \'%s\' has one head - '
2872 'please merge with an explicit rev\n'
2873 'please merge with an explicit rev\n'
2873 '(run \'hg heads\' to see all heads)')
2874 '(run \'hg heads\' to see all heads)')
2874 % branch)
2875 % branch)
2875 msg = _('there is nothing to merge')
2876 msg = _('there is nothing to merge')
2876 if parent != repo.lookup(repo[None].branch()):
2877 if parent != repo.lookup(repo[None].branch()):
2877 msg = _('%s - use "hg update" instead') % msg
2878 msg = _('%s - use "hg update" instead') % msg
2878 raise util.Abort(msg)
2879 raise util.Abort(msg)
2879
2880
2880 if parent not in bheads:
2881 if parent not in bheads:
2881 raise util.Abort(_('working dir not at a head rev - '
2882 raise util.Abort(_('working dir not at a head rev - '
2882 'use "hg update" or merge with an explicit rev'))
2883 'use "hg update" or merge with an explicit rev'))
2883 node = parent == bheads[0] and bheads[-1] or bheads[0]
2884 node = parent == bheads[0] and bheads[-1] or bheads[0]
2884 else:
2885 else:
2885 node = cmdutil.revsingle(repo, node).node()
2886 node = cmdutil.revsingle(repo, node).node()
2886
2887
2887 if opts.get('preview'):
2888 if opts.get('preview'):
2888 # find nodes that are ancestors of p2 but not of p1
2889 # find nodes that are ancestors of p2 but not of p1
2889 p1 = repo.lookup('.')
2890 p1 = repo.lookup('.')
2890 p2 = repo.lookup(node)
2891 p2 = repo.lookup(node)
2891 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2892 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2892
2893
2893 displayer = cmdutil.show_changeset(ui, repo, opts)
2894 displayer = cmdutil.show_changeset(ui, repo, opts)
2894 for node in nodes:
2895 for node in nodes:
2895 displayer.show(repo[node])
2896 displayer.show(repo[node])
2896 displayer.close()
2897 displayer.close()
2897 return 0
2898 return 0
2898
2899
2899 try:
2900 try:
2900 # ui.forcemerge is an internal variable, do not document
2901 # ui.forcemerge is an internal variable, do not document
2901 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2902 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2902 return hg.merge(repo, node, force=opts.get('force'))
2903 return hg.merge(repo, node, force=opts.get('force'))
2903 finally:
2904 finally:
2904 ui.setconfig('ui', 'forcemerge', '')
2905 ui.setconfig('ui', 'forcemerge', '')
2905
2906
2906 def outgoing(ui, repo, dest=None, **opts):
2907 def outgoing(ui, repo, dest=None, **opts):
2907 """show changesets not found in the destination
2908 """show changesets not found in the destination
2908
2909
2909 Show changesets not found in the specified destination repository
2910 Show changesets not found in the specified destination repository
2910 or the default push location. These are the changesets that would
2911 or the default push location. These are the changesets that would
2911 be pushed if a push was requested.
2912 be pushed if a push was requested.
2912
2913
2913 See pull for details of valid destination formats.
2914 See pull for details of valid destination formats.
2914
2915
2915 Returns 0 if there are outgoing changes, 1 otherwise.
2916 Returns 0 if there are outgoing changes, 1 otherwise.
2916 """
2917 """
2917
2918
2918 if opts.get('bookmarks'):
2919 if opts.get('bookmarks'):
2919 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2920 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2920 dest, branches = hg.parseurl(dest, opts.get('branch'))
2921 dest, branches = hg.parseurl(dest, opts.get('branch'))
2921 other = hg.repository(hg.remoteui(repo, opts), dest)
2922 other = hg.repository(hg.remoteui(repo, opts), dest)
2922 if 'bookmarks' not in other.listkeys('namespaces'):
2923 if 'bookmarks' not in other.listkeys('namespaces'):
2923 ui.warn(_("remote doesn't support bookmarks\n"))
2924 ui.warn(_("remote doesn't support bookmarks\n"))
2924 return 0
2925 return 0
2925 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2926 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2926 return bookmarks.diff(ui, other, repo)
2927 return bookmarks.diff(ui, other, repo)
2927
2928
2928 ret = hg.outgoing(ui, repo, dest, opts)
2929 ret = hg.outgoing(ui, repo, dest, opts)
2929 return ret
2930 return ret
2930
2931
2931 def parents(ui, repo, file_=None, **opts):
2932 def parents(ui, repo, file_=None, **opts):
2932 """show the parents of the working directory or revision
2933 """show the parents of the working directory or revision
2933
2934
2934 Print the working directory's parent revisions. If a revision is
2935 Print the working directory's parent revisions. If a revision is
2935 given via -r/--rev, the parent of that revision will be printed.
2936 given via -r/--rev, the parent of that revision will be printed.
2936 If a file argument is given, the revision in which the file was
2937 If a file argument is given, the revision in which the file was
2937 last changed (before the working directory revision or the
2938 last changed (before the working directory revision or the
2938 argument to --rev if given) is printed.
2939 argument to --rev if given) is printed.
2939
2940
2940 Returns 0 on success.
2941 Returns 0 on success.
2941 """
2942 """
2942
2943
2943 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
2944 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
2944
2945
2945 if file_:
2946 if file_:
2946 m = cmdutil.match(repo, (file_,), opts)
2947 m = cmdutil.match(repo, (file_,), opts)
2947 if m.anypats() or len(m.files()) != 1:
2948 if m.anypats() or len(m.files()) != 1:
2948 raise util.Abort(_('can only specify an explicit filename'))
2949 raise util.Abort(_('can only specify an explicit filename'))
2949 file_ = m.files()[0]
2950 file_ = m.files()[0]
2950 filenodes = []
2951 filenodes = []
2951 for cp in ctx.parents():
2952 for cp in ctx.parents():
2952 if not cp:
2953 if not cp:
2953 continue
2954 continue
2954 try:
2955 try:
2955 filenodes.append(cp.filenode(file_))
2956 filenodes.append(cp.filenode(file_))
2956 except error.LookupError:
2957 except error.LookupError:
2957 pass
2958 pass
2958 if not filenodes:
2959 if not filenodes:
2959 raise util.Abort(_("'%s' not found in manifest!") % file_)
2960 raise util.Abort(_("'%s' not found in manifest!") % file_)
2960 fl = repo.file(file_)
2961 fl = repo.file(file_)
2961 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2962 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2962 else:
2963 else:
2963 p = [cp.node() for cp in ctx.parents()]
2964 p = [cp.node() for cp in ctx.parents()]
2964
2965
2965 displayer = cmdutil.show_changeset(ui, repo, opts)
2966 displayer = cmdutil.show_changeset(ui, repo, opts)
2966 for n in p:
2967 for n in p:
2967 if n != nullid:
2968 if n != nullid:
2968 displayer.show(repo[n])
2969 displayer.show(repo[n])
2969 displayer.close()
2970 displayer.close()
2970
2971
2971 def paths(ui, repo, search=None):
2972 def paths(ui, repo, search=None):
2972 """show aliases for remote repositories
2973 """show aliases for remote repositories
2973
2974
2974 Show definition of symbolic path name NAME. If no name is given,
2975 Show definition of symbolic path name NAME. If no name is given,
2975 show definition of all available names.
2976 show definition of all available names.
2976
2977
2977 Path names are defined in the [paths] section of your
2978 Path names are defined in the [paths] section of your
2978 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
2979 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
2979 repository, ``.hg/hgrc`` is used, too.
2980 repository, ``.hg/hgrc`` is used, too.
2980
2981
2981 The path names ``default`` and ``default-push`` have a special
2982 The path names ``default`` and ``default-push`` have a special
2982 meaning. When performing a push or pull operation, they are used
2983 meaning. When performing a push or pull operation, they are used
2983 as fallbacks if no location is specified on the command-line.
2984 as fallbacks if no location is specified on the command-line.
2984 When ``default-push`` is set, it will be used for push and
2985 When ``default-push`` is set, it will be used for push and
2985 ``default`` will be used for pull; otherwise ``default`` is used
2986 ``default`` will be used for pull; otherwise ``default`` is used
2986 as the fallback for both. When cloning a repository, the clone
2987 as the fallback for both. When cloning a repository, the clone
2987 source is written as ``default`` in ``.hg/hgrc``. Note that
2988 source is written as ``default`` in ``.hg/hgrc``. Note that
2988 ``default`` and ``default-push`` apply to all inbound (e.g.
2989 ``default`` and ``default-push`` apply to all inbound (e.g.
2989 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2990 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2990 :hg:`bundle`) operations.
2991 :hg:`bundle`) operations.
2991
2992
2992 See :hg:`help urls` for more information.
2993 See :hg:`help urls` for more information.
2993
2994
2994 Returns 0 on success.
2995 Returns 0 on success.
2995 """
2996 """
2996 if search:
2997 if search:
2997 for name, path in ui.configitems("paths"):
2998 for name, path in ui.configitems("paths"):
2998 if name == search:
2999 if name == search:
2999 ui.write("%s\n" % url.hidepassword(path))
3000 ui.write("%s\n" % url.hidepassword(path))
3000 return
3001 return
3001 ui.warn(_("not found!\n"))
3002 ui.warn(_("not found!\n"))
3002 return 1
3003 return 1
3003 else:
3004 else:
3004 for name, path in ui.configitems("paths"):
3005 for name, path in ui.configitems("paths"):
3005 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
3006 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
3006
3007
3007 def postincoming(ui, repo, modheads, optupdate, checkout):
3008 def postincoming(ui, repo, modheads, optupdate, checkout):
3008 if modheads == 0:
3009 if modheads == 0:
3009 return
3010 return
3010 if optupdate:
3011 if optupdate:
3011 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
3012 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
3012 return hg.update(repo, checkout)
3013 return hg.update(repo, checkout)
3013 else:
3014 else:
3014 ui.status(_("not updating, since new heads added\n"))
3015 ui.status(_("not updating, since new heads added\n"))
3015 if modheads > 1:
3016 if modheads > 1:
3016 currentbranchheads = len(repo.branchheads())
3017 currentbranchheads = len(repo.branchheads())
3017 if currentbranchheads == modheads:
3018 if currentbranchheads == modheads:
3018 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3019 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3019 elif currentbranchheads > 1:
3020 elif currentbranchheads > 1:
3020 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
3021 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
3021 else:
3022 else:
3022 ui.status(_("(run 'hg heads' to see heads)\n"))
3023 ui.status(_("(run 'hg heads' to see heads)\n"))
3023 else:
3024 else:
3024 ui.status(_("(run 'hg update' to get a working copy)\n"))
3025 ui.status(_("(run 'hg update' to get a working copy)\n"))
3025
3026
3026 def pull(ui, repo, source="default", **opts):
3027 def pull(ui, repo, source="default", **opts):
3027 """pull changes from the specified source
3028 """pull changes from the specified source
3028
3029
3029 Pull changes from a remote repository to a local one.
3030 Pull changes from a remote repository to a local one.
3030
3031
3031 This finds all changes from the repository at the specified path
3032 This finds all changes from the repository at the specified path
3032 or URL and adds them to a local repository (the current one unless
3033 or URL and adds them to a local repository (the current one unless
3033 -R is specified). By default, this does not update the copy of the
3034 -R is specified). By default, this does not update the copy of the
3034 project in the working directory.
3035 project in the working directory.
3035
3036
3036 Use :hg:`incoming` if you want to see what would have been added
3037 Use :hg:`incoming` if you want to see what would have been added
3037 by a pull at the time you issued this command. If you then decide
3038 by a pull at the time you issued this command. If you then decide
3038 to add those changes to the repository, you should use :hg:`pull
3039 to add those changes to the repository, you should use :hg:`pull
3039 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3040 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3040
3041
3041 If SOURCE is omitted, the 'default' path will be used.
3042 If SOURCE is omitted, the 'default' path will be used.
3042 See :hg:`help urls` for more information.
3043 See :hg:`help urls` for more information.
3043
3044
3044 Returns 0 on success, 1 if an update had unresolved files.
3045 Returns 0 on success, 1 if an update had unresolved files.
3045 """
3046 """
3046 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3047 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3047 other = hg.repository(hg.remoteui(repo, opts), source)
3048 other = hg.repository(hg.remoteui(repo, opts), source)
3048 ui.status(_('pulling from %s\n') % url.hidepassword(source))
3049 ui.status(_('pulling from %s\n') % url.hidepassword(source))
3049 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3050 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3050
3051
3051 if opts.get('bookmark'):
3052 if opts.get('bookmark'):
3052 if not revs:
3053 if not revs:
3053 revs = []
3054 revs = []
3054 rb = other.listkeys('bookmarks')
3055 rb = other.listkeys('bookmarks')
3055 for b in opts['bookmark']:
3056 for b in opts['bookmark']:
3056 if b not in rb:
3057 if b not in rb:
3057 raise util.Abort(_('remote bookmark %s not found!') % b)
3058 raise util.Abort(_('remote bookmark %s not found!') % b)
3058 revs.append(rb[b])
3059 revs.append(rb[b])
3059
3060
3060 if revs:
3061 if revs:
3061 try:
3062 try:
3062 revs = [other.lookup(rev) for rev in revs]
3063 revs = [other.lookup(rev) for rev in revs]
3063 except error.CapabilityError:
3064 except error.CapabilityError:
3064 err = _("other repository doesn't support revision lookup, "
3065 err = _("other repository doesn't support revision lookup, "
3065 "so a rev cannot be specified.")
3066 "so a rev cannot be specified.")
3066 raise util.Abort(err)
3067 raise util.Abort(err)
3067
3068
3068 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3069 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3069 bookmarks.updatefromremote(ui, repo, other)
3070 bookmarks.updatefromremote(ui, repo, other)
3070 if checkout:
3071 if checkout:
3071 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3072 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3072 repo._subtoppath = source
3073 repo._subtoppath = source
3073 try:
3074 try:
3074 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3075 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3075
3076
3076 finally:
3077 finally:
3077 del repo._subtoppath
3078 del repo._subtoppath
3078
3079
3079 # update specified bookmarks
3080 # update specified bookmarks
3080 if opts.get('bookmark'):
3081 if opts.get('bookmark'):
3081 for b in opts['bookmark']:
3082 for b in opts['bookmark']:
3082 # explicit pull overrides local bookmark if any
3083 # explicit pull overrides local bookmark if any
3083 ui.status(_("importing bookmark %s\n") % b)
3084 ui.status(_("importing bookmark %s\n") % b)
3084 repo._bookmarks[b] = repo[rb[b]].node()
3085 repo._bookmarks[b] = repo[rb[b]].node()
3085 bookmarks.write(repo)
3086 bookmarks.write(repo)
3086
3087
3087 return ret
3088 return ret
3088
3089
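A quick aside on the ``--bookmark`` handling in ``pull`` above: the command first asks the remote for its ``bookmarks`` listkeys namespace, aborts on the first requested name the remote does not know, and only then appends the bookmark nodes to the revisions to pull. A minimal standalone sketch of that lookup; the helper name and the plain ValueError are illustrative, not Mercurial API:

def bookmark_revs(requested, remote_bookmarks):
    # Illustrative sketch only -- not Mercurial API.
    # Map requested bookmark names to the nodes the remote advertises,
    # failing loudly on the first unknown name, as the code above does.
    revs = []
    for name in requested:
        if name not in remote_bookmarks:
            raise ValueError('remote bookmark %s not found!' % name)
        revs.append(remote_bookmarks[name])
    return revs

# Example: the remote advertises two bookmarks and we ask for one of them.
remote = {'feature-x': 'a' * 40, 'stable': 'b' * 40}
assert bookmark_revs(['stable'], remote) == ['b' * 40]
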
3089 def push(ui, repo, dest=None, **opts):
3090 def push(ui, repo, dest=None, **opts):
3090 """push changes to the specified destination
3091 """push changes to the specified destination
3091
3092
3092 Push changesets from the local repository to the specified
3093 Push changesets from the local repository to the specified
3093 destination.
3094 destination.
3094
3095
3095 This operation is symmetrical to pull: it is identical to a pull
3096 This operation is symmetrical to pull: it is identical to a pull
3096 in the destination repository from the current one.
3097 in the destination repository from the current one.
3097
3098
3098 By default, push will not allow creation of new heads at the
3099 By default, push will not allow creation of new heads at the
3099 destination, since multiple heads would make it unclear which head
3100 destination, since multiple heads would make it unclear which head
3100 to use. In this situation, it is recommended to pull and merge
3101 to use. In this situation, it is recommended to pull and merge
3101 before pushing.
3102 before pushing.
3102
3103
3103 Use --new-branch if you want to allow push to create a new named
3104 Use --new-branch if you want to allow push to create a new named
3104 branch that is not present at the destination. This allows you to
3105 branch that is not present at the destination. This allows you to
3105 only create a new branch without forcing other changes.
3106 only create a new branch without forcing other changes.
3106
3107
3107 Use -f/--force to override the default behavior and push all
3108 Use -f/--force to override the default behavior and push all
3108 changesets on all branches.
3109 changesets on all branches.
3109
3110
3110 If -r/--rev is used, the specified revision and all its ancestors
3111 If -r/--rev is used, the specified revision and all its ancestors
3111 will be pushed to the remote repository.
3112 will be pushed to the remote repository.
3112
3113
3113 Please see :hg:`help urls` for important details about ``ssh://``
3114 Please see :hg:`help urls` for important details about ``ssh://``
3114 URLs. If DESTINATION is omitted, a default path will be used.
3115 URLs. If DESTINATION is omitted, a default path will be used.
3115
3116
3116 Returns 0 if push was successful, 1 if nothing to push.
3117 Returns 0 if push was successful, 1 if nothing to push.
3117 """
3118 """
3118
3119
3119 if opts.get('bookmark'):
3120 if opts.get('bookmark'):
3120 for b in opts['bookmark']:
3121 for b in opts['bookmark']:
3121 # translate -B options to -r so changesets get pushed
3122 # translate -B options to -r so changesets get pushed
3122 if b in repo._bookmarks:
3123 if b in repo._bookmarks:
3123 opts.setdefault('rev', []).append(b)
3124 opts.setdefault('rev', []).append(b)
3124 else:
3125 else:
3125 # if we try to push a deleted bookmark, translate it to null
3126 # if we try to push a deleted bookmark, translate it to null
3126 # this lets simultaneous -r, -b options continue working
3127 # this lets simultaneous -r, -b options continue working
3127 opts.setdefault('rev', []).append("null")
3128 opts.setdefault('rev', []).append("null")
3128
3129
3129 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3130 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3130 dest, branches = hg.parseurl(dest, opts.get('branch'))
3131 dest, branches = hg.parseurl(dest, opts.get('branch'))
3131 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
3132 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
3132 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3133 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3133 other = hg.repository(hg.remoteui(repo, opts), dest)
3134 other = hg.repository(hg.remoteui(repo, opts), dest)
3134 if revs:
3135 if revs:
3135 revs = [repo.lookup(rev) for rev in revs]
3136 revs = [repo.lookup(rev) for rev in revs]
3136
3137
3137 repo._subtoppath = dest
3138 repo._subtoppath = dest
3138 try:
3139 try:
3139 # push subrepos depth-first for coherent ordering
3140 # push subrepos depth-first for coherent ordering
3140 c = repo['']
3141 c = repo['']
3141 subs = c.substate # only repos that are committed
3142 subs = c.substate # only repos that are committed
3142 for s in sorted(subs):
3143 for s in sorted(subs):
3143 if not c.sub(s).push(opts.get('force')):
3144 if not c.sub(s).push(opts.get('force')):
3144 return False
3145 return False
3145 finally:
3146 finally:
3146 del repo._subtoppath
3147 del repo._subtoppath
3147 result = repo.push(other, opts.get('force'), revs=revs,
3148 result = repo.push(other, opts.get('force'), revs=revs,
3148 newbranch=opts.get('new_branch'))
3149 newbranch=opts.get('new_branch'))
3149
3150
3150 result = (result == 0)
3151 result = (result == 0)
3151
3152
3152 if opts.get('bookmark'):
3153 if opts.get('bookmark'):
3153 rb = other.listkeys('bookmarks')
3154 rb = other.listkeys('bookmarks')
3154 for b in opts['bookmark']:
3155 for b in opts['bookmark']:
3155 # explicit push overrides remote bookmark if any
3156 # explicit push overrides remote bookmark if any
3156 if b in repo._bookmarks:
3157 if b in repo._bookmarks:
3157 ui.status(_("exporting bookmark %s\n") % b)
3158 ui.status(_("exporting bookmark %s\n") % b)
3158 new = repo[b].hex()
3159 new = repo[b].hex()
3159 elif b in rb:
3160 elif b in rb:
3160 ui.status(_("deleting remote bookmark %s\n") % b)
3161 ui.status(_("deleting remote bookmark %s\n") % b)
3161 new = '' # delete
3162 new = '' # delete
3162 else:
3163 else:
3163 ui.warn(_('bookmark %s does not exist on the local '
3164 ui.warn(_('bookmark %s does not exist on the local '
3164 'or remote repository!\n') % b)
3165 'or remote repository!\n') % b)
3165 return 2
3166 return 2
3166 old = rb.get(b, '')
3167 old = rb.get(b, '')
3167 r = other.pushkey('bookmarks', b, old, new)
3168 r = other.pushkey('bookmarks', b, old, new)
3168 if not r:
3169 if not r:
3169 ui.warn(_('updating bookmark %s failed!\n') % b)
3170 ui.warn(_('updating bookmark %s failed!\n') % b)
3170 if not result:
3171 if not result:
3171 result = 2
3172 result = 2
3172
3173
3173 return result
3174 return result
3174
3175
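For ``push --bookmark`` above, each name is resolved to one of three outcomes before ``pushkey('bookmarks', ...)`` is called: export the local value, delete the remote one by pushing an empty value, or report an error when neither side knows the bookmark. A condensed sketch of that decision; the function name and return convention are invented for illustration:

def bookmark_action(name, local_bookmarks, remote_bookmarks):
    # Illustrative sketch only -- not Mercurial API.
    # Returns the (old, new) pair a pushkey call would use, or None when
    # the bookmark exists on neither side and the caller should warn.
    old = remote_bookmarks.get(name, '')
    if name in local_bookmarks:
        return old, local_bookmarks[name]   # export the local value
    if name in remote_bookmarks:
        return old, ''                      # empty new value deletes it remotely
    return None

assert bookmark_action('gone', {}, {'gone': 'f' * 40}) == ('f' * 40, '')
assert bookmark_action('missing', {}, {}) is None
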
3175 def recover(ui, repo):
3176 def recover(ui, repo):
3176 """roll back an interrupted transaction
3177 """roll back an interrupted transaction
3177
3178
3178 Recover from an interrupted commit or pull.
3179 Recover from an interrupted commit or pull.
3179
3180
3180 This command tries to fix the repository status after an
3181 This command tries to fix the repository status after an
3181 interrupted operation. It should only be necessary when Mercurial
3182 interrupted operation. It should only be necessary when Mercurial
3182 suggests it.
3183 suggests it.
3183
3184
3184 Returns 0 if successful, 1 if nothing to recover or verify fails.
3185 Returns 0 if successful, 1 if nothing to recover or verify fails.
3185 """
3186 """
3186 if repo.recover():
3187 if repo.recover():
3187 return hg.verify(repo)
3188 return hg.verify(repo)
3188 return 1
3189 return 1
3189
3190
3190 def remove(ui, repo, *pats, **opts):
3191 def remove(ui, repo, *pats, **opts):
3191 """remove the specified files on the next commit
3192 """remove the specified files on the next commit
3192
3193
3193 Schedule the indicated files for removal from the repository.
3194 Schedule the indicated files for removal from the repository.
3194
3195
3195 This only removes files from the current branch, not from the
3196 This only removes files from the current branch, not from the
3196 entire project history. -A/--after can be used to remove only
3197 entire project history. -A/--after can be used to remove only
3197 files that have already been deleted, -f/--force can be used to
3198 files that have already been deleted, -f/--force can be used to
3198 force deletion, and -Af can be used to remove files from the next
3199 force deletion, and -Af can be used to remove files from the next
3199 revision without deleting them from the working directory.
3200 revision without deleting them from the working directory.
3200
3201
3201 The following table details the behavior of remove for different
3202 The following table details the behavior of remove for different
3202 file states (columns) and option combinations (rows). The file
3203 file states (columns) and option combinations (rows). The file
3203 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3204 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3204 reported by :hg:`status`). The actions are Warn, Remove (from
3205 reported by :hg:`status`). The actions are Warn, Remove (from
3205 branch) and Delete (from disk)::
3206 branch) and Delete (from disk)::
3206
3207
3207 A C M !
3208 A C M !
3208 none W RD W R
3209 none W RD W R
3209 -f R RD RD R
3210 -f R RD RD R
3210 -A W W W R
3211 -A W W W R
3211 -Af R R R R
3212 -Af R R R R
3212
3213
3213 This command schedules the files to be removed at the next commit.
3214 This command schedules the files to be removed at the next commit.
3214 To undo a remove before that, see :hg:`revert`.
3215 To undo a remove before that, see :hg:`revert`.
3215
3216
3216 Returns 0 on success, 1 if any warnings encountered.
3217 Returns 0 on success, 1 if any warnings encountered.
3217 """
3218 """
3218
3219
3219 ret = 0
3220 ret = 0
3220 after, force = opts.get('after'), opts.get('force')
3221 after, force = opts.get('after'), opts.get('force')
3221 if not pats and not after:
3222 if not pats and not after:
3222 raise util.Abort(_('no files specified'))
3223 raise util.Abort(_('no files specified'))
3223
3224
3224 m = cmdutil.match(repo, pats, opts)
3225 m = cmdutil.match(repo, pats, opts)
3225 s = repo.status(match=m, clean=True)
3226 s = repo.status(match=m, clean=True)
3226 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3227 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3227
3228
3228 for f in m.files():
3229 for f in m.files():
3229 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3230 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3230 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3231 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3231 ret = 1
3232 ret = 1
3232
3233
3233 if force:
3234 if force:
3234 remove, forget = modified + deleted + clean, added
3235 remove, forget = modified + deleted + clean, added
3235 elif after:
3236 elif after:
3236 remove, forget = deleted, []
3237 remove, forget = deleted, []
3237 for f in modified + added + clean:
3238 for f in modified + added + clean:
3238 ui.warn(_('not removing %s: file still exists (use -f'
3239 ui.warn(_('not removing %s: file still exists (use -f'
3239 ' to force removal)\n') % m.rel(f))
3240 ' to force removal)\n') % m.rel(f))
3240 ret = 1
3241 ret = 1
3241 else:
3242 else:
3242 remove, forget = deleted + clean, []
3243 remove, forget = deleted + clean, []
3243 for f in modified:
3244 for f in modified:
3244 ui.warn(_('not removing %s: file is modified (use -f'
3245 ui.warn(_('not removing %s: file is modified (use -f'
3245 ' to force removal)\n') % m.rel(f))
3246 ' to force removal)\n') % m.rel(f))
3246 ret = 1
3247 ret = 1
3247 for f in added:
3248 for f in added:
3248 ui.warn(_('not removing %s: file has been marked for add (use -f'
3249 ui.warn(_('not removing %s: file has been marked for add (use -f'
3249 ' to force removal)\n') % m.rel(f))
3250 ' to force removal)\n') % m.rel(f))
3250 ret = 1
3251 ret = 1
3251
3252
3252 for f in sorted(remove + forget):
3253 for f in sorted(remove + forget):
3253 if ui.verbose or not m.exact(f):
3254 if ui.verbose or not m.exact(f):
3254 ui.status(_('removing %s\n') % m.rel(f))
3255 ui.status(_('removing %s\n') % m.rel(f))
3255
3256
3256 repo[None].forget(forget)
3257 repo[None].forget(forget)
3257 repo[None].remove(remove, unlink=not after)
3258 repo[None].remove(remove, unlink=not after)
3258 return ret
3259 return ret
3259
3260
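The W/R/RD table in the ``remove`` docstring above maps naturally onto a lookup keyed by the option combination and the file state. A toy encoding of that table, purely illustrative -- the real command works from ``repo.status()`` buckets rather than per-file lookups:

# Illustrative sketch only -- not Mercurial API.
# Actions: W = warn, R = remove from branch, D = delete from disk.
REMOVE_TABLE = {
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

def remove_action(force, after, state):
    key = {(False, False): 'none', (True, False): '-f',
           (False, True): '-A', (True, True): '-Af'}[(force, after)]
    return REMOVE_TABLE[key][state]

assert remove_action(False, False, 'C') == 'RD'  # plain `hg remove` on a clean file
assert remove_action(False, True, 'M') == 'W'    # `hg remove -A` only warns on a modified file
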
3260 def rename(ui, repo, *pats, **opts):
3261 def rename(ui, repo, *pats, **opts):
3261 """rename files; equivalent of copy + remove
3262 """rename files; equivalent of copy + remove
3262
3263
3263 Mark dest as copies of sources; mark sources for deletion. If dest
3264 Mark dest as copies of sources; mark sources for deletion. If dest
3264 is a directory, copies are put in that directory. If dest is a
3265 is a directory, copies are put in that directory. If dest is a
3265 file, there can only be one source.
3266 file, there can only be one source.
3266
3267
3267 By default, this command copies the contents of files as they
3268 By default, this command copies the contents of files as they
3268 exist in the working directory. If invoked with -A/--after, the
3269 exist in the working directory. If invoked with -A/--after, the
3269 operation is recorded, but no copying is performed.
3270 operation is recorded, but no copying is performed.
3270
3271
3271 This command takes effect at the next commit. To undo a rename
3272 This command takes effect at the next commit. To undo a rename
3272 before that, see :hg:`revert`.
3273 before that, see :hg:`revert`.
3273
3274
3274 Returns 0 on success, 1 if errors are encountered.
3275 Returns 0 on success, 1 if errors are encountered.
3275 """
3276 """
3276 wlock = repo.wlock(False)
3277 wlock = repo.wlock(False)
3277 try:
3278 try:
3278 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3279 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3279 finally:
3280 finally:
3280 wlock.release()
3281 wlock.release()
3281
3282
3282 def resolve(ui, repo, *pats, **opts):
3283 def resolve(ui, repo, *pats, **opts):
3283 """redo merges or set/view the merge status of files
3284 """redo merges or set/view the merge status of files
3284
3285
3285 Merges with unresolved conflicts are often the result of
3286 Merges with unresolved conflicts are often the result of
3286 non-interactive merging using the ``internal:merge`` configuration
3287 non-interactive merging using the ``internal:merge`` configuration
3287 setting, or a command-line merge tool like ``diff3``. The resolve
3288 setting, or a command-line merge tool like ``diff3``. The resolve
3288 command is used to manage the files involved in a merge, after
3289 command is used to manage the files involved in a merge, after
3289 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3290 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3290 working directory must have two parents).
3291 working directory must have two parents).
3291
3292
3292 The resolve command can be used in the following ways:
3293 The resolve command can be used in the following ways:
3293
3294
3294 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3295 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3295 files, discarding any previous merge attempts. Re-merging is not
3296 files, discarding any previous merge attempts. Re-merging is not
3296 performed for files already marked as resolved. Use ``--all/-a``
3297 performed for files already marked as resolved. Use ``--all/-a``
3297 to select all unresolved files. ``--tool`` can be used to specify
3298 to select all unresolved files. ``--tool`` can be used to specify
3298 the merge tool used for the given files. It overrides the HGMERGE
3299 the merge tool used for the given files. It overrides the HGMERGE
3299 environment variable and your configuration files.
3300 environment variable and your configuration files.
3300
3301
3301 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3302 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3302 (e.g. after having manually fixed-up the files). The default is
3303 (e.g. after having manually fixed-up the files). The default is
3303 to mark all unresolved files.
3304 to mark all unresolved files.
3304
3305
3305 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3306 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3306 default is to mark all resolved files.
3307 default is to mark all resolved files.
3307
3308
3308 - :hg:`resolve -l`: list files which had or still have conflicts.
3309 - :hg:`resolve -l`: list files which had or still have conflicts.
3309 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3310 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3310
3311
3311 Note that Mercurial will not let you commit files with unresolved
3312 Note that Mercurial will not let you commit files with unresolved
3312 merge conflicts. You must use :hg:`resolve -m ...` before you can
3313 merge conflicts. You must use :hg:`resolve -m ...` before you can
3313 commit after a conflicting merge.
3314 commit after a conflicting merge.
3314
3315
3315 Returns 0 on success, 1 if any files fail a resolve attempt.
3316 Returns 0 on success, 1 if any files fail a resolve attempt.
3316 """
3317 """
3317
3318
3318 all, mark, unmark, show, nostatus = \
3319 all, mark, unmark, show, nostatus = \
3319 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3320 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3320
3321
3321 if (show and (mark or unmark)) or (mark and unmark):
3322 if (show and (mark or unmark)) or (mark and unmark):
3322 raise util.Abort(_("too many options specified"))
3323 raise util.Abort(_("too many options specified"))
3323 if pats and all:
3324 if pats and all:
3324 raise util.Abort(_("can't specify --all and patterns"))
3325 raise util.Abort(_("can't specify --all and patterns"))
3325 if not (all or pats or show or mark or unmark):
3326 if not (all or pats or show or mark or unmark):
3326 raise util.Abort(_('no files or directories specified; '
3327 raise util.Abort(_('no files or directories specified; '
3327 'use --all to remerge all files'))
3328 'use --all to remerge all files'))
3328
3329
3329 ms = mergemod.mergestate(repo)
3330 ms = mergemod.mergestate(repo)
3330 m = cmdutil.match(repo, pats, opts)
3331 m = cmdutil.match(repo, pats, opts)
3331 ret = 0
3332 ret = 0
3332
3333
3333 for f in ms:
3334 for f in ms:
3334 if m(f):
3335 if m(f):
3335 if show:
3336 if show:
3336 if nostatus:
3337 if nostatus:
3337 ui.write("%s\n" % f)
3338 ui.write("%s\n" % f)
3338 else:
3339 else:
3339 ui.write("%s %s\n" % (ms[f].upper(), f),
3340 ui.write("%s %s\n" % (ms[f].upper(), f),
3340 label='resolve.' +
3341 label='resolve.' +
3341 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3342 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3342 elif mark:
3343 elif mark:
3343 ms.mark(f, "r")
3344 ms.mark(f, "r")
3344 elif unmark:
3345 elif unmark:
3345 ms.mark(f, "u")
3346 ms.mark(f, "u")
3346 else:
3347 else:
3347 wctx = repo[None]
3348 wctx = repo[None]
3348 mctx = wctx.parents()[-1]
3349 mctx = wctx.parents()[-1]
3349
3350
3350 # backup pre-resolve (merge uses .orig for its own purposes)
3351 # backup pre-resolve (merge uses .orig for its own purposes)
3351 a = repo.wjoin(f)
3352 a = repo.wjoin(f)
3352 util.copyfile(a, a + ".resolve")
3353 util.copyfile(a, a + ".resolve")
3353
3354
3354 try:
3355 try:
3355 # resolve file
3356 # resolve file
3356 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3357 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3357 if ms.resolve(f, wctx, mctx):
3358 if ms.resolve(f, wctx, mctx):
3358 ret = 1
3359 ret = 1
3359 finally:
3360 finally:
3360 ui.setconfig('ui', 'forcemerge', '')
3361 ui.setconfig('ui', 'forcemerge', '')
3361
3362
3362 # replace filemerge's .orig file with our resolve file
3363 # replace filemerge's .orig file with our resolve file
3363 util.rename(a + ".resolve", a + ".orig")
3364 util.rename(a + ".resolve", a + ".orig")
3364
3365
3365 ms.commit()
3366 ms.commit()
3366 return ret
3367 return ret
3367
3368
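``resolve -l`` above prints one line per file tracked in the merge state, prefixed with ``U`` or ``R`` unless ``--no-status`` is given. A small sketch of that formatting, with a plain dict standing in for the real ``mergestate`` object; the helper name is hypothetical:

def format_resolve_list(mergestate, no_status=False):
    # Illustrative sketch only -- not Mercurial API.
    # mergestate maps file name -> 'u' (unresolved) or 'r' (resolved).
    lines = []
    for f in sorted(mergestate):
        if no_status:
            lines.append(f)
        else:
            lines.append('%s %s' % (mergestate[f].upper(), f))
    return '\n'.join(lines)

state = {'a.txt': 'u', 'b.txt': 'r'}
assert format_resolve_list(state) == 'U a.txt\nR b.txt'
assert format_resolve_list(state, no_status=True) == 'a.txt\nb.txt'
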
3368 def revert(ui, repo, *pats, **opts):
3369 def revert(ui, repo, *pats, **opts):
3369 """restore individual files or directories to an earlier state
3370 """restore individual files or directories to an earlier state
3370
3371
3371 .. note::
3372 .. note::
3372 This command is most likely not what you are looking for.
3373 This command is most likely not what you are looking for.
3373 Revert will partially overwrite content in the working
3374 Revert will partially overwrite content in the working
3374 directory without changing the working directory parents. Use
3375 directory without changing the working directory parents. Use
3375 :hg:`update -r rev` to check out earlier revisions, or
3376 :hg:`update -r rev` to check out earlier revisions, or
3376 :hg:`update --clean .` to undo a merge which has added another
3377 :hg:`update --clean .` to undo a merge which has added another
3377 parent.
3378 parent.
3378
3379
3379 With no revision specified, revert the named files or directories
3380 With no revision specified, revert the named files or directories
3380 to the contents they had in the parent of the working directory.
3381 to the contents they had in the parent of the working directory.
3381 This restores the contents of the affected files to an unmodified
3382 This restores the contents of the affected files to an unmodified
3382 state and unschedules adds, removes, copies, and renames. If the
3383 state and unschedules adds, removes, copies, and renames. If the
3383 working directory has two parents, you must explicitly specify a
3384 working directory has two parents, you must explicitly specify a
3384 revision.
3385 revision.
3385
3386
3386 Using the -r/--rev option, revert the given files or directories
3387 Using the -r/--rev option, revert the given files or directories
3387 to their contents as of a specific revision. This can be helpful
3388 to their contents as of a specific revision. This can be helpful
3388 to "roll back" some or all of an earlier change. See :hg:`help
3389 to "roll back" some or all of an earlier change. See :hg:`help
3389 dates` for a list of formats valid for -d/--date.
3390 dates` for a list of formats valid for -d/--date.
3390
3391
3391 Revert modifies the working directory. It does not commit any
3392 Revert modifies the working directory. It does not commit any
3392 changes, or change the parent of the working directory. If you
3393 changes, or change the parent of the working directory. If you
3393 revert to a revision other than the parent of the working
3394 revert to a revision other than the parent of the working
3394 directory, the reverted files will thus appear modified
3395 directory, the reverted files will thus appear modified
3395 afterwards.
3396 afterwards.
3396
3397
3397 If a file has been deleted, it is restored. Files scheduled for
3398 If a file has been deleted, it is restored. Files scheduled for
3398 addition are just unscheduled and left as they are. If the
3399 addition are just unscheduled and left as they are. If the
3399 executable mode of a file was changed, it is reset.
3400 executable mode of a file was changed, it is reset.
3400
3401
3401 If names are given, all files matching the names are reverted.
3402 If names are given, all files matching the names are reverted.
3402 If no arguments are given, no files are reverted.
3403 If no arguments are given, no files are reverted.
3403
3404
3404 Modified files are saved with a .orig suffix before reverting.
3405 Modified files are saved with a .orig suffix before reverting.
3405 To disable these backups, use --no-backup.
3406 To disable these backups, use --no-backup.
3406
3407
3407 Returns 0 on success.
3408 Returns 0 on success.
3408 """
3409 """
3409
3410
3410 if opts.get("date"):
3411 if opts.get("date"):
3411 if opts.get("rev"):
3412 if opts.get("rev"):
3412 raise util.Abort(_("you can't specify a revision and a date"))
3413 raise util.Abort(_("you can't specify a revision and a date"))
3413 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3414 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3414
3415
3415 parent, p2 = repo.dirstate.parents()
3416 parent, p2 = repo.dirstate.parents()
3416 if not opts.get('rev') and p2 != nullid:
3417 if not opts.get('rev') and p2 != nullid:
3417 raise util.Abort(_('uncommitted merge - '
3418 raise util.Abort(_('uncommitted merge - '
3418 'use "hg update", see "hg help revert"'))
3419 'use "hg update", see "hg help revert"'))
3419
3420
3420 if not pats and not opts.get('all'):
3421 if not pats and not opts.get('all'):
3421 raise util.Abort(_('no files or directories specified; '
3422 raise util.Abort(_('no files or directories specified; '
3422 'use --all to revert the whole repo'))
3423 'use --all to revert the whole repo'))
3423
3424
3424 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3425 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3425 node = ctx.node()
3426 node = ctx.node()
3426 mf = ctx.manifest()
3427 mf = ctx.manifest()
3427 if node == parent:
3428 if node == parent:
3428 pmf = mf
3429 pmf = mf
3429 else:
3430 else:
3430 pmf = None
3431 pmf = None
3431
3432
3432 # need all matching names in dirstate and manifest of target rev,
3433 # need all matching names in dirstate and manifest of target rev,
3433 # so have to walk both. do not print errors if files exist in one
3434 # so have to walk both. do not print errors if files exist in one
3434 # but not other.
3435 # but not other.
3435
3436
3436 names = {}
3437 names = {}
3437
3438
3438 wlock = repo.wlock()
3439 wlock = repo.wlock()
3439 try:
3440 try:
3440 # walk dirstate.
3441 # walk dirstate.
3441
3442
3442 m = cmdutil.match(repo, pats, opts)
3443 m = cmdutil.match(repo, pats, opts)
3443 m.bad = lambda x, y: False
3444 m.bad = lambda x, y: False
3444 for abs in repo.walk(m):
3445 for abs in repo.walk(m):
3445 names[abs] = m.rel(abs), m.exact(abs)
3446 names[abs] = m.rel(abs), m.exact(abs)
3446
3447
3447 # walk target manifest.
3448 # walk target manifest.
3448
3449
3449 def badfn(path, msg):
3450 def badfn(path, msg):
3450 if path in names:
3451 if path in names:
3451 return
3452 return
3452 path_ = path + '/'
3453 path_ = path + '/'
3453 for f in names:
3454 for f in names:
3454 if f.startswith(path_):
3455 if f.startswith(path_):
3455 return
3456 return
3456 ui.warn("%s: %s\n" % (m.rel(path), msg))
3457 ui.warn("%s: %s\n" % (m.rel(path), msg))
3457
3458
3458 m = cmdutil.match(repo, pats, opts)
3459 m = cmdutil.match(repo, pats, opts)
3459 m.bad = badfn
3460 m.bad = badfn
3460 for abs in repo[node].walk(m):
3461 for abs in repo[node].walk(m):
3461 if abs not in names:
3462 if abs not in names:
3462 names[abs] = m.rel(abs), m.exact(abs)
3463 names[abs] = m.rel(abs), m.exact(abs)
3463
3464
3464 m = cmdutil.matchfiles(repo, names)
3465 m = cmdutil.matchfiles(repo, names)
3465 changes = repo.status(match=m)[:4]
3466 changes = repo.status(match=m)[:4]
3466 modified, added, removed, deleted = map(set, changes)
3467 modified, added, removed, deleted = map(set, changes)
3467
3468
3468 # if f is a rename, also revert the source
3469 # if f is a rename, also revert the source
3469 cwd = repo.getcwd()
3470 cwd = repo.getcwd()
3470 for f in added:
3471 for f in added:
3471 src = repo.dirstate.copied(f)
3472 src = repo.dirstate.copied(f)
3472 if src and src not in names and repo.dirstate[src] == 'r':
3473 if src and src not in names and repo.dirstate[src] == 'r':
3473 removed.add(src)
3474 removed.add(src)
3474 names[src] = (repo.pathto(src, cwd), True)
3475 names[src] = (repo.pathto(src, cwd), True)
3475
3476
3476 def removeforget(abs):
3477 def removeforget(abs):
3477 if repo.dirstate[abs] == 'a':
3478 if repo.dirstate[abs] == 'a':
3478 return _('forgetting %s\n')
3479 return _('forgetting %s\n')
3479 return _('removing %s\n')
3480 return _('removing %s\n')
3480
3481
3481 revert = ([], _('reverting %s\n'))
3482 revert = ([], _('reverting %s\n'))
3482 add = ([], _('adding %s\n'))
3483 add = ([], _('adding %s\n'))
3483 remove = ([], removeforget)
3484 remove = ([], removeforget)
3484 undelete = ([], _('undeleting %s\n'))
3485 undelete = ([], _('undeleting %s\n'))
3485
3486
3486 disptable = (
3487 disptable = (
3487 # dispatch table:
3488 # dispatch table:
3488 # file state
3489 # file state
3489 # action if in target manifest
3490 # action if in target manifest
3490 # action if not in target manifest
3491 # action if not in target manifest
3491 # make backup if in target manifest
3492 # make backup if in target manifest
3492 # make backup if not in target manifest
3493 # make backup if not in target manifest
3493 (modified, revert, remove, True, True),
3494 (modified, revert, remove, True, True),
3494 (added, revert, remove, True, False),
3495 (added, revert, remove, True, False),
3495 (removed, undelete, None, False, False),
3496 (removed, undelete, None, False, False),
3496 (deleted, revert, remove, False, False),
3497 (deleted, revert, remove, False, False),
3497 )
3498 )
3498
3499
3499 for abs, (rel, exact) in sorted(names.items()):
3500 for abs, (rel, exact) in sorted(names.items()):
3500 mfentry = mf.get(abs)
3501 mfentry = mf.get(abs)
3501 target = repo.wjoin(abs)
3502 target = repo.wjoin(abs)
3502 def handle(xlist, dobackup):
3503 def handle(xlist, dobackup):
3503 xlist[0].append(abs)
3504 xlist[0].append(abs)
3504 if (dobackup and not opts.get('no_backup') and
3505 if (dobackup and not opts.get('no_backup') and
3505 os.path.lexists(target)):
3506 os.path.lexists(target)):
3506 bakname = "%s.orig" % rel
3507 bakname = "%s.orig" % rel
3507 ui.note(_('saving current version of %s as %s\n') %
3508 ui.note(_('saving current version of %s as %s\n') %
3508 (rel, bakname))
3509 (rel, bakname))
3509 if not opts.get('dry_run'):
3510 if not opts.get('dry_run'):
3510 util.rename(target, bakname)
3511 util.rename(target, bakname)
3511 if ui.verbose or not exact:
3512 if ui.verbose or not exact:
3512 msg = xlist[1]
3513 msg = xlist[1]
3513 if not isinstance(msg, basestring):
3514 if not isinstance(msg, basestring):
3514 msg = msg(abs)
3515 msg = msg(abs)
3515 ui.status(msg % rel)
3516 ui.status(msg % rel)
3516 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3517 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3517 if abs not in table:
3518 if abs not in table:
3518 continue
3519 continue
3519 # file has changed in dirstate
3520 # file has changed in dirstate
3520 if mfentry:
3521 if mfentry:
3521 handle(hitlist, backuphit)
3522 handle(hitlist, backuphit)
3522 elif misslist is not None:
3523 elif misslist is not None:
3523 handle(misslist, backupmiss)
3524 handle(misslist, backupmiss)
3524 break
3525 break
3525 else:
3526 else:
3526 if abs not in repo.dirstate:
3527 if abs not in repo.dirstate:
3527 if mfentry:
3528 if mfentry:
3528 handle(add, True)
3529 handle(add, True)
3529 elif exact:
3530 elif exact:
3530 ui.warn(_('file not managed: %s\n') % rel)
3531 ui.warn(_('file not managed: %s\n') % rel)
3531 continue
3532 continue
3532 # file has not changed in dirstate
3533 # file has not changed in dirstate
3533 if node == parent:
3534 if node == parent:
3534 if exact:
3535 if exact:
3535 ui.warn(_('no changes needed to %s\n') % rel)
3536 ui.warn(_('no changes needed to %s\n') % rel)
3536 continue
3537 continue
3537 if pmf is None:
3538 if pmf is None:
3538 # only need parent manifest in this unlikely case,
3539 # only need parent manifest in this unlikely case,
3539 # so do not read by default
3540 # so do not read by default
3540 pmf = repo[parent].manifest()
3541 pmf = repo[parent].manifest()
3541 if abs in pmf:
3542 if abs in pmf:
3542 if mfentry:
3543 if mfentry:
3543 # if version of file is same in parent and target
3544 # if version of file is same in parent and target
3544 # manifests, do nothing
3545 # manifests, do nothing
3545 if (pmf[abs] != mfentry or
3546 if (pmf[abs] != mfentry or
3546 pmf.flags(abs) != mf.flags(abs)):
3547 pmf.flags(abs) != mf.flags(abs)):
3547 handle(revert, False)
3548 handle(revert, False)
3548 else:
3549 else:
3549 handle(remove, False)
3550 handle(remove, False)
3550
3551
3551 if not opts.get('dry_run'):
3552 if not opts.get('dry_run'):
3552 def checkout(f):
3553 def checkout(f):
3553 fc = ctx[f]
3554 fc = ctx[f]
3554 repo.wwrite(f, fc.data(), fc.flags())
3555 repo.wwrite(f, fc.data(), fc.flags())
3555
3556
3556 audit_path = util.path_auditor(repo.root)
3557 audit_path = util.path_auditor(repo.root)
3557 for f in remove[0]:
3558 for f in remove[0]:
3558 if repo.dirstate[f] == 'a':
3559 if repo.dirstate[f] == 'a':
3559 repo.dirstate.forget(f)
3560 repo.dirstate.forget(f)
3560 continue
3561 continue
3561 audit_path(f)
3562 audit_path(f)
3562 try:
3563 try:
3563 util.unlinkpath(repo.wjoin(f))
3564 util.unlinkpath(repo.wjoin(f))
3564 except OSError:
3565 except OSError:
3565 pass
3566 pass
3566 repo.dirstate.remove(f)
3567 repo.dirstate.remove(f)
3567
3568
3568 normal = None
3569 normal = None
3569 if node == parent:
3570 if node == parent:
3570 # We're reverting to our parent. If possible, we'd like status
3571 # We're reverting to our parent. If possible, we'd like status
3571 # to report the file as clean. We have to use normallookup for
3572 # to report the file as clean. We have to use normallookup for
3572 # merges to avoid losing information about merged/dirty files.
3573 # merges to avoid losing information about merged/dirty files.
3573 if p2 != nullid:
3574 if p2 != nullid:
3574 normal = repo.dirstate.normallookup
3575 normal = repo.dirstate.normallookup
3575 else:
3576 else:
3576 normal = repo.dirstate.normal
3577 normal = repo.dirstate.normal
3577 for f in revert[0]:
3578 for f in revert[0]:
3578 checkout(f)
3579 checkout(f)
3579 if normal:
3580 if normal:
3580 normal(f)
3581 normal(f)
3581
3582
3582 for f in add[0]:
3583 for f in add[0]:
3583 checkout(f)
3584 checkout(f)
3584 repo.dirstate.add(f)
3585 repo.dirstate.add(f)
3585
3586
3586 normal = repo.dirstate.normallookup
3587 normal = repo.dirstate.normallookup
3587 if node == parent and p2 == nullid:
3588 if node == parent and p2 == nullid:
3588 normal = repo.dirstate.normal
3589 normal = repo.dirstate.normal
3589 for f in undelete[0]:
3590 for f in undelete[0]:
3590 checkout(f)
3591 checkout(f)
3591 normal(f)
3592 normal(f)
3592
3593
3593 finally:
3594 finally:
3594 wlock.release()
3595 wlock.release()
3595
3596
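The heart of ``revert`` above is the dispatch table: each status bucket carries an action for when the file exists in the target manifest, an action for when it does not, and two backup flags. A stripped-down sketch of consulting that table for a single file; names are illustrative, and the real code also handles the not-in-dirstate and unchanged-in-dirstate branches:

# Illustrative sketch only -- not Mercurial API.
# (status, action if in target manifest, action if not, backup-hit, backup-miss)
DISPTABLE = (
    ('modified', 'revert',   'remove', True,  True),
    ('added',    'revert',   'remove', True,  False),
    ('removed',  'undelete', None,     False, False),
    ('deleted',  'revert',   'remove', False, False),
)

def revert_action(status, in_target_manifest):
    # Returns (action, make_backup), or None when there is nothing to do.
    for st, hit, miss, backuphit, backupmiss in DISPTABLE:
        if st != status:
            continue
        if in_target_manifest:
            return hit, backuphit
        if miss is not None:
            return miss, backupmiss
        return None
    return None

assert revert_action('modified', True) == ('revert', True)
assert revert_action('removed', False) is None  # removed and gone from target: nothing to do
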
3596 def rollback(ui, repo, **opts):
3597 def rollback(ui, repo, **opts):
3597 """roll back the last transaction (dangerous)
3598 """roll back the last transaction (dangerous)
3598
3599
3599 This command should be used with care. There is only one level of
3600 This command should be used with care. There is only one level of
3600 rollback, and there is no way to undo a rollback. It will also
3601 rollback, and there is no way to undo a rollback. It will also
3601 restore the dirstate at the time of the last transaction, losing
3602 restore the dirstate at the time of the last transaction, losing
3602 any dirstate changes since that time. This command does not alter
3603 any dirstate changes since that time. This command does not alter
3603 the working directory.
3604 the working directory.
3604
3605
3605 Transactions are used to encapsulate the effects of all commands
3606 Transactions are used to encapsulate the effects of all commands
3606 that create new changesets or propagate existing changesets into a
3607 that create new changesets or propagate existing changesets into a
3607 repository. For example, the following commands are transactional,
3608 repository. For example, the following commands are transactional,
3608 and their effects can be rolled back:
3609 and their effects can be rolled back:
3609
3610
3610 - commit
3611 - commit
3611 - import
3612 - import
3612 - pull
3613 - pull
3613 - push (with this repository as the destination)
3614 - push (with this repository as the destination)
3614 - unbundle
3615 - unbundle
3615
3616
3616 This command is not intended for use on public repositories. Once
3617 This command is not intended for use on public repositories. Once
3617 changes are visible for pull by other users, rolling a transaction
3618 changes are visible for pull by other users, rolling a transaction
3618 back locally is ineffective (someone else may already have pulled
3619 back locally is ineffective (someone else may already have pulled
3619 the changes). Furthermore, a race is possible with readers of the
3620 the changes). Furthermore, a race is possible with readers of the
3620 repository; for example an in-progress pull from the repository
3621 repository; for example an in-progress pull from the repository
3621 may fail if a rollback is performed.
3622 may fail if a rollback is performed.
3622
3623
3623 Returns 0 on success, 1 if no rollback data is available.
3624 Returns 0 on success, 1 if no rollback data is available.
3624 """
3625 """
3625 return repo.rollback(opts.get('dry_run'))
3626 return repo.rollback(opts.get('dry_run'))
3626
3627
3627 def root(ui, repo):
3628 def root(ui, repo):
3628 """print the root (top) of the current working directory
3629 """print the root (top) of the current working directory
3629
3630
3630 Print the root directory of the current repository.
3631 Print the root directory of the current repository.
3631
3632
3632 Returns 0 on success.
3633 Returns 0 on success.
3633 """
3634 """
3634 ui.write(repo.root + "\n")
3635 ui.write(repo.root + "\n")
3635
3636
3636 def serve(ui, repo, **opts):
3637 def serve(ui, repo, **opts):
3637 """start stand-alone webserver
3638 """start stand-alone webserver
3638
3639
3639 Start a local HTTP repository browser and pull server. You can use
3640 Start a local HTTP repository browser and pull server. You can use
3640 this for ad-hoc sharing and browsing of repositories. It is
3641 this for ad-hoc sharing and browsing of repositories. It is
3641 recommended to use a real web server to serve a repository for
3642 recommended to use a real web server to serve a repository for
3642 longer periods of time.
3643 longer periods of time.
3643
3644
3644 Please note that the server does not implement access control.
3645 Please note that the server does not implement access control.
3645 This means that, by default, anybody can read from the server and
3646 This means that, by default, anybody can read from the server and
3646 nobody can write to it. Set the ``web.allow_push``
3647 nobody can write to it. Set the ``web.allow_push``
3647 option to ``*`` to allow everybody to push to the server. You
3648 option to ``*`` to allow everybody to push to the server. You
3648 should use a real web server if you need to authenticate users.
3649 should use a real web server if you need to authenticate users.
3649
3650
3650 By default, the server logs accesses to stdout and errors to
3651 By default, the server logs accesses to stdout and errors to
3651 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3652 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3652 files.
3653 files.
3653
3654
3654 To have the server choose a free port number to listen on, specify
3655 To have the server choose a free port number to listen on, specify
3655 a port number of 0; in this case, the server will print the port
3656 a port number of 0; in this case, the server will print the port
3656 number it uses.
3657 number it uses.
3657
3658
3658 Returns 0 on success.
3659 Returns 0 on success.
3659 """
3660 """
3660
3661
3661 if opts["stdio"]:
3662 if opts["stdio"]:
3662 if repo is None:
3663 if repo is None:
3663 raise error.RepoError(_("There is no Mercurial repository here"
3664 raise error.RepoError(_("There is no Mercurial repository here"
3664 " (.hg not found)"))
3665 " (.hg not found)"))
3665 s = sshserver.sshserver(ui, repo)
3666 s = sshserver.sshserver(ui, repo)
3666 s.serve_forever()
3667 s.serve_forever()
3667
3668
3668 # this way we can check if something was given in the command-line
3669 # this way we can check if something was given in the command-line
3669 if opts.get('port'):
3670 if opts.get('port'):
3670 opts['port'] = util.getport(opts.get('port'))
3671 opts['port'] = util.getport(opts.get('port'))
3671
3672
3672 baseui = repo and repo.baseui or ui
3673 baseui = repo and repo.baseui or ui
3673 optlist = ("name templates style address port prefix ipv6"
3674 optlist = ("name templates style address port prefix ipv6"
3674 " accesslog errorlog certificate encoding")
3675 " accesslog errorlog certificate encoding")
3675 for o in optlist.split():
3676 for o in optlist.split():
3676 val = opts.get(o, '')
3677 val = opts.get(o, '')
3677 if val in (None, ''): # should check against default options instead
3678 if val in (None, ''): # should check against default options instead
3678 continue
3679 continue
3679 baseui.setconfig("web", o, val)
3680 baseui.setconfig("web", o, val)
3680 if repo and repo.ui != baseui:
3681 if repo and repo.ui != baseui:
3681 repo.ui.setconfig("web", o, val)
3682 repo.ui.setconfig("web", o, val)
3682
3683
3683 o = opts.get('web_conf') or opts.get('webdir_conf')
3684 o = opts.get('web_conf') or opts.get('webdir_conf')
3684 if not o:
3685 if not o:
3685 if not repo:
3686 if not repo:
3686 raise error.RepoError(_("There is no Mercurial repository"
3687 raise error.RepoError(_("There is no Mercurial repository"
3687 " here (.hg not found)"))
3688 " here (.hg not found)"))
3688 o = repo.root
3689 o = repo.root
3689
3690
3690 app = hgweb.hgweb(o, baseui=ui)
3691 app = hgweb.hgweb(o, baseui=ui)
3691
3692
3692 class service(object):
3693 class service(object):
3693 def init(self):
3694 def init(self):
3694 util.set_signal_handler()
3695 util.set_signal_handler()
3695 self.httpd = hgweb.server.create_server(ui, app)
3696 self.httpd = hgweb.server.create_server(ui, app)
3696
3697
3697 if opts['port'] and not ui.verbose:
3698 if opts['port'] and not ui.verbose:
3698 return
3699 return
3699
3700
3700 if self.httpd.prefix:
3701 if self.httpd.prefix:
3701 prefix = self.httpd.prefix.strip('/') + '/'
3702 prefix = self.httpd.prefix.strip('/') + '/'
3702 else:
3703 else:
3703 prefix = ''
3704 prefix = ''
3704
3705
3705 port = ':%d' % self.httpd.port
3706 port = ':%d' % self.httpd.port
3706 if port == ':80':
3707 if port == ':80':
3707 port = ''
3708 port = ''
3708
3709
3709 bindaddr = self.httpd.addr
3710 bindaddr = self.httpd.addr
3710 if bindaddr == '0.0.0.0':
3711 if bindaddr == '0.0.0.0':
3711 bindaddr = '*'
3712 bindaddr = '*'
3712 elif ':' in bindaddr: # IPv6
3713 elif ':' in bindaddr: # IPv6
3713 bindaddr = '[%s]' % bindaddr
3714 bindaddr = '[%s]' % bindaddr
3714
3715
3715 fqaddr = self.httpd.fqaddr
3716 fqaddr = self.httpd.fqaddr
3716 if ':' in fqaddr:
3717 if ':' in fqaddr:
3717 fqaddr = '[%s]' % fqaddr
3718 fqaddr = '[%s]' % fqaddr
3718 if opts['port']:
3719 if opts['port']:
3719 write = ui.status
3720 write = ui.status
3720 else:
3721 else:
3721 write = ui.write
3722 write = ui.write
3722 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3723 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3723 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3724 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3724
3725
3725 def run(self):
3726 def run(self):
3726 self.httpd.serve_forever()
3727 self.httpd.serve_forever()
3727
3728
3728 service = service()
3729 service = service()
3729
3730
3730 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3731 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3731
3732
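The ``listening at ...`` message in ``serve`` above normalizes its pieces before printing: port 80 is omitted from the URL, a ``0.0.0.0`` bind address is shown as ``*``, and IPv6 literals are bracketed. A self-contained sketch of that formatting; the helper is hypothetical:

def listen_message(fqaddr, port, prefix, bindaddr):
    # Illustrative sketch only -- not Mercurial API.
    prefix = prefix.strip('/') + '/' if prefix else ''
    shown_port = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'
    elif ':' in bindaddr:              # IPv6 literal
        bindaddr = '[%s]' % bindaddr
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    return ('listening at http://%s%s/%s (bound to %s:%d)\n'
            % (fqaddr, shown_port, prefix, bindaddr, port))

assert listen_message('example.com', 8000, '', '0.0.0.0') == \
    'listening at http://example.com:8000/ (bound to *:8000)\n'
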
3732 def status(ui, repo, *pats, **opts):
3733 def status(ui, repo, *pats, **opts):
3733 """show changed files in the working directory
3734 """show changed files in the working directory
3734
3735
3735 Show status of files in the repository. If names are given, only
3736 Show status of files in the repository. If names are given, only
3736 files that match are shown. Files that are clean or ignored or
3737 files that match are shown. Files that are clean or ignored or
3737 the source of a copy/move operation are not listed unless
3738 the source of a copy/move operation are not listed unless
3738 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3739 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3739 Unless options described with "show only ..." are given, the
3740 Unless options described with "show only ..." are given, the
3740 options -mardu are used.
3741 options -mardu are used.
3741
3742
3742 Option -q/--quiet hides untracked (unknown and ignored) files
3743 Option -q/--quiet hides untracked (unknown and ignored) files
3743 unless explicitly requested with -u/--unknown or -i/--ignored.
3744 unless explicitly requested with -u/--unknown or -i/--ignored.
3744
3745
3745 .. note::
3746 .. note::
3746 status may appear to disagree with diff if permissions have
3747 status may appear to disagree with diff if permissions have
3747 changed or a merge has occurred. The standard diff format does
3748 changed or a merge has occurred. The standard diff format does
3748 not report permission changes and diff only reports changes
3749 not report permission changes and diff only reports changes
3749 relative to one merge parent.
3750 relative to one merge parent.
3750
3751
3751 If one revision is given, it is used as the base revision.
3752 If one revision is given, it is used as the base revision.
3752 If two revisions are given, the differences between them are
3753 If two revisions are given, the differences between them are
3753 shown. The --change option can also be used as a shortcut to list
3754 shown. The --change option can also be used as a shortcut to list
3754 the changed files of a revision from its first parent.
3755 the changed files of a revision from its first parent.
3755
3756
3756 The codes used to show the status of files are::
3757 The codes used to show the status of files are::
3757
3758
3758 M = modified
3759 M = modified
3759 A = added
3760 A = added
3760 R = removed
3761 R = removed
3761 C = clean
3762 C = clean
3762 ! = missing (deleted by non-hg command, but still tracked)
3763 ! = missing (deleted by non-hg command, but still tracked)
3763 ? = not tracked
3764 ? = not tracked
3764 I = ignored
3765 I = ignored
3765 = origin of the previous file listed as A (added)
3766 = origin of the previous file listed as A (added)
3766
3767
3767 Returns 0 on success.
3768 Returns 0 on success.
3768 """
3769 """
3769
3770
3770 revs = opts.get('rev')
3771 revs = opts.get('rev')
3771 change = opts.get('change')
3772 change = opts.get('change')
3772
3773
3773 if revs and change:
3774 if revs and change:
3774 msg = _('cannot specify --rev and --change at the same time')
3775 msg = _('cannot specify --rev and --change at the same time')
3775 raise util.Abort(msg)
3776 raise util.Abort(msg)
3776 elif change:
3777 elif change:
3777 node2 = repo.lookup(change)
3778 node2 = repo.lookup(change)
3778 node1 = repo[node2].p1().node()
3779 node1 = repo[node2].p1().node()
3779 else:
3780 else:
3780 node1, node2 = cmdutil.revpair(repo, revs)
3781 node1, node2 = cmdutil.revpair(repo, revs)
3781
3782
3782 cwd = (pats and repo.getcwd()) or ''
3783 cwd = (pats and repo.getcwd()) or ''
3783 end = opts.get('print0') and '\0' or '\n'
3784 end = opts.get('print0') and '\0' or '\n'
3784 copy = {}
3785 copy = {}
3785 states = 'modified added removed deleted unknown ignored clean'.split()
3786 states = 'modified added removed deleted unknown ignored clean'.split()
3786 show = [k for k in states if opts.get(k)]
3787 show = [k for k in states if opts.get(k)]
3787 if opts.get('all'):
3788 if opts.get('all'):
3788 show += ui.quiet and (states[:4] + ['clean']) or states
3789 show += ui.quiet and (states[:4] + ['clean']) or states
3789 if not show:
3790 if not show:
3790 show = ui.quiet and states[:4] or states[:5]
3791 show = ui.quiet and states[:4] or states[:5]
3791
3792
3792 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3793 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3793 'ignored' in show, 'clean' in show, 'unknown' in show,
3794 'ignored' in show, 'clean' in show, 'unknown' in show,
3794 opts.get('subrepos'))
3795 opts.get('subrepos'))
3795 changestates = zip(states, 'MAR!?IC', stat)
3796 changestates = zip(states, 'MAR!?IC', stat)
3796
3797
3797 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3798 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3798 ctxn = repo[nullid]
3799 ctxn = repo[nullid]
3799 ctx1 = repo[node1]
3800 ctx1 = repo[node1]
3800 ctx2 = repo[node2]
3801 ctx2 = repo[node2]
3801 added = stat[1]
3802 added = stat[1]
3802 if node2 is None:
3803 if node2 is None:
3803 added = stat[0] + stat[1] # merged?
3804 added = stat[0] + stat[1] # merged?
3804
3805
3805 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3806 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3806 if k in added:
3807 if k in added:
3807 copy[k] = v
3808 copy[k] = v
3808 elif v in added:
3809 elif v in added:
3809 copy[v] = k
3810 copy[v] = k
3810
3811
3811 for state, char, files in changestates:
3812 for state, char, files in changestates:
3812 if state in show:
3813 if state in show:
3813 format = "%s %%s%s" % (char, end)
3814 format = "%s %%s%s" % (char, end)
3814 if opts.get('no_status'):
3815 if opts.get('no_status'):
3815 format = "%%s%s" % end
3816 format = "%%s%s" % end
3816
3817
3817 for f in files:
3818 for f in files:
3818 ui.write(format % repo.pathto(f, cwd),
3819 ui.write(format % repo.pathto(f, cwd),
3819 label='status.' + state)
3820 label='status.' + state)
3820 if f in copy:
3821 if f in copy:
3821 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3822 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3822 label='status.copied')
3823 label='status.copied')
3823
3824
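Each ``status`` line above is built from a per-state format string (``'M %s\n'`` and so on): ``--print0`` swaps the newline for a NUL byte, ``--no-status`` drops the leading code, and copy sources get an extra indented line. A compact sketch of that formatting for one state bucket, with a hypothetical helper and a dict in place of the real copy map:

def format_status(char, files, copies, end='\n', no_status=False):
    # Illustrative sketch only -- not Mercurial API.
    fmt = '%%s%s' % end if no_status else '%s %%s%s' % (char, end)
    out = []
    for f in files:
        out.append(fmt % f)
        if f in copies:
            out.append(' %s%s' % (copies[f], end))
    return ''.join(out)

assert format_status('A', ['new.txt'], {'new.txt': 'old.txt'}) == 'A new.txt\n old.txt\n'
assert format_status('M', ['a', 'b'], {}, end='\0', no_status=True) == 'a\0b\0'
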
3824 def summary(ui, repo, **opts):
3825 def summary(ui, repo, **opts):
3825 """summarize working directory state
3826 """summarize working directory state
3826
3827
3827 This generates a brief summary of the working directory state,
3828 This generates a brief summary of the working directory state,
3828 including parents, branch, commit status, and available updates.
3829 including parents, branch, commit status, and available updates.
3829
3830
3830 With the --remote option, this will check the default paths for
3831 With the --remote option, this will check the default paths for
3831 incoming and outgoing changes. This can be time-consuming.
3832 incoming and outgoing changes. This can be time-consuming.
3832
3833
3833 Returns 0 on success.
3834 Returns 0 on success.
3834 """
3835 """
3835
3836
3836 ctx = repo[None]
3837 ctx = repo[None]
3837 parents = ctx.parents()
3838 parents = ctx.parents()
3838 pnode = parents[0].node()
3839 pnode = parents[0].node()
3839
3840
3840 for p in parents:
3841 for p in parents:
3841 # label with log.changeset (instead of log.parent) since this
3842 # label with log.changeset (instead of log.parent) since this
3842 # shows a working directory parent *changeset*:
3843 # shows a working directory parent *changeset*:
3843 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3844 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3844 label='log.changeset')
3845 label='log.changeset')
3845 ui.write(' '.join(p.tags()), label='log.tag')
3846 ui.write(' '.join(p.tags()), label='log.tag')
3846 if p.bookmarks():
3847 if p.bookmarks():
3847 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3848 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3848 if p.rev() == -1:
3849 if p.rev() == -1:
3849 if not len(repo):
3850 if not len(repo):
3850 ui.write(_(' (empty repository)'))
3851 ui.write(_(' (empty repository)'))
3851 else:
3852 else:
3852 ui.write(_(' (no revision checked out)'))
3853 ui.write(_(' (no revision checked out)'))
3853 ui.write('\n')
3854 ui.write('\n')
3854 if p.description():
3855 if p.description():
3855 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3856 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3856 label='log.summary')
3857 label='log.summary')
3857
3858
3858 branch = ctx.branch()
3859 branch = ctx.branch()
3859 bheads = repo.branchheads(branch)
3860 bheads = repo.branchheads(branch)
3860 m = _('branch: %s\n') % branch
3861 m = _('branch: %s\n') % branch
3861 if branch != 'default':
3862 if branch != 'default':
3862 ui.write(m, label='log.branch')
3863 ui.write(m, label='log.branch')
3863 else:
3864 else:
3864 ui.status(m, label='log.branch')
3865 ui.status(m, label='log.branch')
3865
3866
3866 st = list(repo.status(unknown=True))[:6]
3867 st = list(repo.status(unknown=True))[:6]
3867
3868
3868 c = repo.dirstate.copies()
3869 c = repo.dirstate.copies()
3869 copied, renamed = [], []
3870 copied, renamed = [], []
3870 for d, s in c.iteritems():
3871 for d, s in c.iteritems():
3871 if s in st[2]:
3872 if s in st[2]:
3872 st[2].remove(s)
3873 st[2].remove(s)
3873 renamed.append(d)
3874 renamed.append(d)
3874 else:
3875 else:
3875 copied.append(d)
3876 copied.append(d)
3876 if d in st[1]:
3877 if d in st[1]:
3877 st[1].remove(d)
3878 st[1].remove(d)
3878 st.insert(3, renamed)
3879 st.insert(3, renamed)
3879 st.insert(4, copied)
3880 st.insert(4, copied)
3880
3881
3881 ms = mergemod.mergestate(repo)
3882 ms = mergemod.mergestate(repo)
3882 st.append([f for f in ms if ms[f] == 'u'])
3883 st.append([f for f in ms if ms[f] == 'u'])
3883
3884
3884 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3885 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3885 st.append(subs)
3886 st.append(subs)
3886
3887
3887 labels = [ui.label(_('%d modified'), 'status.modified'),
3888 labels = [ui.label(_('%d modified'), 'status.modified'),
3888 ui.label(_('%d added'), 'status.added'),
3889 ui.label(_('%d added'), 'status.added'),
3889 ui.label(_('%d removed'), 'status.removed'),
3890 ui.label(_('%d removed'), 'status.removed'),
3890 ui.label(_('%d renamed'), 'status.copied'),
3891 ui.label(_('%d renamed'), 'status.copied'),
3891 ui.label(_('%d copied'), 'status.copied'),
3892 ui.label(_('%d copied'), 'status.copied'),
3892 ui.label(_('%d deleted'), 'status.deleted'),
3893 ui.label(_('%d deleted'), 'status.deleted'),
3893 ui.label(_('%d unknown'), 'status.unknown'),
3894 ui.label(_('%d unknown'), 'status.unknown'),
3894 ui.label(_('%d ignored'), 'status.ignored'),
3895 ui.label(_('%d ignored'), 'status.ignored'),
3895 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3896 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3896 ui.label(_('%d subrepos'), 'status.modified')]
3897 ui.label(_('%d subrepos'), 'status.modified')]
3897 t = []
3898 t = []
3898 for s, l in zip(st, labels):
3899 for s, l in zip(st, labels):
3899 if s:
3900 if s:
3900 t.append(l % len(s))
3901 t.append(l % len(s))
3901
3902
3902 t = ', '.join(t)
3903 t = ', '.join(t)
3903 cleanworkdir = False
3904 cleanworkdir = False
3904
3905
3905 if len(parents) > 1:
3906 if len(parents) > 1:
3906 t += _(' (merge)')
3907 t += _(' (merge)')
3907 elif branch != parents[0].branch():
3908 elif branch != parents[0].branch():
3908 t += _(' (new branch)')
3909 t += _(' (new branch)')
3909 elif (parents[0].extra().get('close') and
3910 elif (parents[0].extra().get('close') and
3910 pnode in repo.branchheads(branch, closed=True)):
3911 pnode in repo.branchheads(branch, closed=True)):
3911 t += _(' (head closed)')
3912 t += _(' (head closed)')
3912 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3913 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3913 t += _(' (clean)')
3914 t += _(' (clean)')
3914 cleanworkdir = True
3915 cleanworkdir = True
3915 elif pnode not in bheads:
3916 elif pnode not in bheads:
3916 t += _(' (new branch head)')
3917 t += _(' (new branch head)')
3917
3918
3918 if cleanworkdir:
3919 if cleanworkdir:
3919 ui.status(_('commit: %s\n') % t.strip())
3920 ui.status(_('commit: %s\n') % t.strip())
3920 else:
3921 else:
3921 ui.write(_('commit: %s\n') % t.strip())
3922 ui.write(_('commit: %s\n') % t.strip())
3922
3923
3923 # all ancestors of branch heads - all ancestors of parent = new csets
3924 # all ancestors of branch heads - all ancestors of parent = new csets
3924 new = [0] * len(repo)
3925 new = [0] * len(repo)
3925 cl = repo.changelog
3926 cl = repo.changelog
3926 for a in [cl.rev(n) for n in bheads]:
3927 for a in [cl.rev(n) for n in bheads]:
3927 new[a] = 1
3928 new[a] = 1
3928 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3929 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3929 new[a] = 1
3930 new[a] = 1
3930 for a in [p.rev() for p in parents]:
3931 for a in [p.rev() for p in parents]:
3931 if a >= 0:
3932 if a >= 0:
3932 new[a] = 0
3933 new[a] = 0
3933 for a in cl.ancestors(*[p.rev() for p in parents]):
3934 for a in cl.ancestors(*[p.rev() for p in parents]):
3934 new[a] = 0
3935 new[a] = 0
3935 new = sum(new)
3936 new = sum(new)
3936
3937
3937 if new == 0:
3938 if new == 0:
3938 ui.status(_('update: (current)\n'))
3939 ui.status(_('update: (current)\n'))
3939 elif pnode not in bheads:
3940 elif pnode not in bheads:
3940 ui.write(_('update: %d new changesets (update)\n') % new)
3941 ui.write(_('update: %d new changesets (update)\n') % new)
3941 else:
3942 else:
3942 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3943 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3943 (new, len(bheads)))
3944 (new, len(bheads)))
3944
3945
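# --- illustrative sketch (standalone; hypothetical names, not hg API) -----
# The flag-array arithmetic above amounts to a set difference:
#   ancestors(branch heads, inclusive) - ancestors(wd parents, inclusive)
# which is the count behind the "update: N new changesets" line.  The same
# idea on a toy DAG, with sets instead of marker arrays:
def _sketch_ancestors(parents_of, revs):
    seen, stack = set(), [r for r in revs if r >= 0]
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(p for p in parents_of[r] if p >= 0)
    return seen
_dag = {0: [], 1: [0], 2: [1], 3: [1]}          # hypothetical 4-changeset history
_new = _sketch_ancestors(_dag, [2, 3]) - _sketch_ancestors(_dag, [1])
assert len(_new) == 2                            # "update: 2 new changesets"
# --------------------------------------------------------------------------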
3945 if opts.get('remote'):
3946 if opts.get('remote'):
3946 t = []
3947 t = []
3947 source, branches = hg.parseurl(ui.expandpath('default'))
3948 source, branches = hg.parseurl(ui.expandpath('default'))
3948 other = hg.repository(hg.remoteui(repo, {}), source)
3949 other = hg.repository(hg.remoteui(repo, {}), source)
3949 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3950 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3950 ui.debug('comparing with %s\n' % url.hidepassword(source))
3951 ui.debug('comparing with %s\n' % url.hidepassword(source))
3951 repo.ui.pushbuffer()
3952 repo.ui.pushbuffer()
3952 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3953 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3953 repo.ui.popbuffer()
3954 repo.ui.popbuffer()
3954 if incoming:
3955 if incoming:
3955 t.append(_('1 or more incoming'))
3956 t.append(_('1 or more incoming'))
3956
3957
3957 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3958 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3958 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3959 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3959 other = hg.repository(hg.remoteui(repo, {}), dest)
3960 other = hg.repository(hg.remoteui(repo, {}), dest)
3960 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3961 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3961 repo.ui.pushbuffer()
3962 repo.ui.pushbuffer()
3962 o = discovery.findoutgoing(repo, other)
3963 o = discovery.findoutgoing(repo, other)
3963 repo.ui.popbuffer()
3964 repo.ui.popbuffer()
3964 o = repo.changelog.nodesbetween(o, None)[0]
3965 o = repo.changelog.nodesbetween(o, None)[0]
3965 if o:
3966 if o:
3966 t.append(_('%d outgoing') % len(o))
3967 t.append(_('%d outgoing') % len(o))
3967 if 'bookmarks' in other.listkeys('namespaces'):
3968 if 'bookmarks' in other.listkeys('namespaces'):
3968 lmarks = repo.listkeys('bookmarks')
3969 lmarks = repo.listkeys('bookmarks')
3969 rmarks = other.listkeys('bookmarks')
3970 rmarks = other.listkeys('bookmarks')
3970 diff = set(rmarks) - set(lmarks)
3971 diff = set(rmarks) - set(lmarks)
3971 if len(diff) > 0:
3972 if len(diff) > 0:
3972 t.append(_('%d incoming bookmarks') % len(diff))
3973 t.append(_('%d incoming bookmarks') % len(diff))
3973 diff = set(lmarks) - set(rmarks)
3974 diff = set(lmarks) - set(rmarks)
3974 if len(diff) > 0:
3975 if len(diff) > 0:
3975 t.append(_('%d outgoing bookmarks') % len(diff))
3976 t.append(_('%d outgoing bookmarks') % len(diff))
3976
3977
3977 if t:
3978 if t:
3978 ui.write(_('remote: %s\n') % (', '.join(t)))
3979 ui.write(_('remote: %s\n') % (', '.join(t)))
3979 else:
3980 else:
3980 ui.status(_('remote: (synced)\n'))
3981 ui.status(_('remote: (synced)\n'))
3981
3982
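# --- illustrative sketch (standalone; hypothetical data, not hg API) ------
# The remote summary above compares bookmarks purely by name, taking set
# differences over the two listkeys('bookmarks') results.  Standalone
# illustration with made-up bookmark maps:
_local  = {'feature-x': 'abc123', 'stable': 'def456'}
_remote = {'feature-x': 'abc123', 'hotfix': '789aaa'}
_incoming_marks = set(_remote) - set(_local)     # set(['hotfix'])
_outgoing_marks = set(_local) - set(_remote)     # set(['stable'])
# Only the presence of a name is compared here; a bookmark that exists on
# both sides but points at different nodes is not reported by this check.
# --------------------------------------------------------------------------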
3982 def tag(ui, repo, name1, *names, **opts):
3983 def tag(ui, repo, name1, *names, **opts):
3983 """add one or more tags for the current or given revision
3984 """add one or more tags for the current or given revision
3984
3985
3985 Name a particular revision using <name>.
3986 Name a particular revision using <name>.
3986
3987
3987 Tags are used to name particular revisions of the repository and are
3988 Tags are used to name particular revisions of the repository and are
3988 useful for comparing different revisions, for returning to significant
3989 useful for comparing different revisions, for returning to significant
3989 earlier versions, or for marking branch points as releases. Changing
3990 earlier versions, or for marking branch points as releases. Changing
3990 an existing tag is normally disallowed; use -f/--force to override.
3991 an existing tag is normally disallowed; use -f/--force to override.
3991
3992
3992 If no revision is given, the parent of the working directory is
3993 If no revision is given, the parent of the working directory is
3993 used, or tip if no revision is checked out.
3994 used, or tip if no revision is checked out.
3994
3995
3995 To facilitate version control, distribution, and merging of tags,
3996 To facilitate version control, distribution, and merging of tags,
3996 they are stored as a file named ".hgtags" which is managed similarly
3997 they are stored as a file named ".hgtags" which is managed similarly
3997 to other project files and can be hand-edited if necessary. This
3998 to other project files and can be hand-edited if necessary. This
3998 also means that tagging creates a new commit. The file
3999 also means that tagging creates a new commit. The file
3999 ".hg/localtags" is used for local tags (not shared among
4000 ".hg/localtags" is used for local tags (not shared among
4000 repositories).
4001 repositories).
4001
4002
4002 Tag commits are usually made at the head of a branch. If the parent
4003 Tag commits are usually made at the head of a branch. If the parent
4003 of the working directory is not a branch head, :hg:`tag` aborts; use
4004 of the working directory is not a branch head, :hg:`tag` aborts; use
4004 -f/--force to force the tag commit to be based on a non-head
4005 -f/--force to force the tag commit to be based on a non-head
4005 changeset.
4006 changeset.
4006
4007
4007 See :hg:`help dates` for a list of formats valid for -d/--date.
4008 See :hg:`help dates` for a list of formats valid for -d/--date.
4008
4009
4009 Since tag names have priority over branch names during revision
4010 Since tag names have priority over branch names during revision
4010 lookup, using an existing branch name as a tag name is discouraged.
4011 lookup, using an existing branch name as a tag name is discouraged.
4011
4012
4012 Returns 0 on success.
4013 Returns 0 on success.
4013 """
4014 """
4014
4015
4015 rev_ = "."
4016 rev_ = "."
4016 names = [t.strip() for t in (name1,) + names]
4017 names = [t.strip() for t in (name1,) + names]
4017 if len(names) != len(set(names)):
4018 if len(names) != len(set(names)):
4018 raise util.Abort(_('tag names must be unique'))
4019 raise util.Abort(_('tag names must be unique'))
4019 for n in names:
4020 for n in names:
4020 if n in ['tip', '.', 'null']:
4021 if n in ['tip', '.', 'null']:
4021 raise util.Abort(_('the name \'%s\' is reserved') % n)
4022 raise util.Abort(_('the name \'%s\' is reserved') % n)
4022 if not n:
4023 if not n:
4023 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4024 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4024 if opts.get('rev') and opts.get('remove'):
4025 if opts.get('rev') and opts.get('remove'):
4025 raise util.Abort(_("--rev and --remove are incompatible"))
4026 raise util.Abort(_("--rev and --remove are incompatible"))
4026 if opts.get('rev'):
4027 if opts.get('rev'):
4027 rev_ = opts['rev']
4028 rev_ = opts['rev']
4028 message = opts.get('message')
4029 message = opts.get('message')
4029 if opts.get('remove'):
4030 if opts.get('remove'):
4030 expectedtype = opts.get('local') and 'local' or 'global'
4031 expectedtype = opts.get('local') and 'local' or 'global'
4031 for n in names:
4032 for n in names:
4032 if not repo.tagtype(n):
4033 if not repo.tagtype(n):
4033 raise util.Abort(_('tag \'%s\' does not exist') % n)
4034 raise util.Abort(_('tag \'%s\' does not exist') % n)
4034 if repo.tagtype(n) != expectedtype:
4035 if repo.tagtype(n) != expectedtype:
4035 if expectedtype == 'global':
4036 if expectedtype == 'global':
4036 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4037 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4037 else:
4038 else:
4038 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4039 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4039 rev_ = nullid
4040 rev_ = nullid
4040 if not message:
4041 if not message:
4041 # we don't translate commit messages
4042 # we don't translate commit messages
4042 message = 'Removed tag %s' % ', '.join(names)
4043 message = 'Removed tag %s' % ', '.join(names)
4043 elif not opts.get('force'):
4044 elif not opts.get('force'):
4044 for n in names:
4045 for n in names:
4045 if n in repo.tags():
4046 if n in repo.tags():
4046 raise util.Abort(_('tag \'%s\' already exists '
4047 raise util.Abort(_('tag \'%s\' already exists '
4047 '(use -f to force)') % n)
4048 '(use -f to force)') % n)
4048 if not opts.get('local'):
4049 if not opts.get('local'):
4049 p1, p2 = repo.dirstate.parents()
4050 p1, p2 = repo.dirstate.parents()
4050 if p2 != nullid:
4051 if p2 != nullid:
4051 raise util.Abort(_('uncommitted merge'))
4052 raise util.Abort(_('uncommitted merge'))
4052 bheads = repo.branchheads()
4053 bheads = repo.branchheads()
4053 if not opts.get('force') and bheads and p1 not in bheads:
4054 if not opts.get('force') and bheads and p1 not in bheads:
4054 raise util.Abort(_('not at a branch head (use -f to force)'))
4055 raise util.Abort(_('not at a branch head (use -f to force)'))
4055 r = cmdutil.revsingle(repo, rev_).node()
4056 r = cmdutil.revsingle(repo, rev_).node()
4056
4057
4057 if not message:
4058 if not message:
4058 # we don't translate commit messages
4059 # we don't translate commit messages
4059 message = ('Added tag %s for changeset %s' %
4060 message = ('Added tag %s for changeset %s' %
4060 (', '.join(names), short(r)))
4061 (', '.join(names), short(r)))
4061
4062
4062 date = opts.get('date')
4063 date = opts.get('date')
4063 if date:
4064 if date:
4064 date = util.parsedate(date)
4065 date = util.parsedate(date)
4065
4066
4066 if opts.get('edit'):
4067 if opts.get('edit'):
4067 message = ui.edit(message, ui.username())
4068 message = ui.edit(message, ui.username())
4068
4069
4069 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4070 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4070
4071
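# --- illustrative sketch (standalone; hypothetical names, not hg API) -----
# The name validation at the top of tag() reduces to three rules; here is a
# self-contained restatement of the same checks:
def _sketch_check_tag_names(names):
    names = [n.strip() for n in names]
    if len(names) != len(set(names)):
        raise ValueError('tag names must be unique')
    for n in names:
        if n in ('tip', '.', 'null'):
            raise ValueError("the name '%s' is reserved" % n)
        if not n:
            raise ValueError('tag names cannot consist entirely of whitespace')
    return names
# Typical command lines exercising the options handled above:
#   hg tag v1.0                  (tag the working directory's parent)
#   hg tag -r 42 v1.0            (tag revision 42)
#   hg tag --remove v1.0         (remove an existing global tag)
#   hg tag --local snapshot      (local tag, stored in .hg/localtags)
# --------------------------------------------------------------------------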
4071 def tags(ui, repo):
4072 def tags(ui, repo):
4072 """list repository tags
4073 """list repository tags
4073
4074
4074 This lists both regular and local tags. When the -v/--verbose
4075 This lists both regular and local tags. When the -v/--verbose
4075 switch is used, a third column "local" is printed for local tags.
4076 switch is used, a third column "local" is printed for local tags.
4076
4077
4077 Returns 0 on success.
4078 Returns 0 on success.
4078 """
4079 """
4079
4080
4080 hexfunc = ui.debugflag and hex or short
4081 hexfunc = ui.debugflag and hex or short
4081 tagtype = ""
4082 tagtype = ""
4082
4083
4083 for t, n in reversed(repo.tagslist()):
4084 for t, n in reversed(repo.tagslist()):
4084 if ui.quiet:
4085 if ui.quiet:
4085 ui.write("%s\n" % t)
4086 ui.write("%s\n" % t)
4086 continue
4087 continue
4087
4088
4088 hn = hexfunc(n)
4089 hn = hexfunc(n)
4089 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4090 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4090 spaces = " " * (30 - encoding.colwidth(t))
4091 spaces = " " * (30 - encoding.colwidth(t))
4091
4092
4092 if ui.verbose:
4093 if ui.verbose:
4093 if repo.tagtype(t) == 'local':
4094 if repo.tagtype(t) == 'local':
4094 tagtype = " local"
4095 tagtype = " local"
4095 else:
4096 else:
4096 tagtype = ""
4097 tagtype = ""
4097 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4098 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4098
4099
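# --- illustrative sketch (standalone; hypothetical names, not hg API) -----
# Each verbose line above is "<name><padding> <rev>:<shorthash>[ local]",
# with the name padded to a 30-column field (the real code measures width
# via encoding.colwidth so wide characters line up too).  A plain-ASCII
# approximation of the formatting:
def _sketch_tag_line(name, rev, shorthash, tagtype=''):
    spaces = ' ' * (30 - len(name))
    return '%s%s %5d:%s%s' % (name, spaces, rev, shorthash, tagtype)
# _sketch_tag_line('tip', 42, '1d4099e3cf9a') pads 'tip' out to column 30
# and then appends '   42:1d4099e3cf9a'.
# --------------------------------------------------------------------------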
4099 def tip(ui, repo, **opts):
4100 def tip(ui, repo, **opts):
4100 """show the tip revision
4101 """show the tip revision
4101
4102
4102 The tip revision (usually just called the tip) is the changeset
4103 The tip revision (usually just called the tip) is the changeset
4103 most recently added to the repository (and therefore the most
4104 most recently added to the repository (and therefore the most
4104 recently changed head).
4105 recently changed head).
4105
4106
4106 If you have just made a commit, that commit will be the tip. If
4107 If you have just made a commit, that commit will be the tip. If
4107 you have just pulled changes from another repository, the tip of
4108 you have just pulled changes from another repository, the tip of
4108 that repository becomes the current tip. The "tip" tag is special
4109 that repository becomes the current tip. The "tip" tag is special
4109 and cannot be renamed or assigned to a different changeset.
4110 and cannot be renamed or assigned to a different changeset.
4110
4111
4111 Returns 0 on success.
4112 Returns 0 on success.
4112 """
4113 """
4113 displayer = cmdutil.show_changeset(ui, repo, opts)
4114 displayer = cmdutil.show_changeset(ui, repo, opts)
4114 displayer.show(repo[len(repo) - 1])
4115 displayer.show(repo[len(repo) - 1])
4115 displayer.close()
4116 displayer.close()
4116
4117
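# --- illustrative sketch (standalone; hypothetical data, not hg API) ------
# tip() relies on a repository being sized with len() and indexable by local
# revision number, so the newest changeset is simply the last index.  With a
# plain list standing in for the revision sequence:
_revs = ['9117c6561b0b', '0d2164f0ce0d', '1d4099e3cf9a']
assert _revs[len(_revs) - 1] == _revs[-1]   # same shape as repo[len(repo) - 1]
# --------------------------------------------------------------------------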
4117 def unbundle(ui, repo, fname1, *fnames, **opts):
4118 def unbundle(ui, repo, fname1, *fnames, **opts):
4118 """apply one or more changegroup files
4119 """apply one or more changegroup files
4119
4120
4120 Apply one or more compressed changegroup files generated by the
4121 Apply one or more compressed changegroup files generated by the
4121 bundle command.
4122 bundle command.
4122
4123
4123 Returns 0 on success, 1 if an update has unresolved files.
4124 Returns 0 on success, 1 if an update has unresolved files.
4124 """
4125 """
4125 fnames = (fname1,) + fnames
4126 fnames = (fname1,) + fnames
4126
4127
4127 lock = repo.lock()
4128 lock = repo.lock()
4128 wc = repo['.']
4129 wc = repo['.']
4129 try:
4130 try:
4130 for fname in fnames:
4131 for fname in fnames:
4131 f = url.open(ui, fname)
4132 f = url.open(ui, fname)
4132 gen = changegroup.readbundle(f, fname)
4133 gen = changegroup.readbundle(f, fname)
4133 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4134 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4134 lock=lock)
4135 lock=lock)
4135 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4136 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4136 finally:
4137 finally:
4137 lock.release()
4138 lock.release()
4138 return postincoming(ui, repo, modheads, opts.get('update'), None)
4139 return postincoming(ui, repo, modheads, opts.get('update'), None)
4139
4140
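# --- illustrative sketch (standalone; hypothetical names, not hg API) -----
# unbundle() wraps the whole loop in repo.lock() / try / finally-release so
# the lock is dropped even when reading or applying a bundle raises.  The
# same shape in miniature, with a dummy lock object standing in:
class _SketchLock(object):
    released = False
    def release(self):
        self.released = True
def _sketch_apply_all(items, process):
    lock = _SketchLock()                  # stands in for repo.lock()
    try:
        for item in items:
            process(item)
    finally:
        lock.release()                    # runs even if process() raised
    return lock.released
# _sketch_apply_all(['a.hg', 'b.hg'], lambda fname: None) returns True
# --------------------------------------------------------------------------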
4140 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4141 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4141 """update working directory (or switch revisions)
4142 """update working directory (or switch revisions)
4142
4143
4143 Update the repository's working directory to the specified
4144 Update the repository's working directory to the specified
4144 changeset. If no changeset is specified, update to the tip of the
4145 changeset. If no changeset is specified, update to the tip of the
4145 current named branch.
4146 current named branch.
4146
4147
4147 If the changeset is not a descendant of the working directory's
4148 If the changeset is not a descendant of the working directory's
4148 parent, the update is aborted. With the -c/--check option, the
4149 parent, the update is aborted. With the -c/--check option, the
4149 working directory is checked for uncommitted changes; if none are
4150 working directory is checked for uncommitted changes; if none are
4150 found, the working directory is updated to the specified
4151 found, the working directory is updated to the specified
4151 changeset.
4152 changeset.
4152
4153
4153 The following rules apply when the working directory contains
4154 The following rules apply when the working directory contains
4154 uncommitted changes:
4155 uncommitted changes:
4155
4156
4156 1. If neither -c/--check nor -C/--clean is specified, and if
4157 1. If neither -c/--check nor -C/--clean is specified, and if
4157 the requested changeset is an ancestor or descendant of
4158 the requested changeset is an ancestor or descendant of
4158 the working directory's parent, the uncommitted changes
4159 the working directory's parent, the uncommitted changes
4159 are merged into the requested changeset and the merged
4160 are merged into the requested changeset and the merged
4160 result is left uncommitted. If the requested changeset is
4161 result is left uncommitted. If the requested changeset is
4161 not an ancestor or descendant (that is, it is on another
4162 not an ancestor or descendant (that is, it is on another
4162 branch), the update is aborted and the uncommitted changes
4163 branch), the update is aborted and the uncommitted changes
4163 are preserved.
4164 are preserved.
4164
4165
4165 2. With the -c/--check option, the update is aborted and the
4166 2. With the -c/--check option, the update is aborted and the
4166 uncommitted changes are preserved.
4167 uncommitted changes are preserved.
4167
4168
4168 3. With the -C/--clean option, uncommitted changes are discarded and
4169 3. With the -C/--clean option, uncommitted changes are discarded and
4169 the working directory is updated to the requested changeset.
4170 the working directory is updated to the requested changeset.
4170
4171
4171 Use null as the changeset to remove the working directory (like
4172 Use null as the changeset to remove the working directory (like
4172 :hg:`clone -U`).
4173 :hg:`clone -U`).
4173
4174
4174 If you want to update just one file to an older changeset, use
4175 If you want to update just one file to an older changeset, use
4175 :hg:`revert`.
4176 :hg:`revert`.
4176
4177
4177 See :hg:`help dates` for a list of formats valid for -d/--date.
4178 See :hg:`help dates` for a list of formats valid for -d/--date.
4178
4179
4179 Returns 0 on success, 1 if there are unresolved files.
4180 Returns 0 on success, 1 if there are unresolved files.
4180 """
4181 """
4181 if rev and node:
4182 if rev and node:
4182 raise util.Abort(_("please specify just one revision"))
4183 raise util.Abort(_("please specify just one revision"))
4183
4184
4184 if rev is None or rev == '':
4185 if rev is None or rev == '':
4185 rev = node
4186 rev = node
4186
4187
4187 # if we defined a bookmark, we have to remember the original bookmark name
4188 # if we defined a bookmark, we have to remember the original bookmark name
4188 brev = rev
4189 brev = rev
4189 rev = cmdutil.revsingle(repo, rev, rev).rev()
4190 rev = cmdutil.revsingle(repo, rev, rev).rev()
4190
4191
4191 if check and clean:
4192 if check and clean:
4192 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4193 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4193
4194
4194 if check:
4195 if check:
4195 # we could use dirty() but we can ignore merge and branch trivia
4196 # we could use dirty() but we can ignore merge and branch trivia
4196 c = repo[None]
4197 c = repo[None]
4197 if c.modified() or c.added() or c.removed():
4198 if c.modified() or c.added() or c.removed():
4198 raise util.Abort(_("uncommitted local changes"))
4199 raise util.Abort(_("uncommitted local changes"))
4199
4200
4200 if date:
4201 if date:
4201 if rev is not None:
4202 if rev is not None:
4202 raise util.Abort(_("you can't specify a revision and a date"))
4203 raise util.Abort(_("you can't specify a revision and a date"))
4203 rev = cmdutil.finddate(ui, repo, date)
4204 rev = cmdutil.finddate(ui, repo, date)
4204
4205
4205 if clean or check:
4206 if clean or check:
4206 ret = hg.clean(repo, rev)
4207 ret = hg.clean(repo, rev)
4207 else:
4208 else:
4208 ret = hg.update(repo, rev)
4209 ret = hg.update(repo, rev)
4209
4210
4210 if brev in repo._bookmarks:
4211 if brev in repo._bookmarks:
4211 bookmarks.setcurrent(repo, brev)
4212 bookmarks.setcurrent(repo, brev)
4212
4213
4213 return ret
4214 return ret
4214
4215
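# --- illustrative sketch (standalone; hypothetical names, not hg API) -----
# The option handling in update() boils down to a small decision table:
# -C/--clean together with -c/--check is an error; -c refuses a dirty
# working copy and then, like -C, takes the hg.clean() path; everything
# else takes the merging hg.update() path.  Restated standalone:
def _sketch_update_mode(clean, check, dirty):
    if clean and check:
        raise ValueError('cannot specify both -c/--check and -C/--clean')
    if check and dirty:
        raise ValueError('uncommitted local changes')
    return 'clean' if (clean or check) else 'merge'
# _sketch_update_mode(clean=False, check=True, dirty=False) -> 'clean'
# _sketch_update_mode(clean=False, check=False, dirty=True) -> 'merge'
# --------------------------------------------------------------------------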
4215 def verify(ui, repo):
4216 def verify(ui, repo):
4216 """verify the integrity of the repository
4217 """verify the integrity of the repository
4217
4218
4218 Verify the integrity of the current repository.
4219 Verify the integrity of the current repository.
4219
4220
4220 This will perform an extensive check of the repository's
4221 This will perform an extensive check of the repository's
4221 integrity, validating the hashes and checksums of each entry in
4222 integrity, validating the hashes and checksums of each entry in
4222 the changelog, manifest, and tracked files, as well as the
4223 the changelog, manifest, and tracked files, as well as the
4223 integrity of their crosslinks and indices.
4224 integrity of their crosslinks and indices.
4224
4225
4225 Returns 0 on success, 1 if errors are encountered.
4226 Returns 0 on success, 1 if errors are encountered.
4226 """
4227 """
4227 return hg.verify(repo)
4228 return hg.verify(repo)
4228
4229
4229 def version_(ui):
4230 def version_(ui):
4230 """output version and copyright information"""
4231 """output version and copyright information"""
4231 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4232 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4232 % util.version())
4233 % util.version())
4233 ui.status(_(
4234 ui.status(_(
4234 "(see http://mercurial.selenic.com for more information)\n"
4235 "(see http://mercurial.selenic.com for more information)\n"
4235 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4236 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4236 "This is free software; see the source for copying conditions. "
4237 "This is free software; see the source for copying conditions. "
4237 "There is NO\nwarranty; "
4238 "There is NO\nwarranty; "
4238 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4239 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4239 ))
4240 ))
4240
4241
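# --- illustrative sketch (standalone; hypothetical class, not hg API) -----
# version_() above, like the summary output earlier in this file, splits its
# output between ui.write() (always shown) and ui.status() (suppressed under
# --quiet).  A minimal stand-in that makes the difference concrete:
import sys
class _SketchUI(object):
    def __init__(self, quiet=False):
        self.quiet = quiet
    def write(self, msg):
        sys.stdout.write(msg)
    def status(self, msg):
        if not self.quiet:
            sys.stdout.write(msg)
# _SketchUI(quiet=True).status('copyright blurb\n') prints nothing, while
# _SketchUI(quiet=True).write('version line\n') still prints.
# --------------------------------------------------------------------------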
4241 # Command options and aliases are listed here, alphabetically
4242 # Command options and aliases are listed here, alphabetically
4242
4243
4243 globalopts = [
4244 globalopts = [
4244 ('R', 'repository', '',
4245 ('R', 'repository', '',
4245 _('repository root directory or name of overlay bundle file'),
4246 _('repository root directory or name of overlay bundle file'),
4246 _('REPO')),
4247 _('REPO')),
4247 ('', 'cwd', '',
4248 ('', 'cwd', '',
4248 _('change working directory'), _('DIR')),
4249 _('change working directory'), _('DIR')),
4249 ('y', 'noninteractive', None,
4250 ('y', 'noninteractive', None,
4250 _('do not prompt, assume \'yes\' for any required answers')),
4251 _('do not prompt, assume \'yes\' for any required answers')),
4251 ('q', 'quiet', None, _('suppress output')),
4252 ('q', 'quiet', None, _('suppress output')),
4252 ('v', 'verbose', None, _('enable additional output')),
4253 ('v', 'verbose', None, _('enable additional output')),
4253 ('', 'config', [],
4254 ('', 'config', [],
4254 _('set/override config option (use \'section.name=value\')'),
4255 _('set/override config option (use \'section.name=value\')'),
4255 _('CONFIG')),
4256 _('CONFIG')),
4256 ('', 'debug', None, _('enable debugging output')),
4257 ('', 'debug', None, _('enable debugging output')),
4257 ('', 'debugger', None, _('start debugger')),
4258 ('', 'debugger', None, _('start debugger')),
4258 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4259 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4259 _('ENCODE')),
4260 _('ENCODE')),
4260 ('', 'encodingmode', encoding.encodingmode,
4261 ('', 'encodingmode', encoding.encodingmode,
4261 _('set the charset encoding mode'), _('MODE')),
4262 _('set the charset encoding mode'), _('MODE')),
4262 ('', 'traceback', None, _('always print a traceback on exception')),
4263 ('', 'traceback', None, _('always print a traceback on exception')),
4263 ('', 'time', None, _('time how long the command takes')),
4264 ('', 'time', None, _('time how long the command takes')),
4264 ('', 'profile', None, _('print command execution profile')),
4265 ('', 'profile', None, _('print command execution profile')),
4265 ('', 'version', None, _('output version information and exit')),
4266 ('', 'version', None, _('output version information and exit')),
4266 ('h', 'help', None, _('display help and exit')),
4267 ('h', 'help', None, _('display help and exit')),
4267 ]
4268 ]
4268
4269
4269 dryrunopts = [('n', 'dry-run', None,
4270 dryrunopts = [('n', 'dry-run', None,
4270 _('do not perform actions, just print output'))]
4271 _('do not perform actions, just print output'))]
4271
4272
4272 remoteopts = [
4273 remoteopts = [
4273 ('e', 'ssh', '',
4274 ('e', 'ssh', '',
4274 _('specify ssh command to use'), _('CMD')),
4275 _('specify ssh command to use'), _('CMD')),
4275 ('', 'remotecmd', '',
4276 ('', 'remotecmd', '',
4276 _('specify hg command to run on the remote side'), _('CMD')),
4277 _('specify hg command to run on the remote side'), _('CMD')),
4277 ('', 'insecure', None,
4278 ('', 'insecure', None,
4278 _('do not verify server certificate (ignoring web.cacerts config)')),
4279 _('do not verify server certificate (ignoring web.cacerts config)')),
4279 ]
4280 ]
4280
4281
4281 walkopts = [
4282 walkopts = [
4282 ('I', 'include', [],
4283 ('I', 'include', [],
4283 _('include names matching the given patterns'), _('PATTERN')),
4284 _('include names matching the given patterns'), _('PATTERN')),
4284 ('X', 'exclude', [],
4285 ('X', 'exclude', [],
4285 _('exclude names matching the given patterns'), _('PATTERN')),
4286 _('exclude names matching the given patterns'), _('PATTERN')),
4286 ]
4287 ]
4287
4288
4288 commitopts = [
4289 commitopts = [
4289 ('m', 'message', '',
4290 ('m', 'message', '',
4290 _('use text as commit message'), _('TEXT')),
4291 _('use text as commit message'), _('TEXT')),
4291 ('l', 'logfile', '',
4292 ('l', 'logfile', '',
4292 _('read commit message from file'), _('FILE')),
4293 _('read commit message from file'), _('FILE')),
4293 ]
4294 ]
4294
4295
4295 commitopts2 = [
4296 commitopts2 = [
4296 ('d', 'date', '',
4297 ('d', 'date', '',
4297 _('record datecode as commit date'), _('DATE')),
4298 _('record datecode as commit date'), _('DATE')),
4298 ('u', 'user', '',
4299 ('u', 'user', '',
4299 _('record the specified user as committer'), _('USER')),
4300 _('record the specified user as committer'), _('USER')),
4300 ]
4301 ]
4301
4302
4302 templateopts = [
4303 templateopts = [
4303 ('', 'style', '',
4304 ('', 'style', '',
4304 _('display using template map file'), _('STYLE')),
4305 _('display using template map file'), _('STYLE')),
4305 ('', 'template', '',
4306 ('', 'template', '',
4306 _('display with template'), _('TEMPLATE')),
4307 _('display with template'), _('TEMPLATE')),
4307 ]
4308 ]
4308
4309
4309 logopts = [
4310 logopts = [
4310 ('p', 'patch', None, _('show patch')),
4311 ('p', 'patch', None, _('show patch')),
4311 ('g', 'git', None, _('use git extended diff format')),
4312 ('g', 'git', None, _('use git extended diff format')),
4312 ('l', 'limit', '',
4313 ('l', 'limit', '',
4313 _('limit number of changes displayed'), _('NUM')),
4314 _('limit number of changes displayed'), _('NUM')),
4314 ('M', 'no-merges', None, _('do not show merges')),
4315 ('M', 'no-merges', None, _('do not show merges')),
4315 ('', 'stat', None, _('output diffstat-style summary of changes')),
4316 ('', 'stat', None, _('output diffstat-style summary of changes')),
4316 ] + templateopts
4317 ] + templateopts
4317
4318
4318 diffopts = [
4319 diffopts = [
4319 ('a', 'text', None, _('treat all files as text')),
4320 ('a', 'text', None, _('treat all files as text')),
4320 ('g', 'git', None, _('use git extended diff format')),
4321 ('g', 'git', None, _('use git extended diff format')),
4321 ('', 'nodates', None, _('omit dates from diff headers'))
4322 ('', 'nodates', None, _('omit dates from diff headers'))
4322 ]
4323 ]
4323
4324
4324 diffopts2 = [
4325 diffopts2 = [
4325 ('p', 'show-function', None, _('show which function each change is in')),
4326 ('p', 'show-function', None, _('show which function each change is in')),
4326 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4327 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4327 ('w', 'ignore-all-space', None,
4328 ('w', 'ignore-all-space', None,
4328 _('ignore white space when comparing lines')),
4329 _('ignore white space when comparing lines')),
4329 ('b', 'ignore-space-change', None,
4330 ('b', 'ignore-space-change', None,
4330 _('ignore changes in the amount of white space')),
4331 _('ignore changes in the amount of white space')),
4331 ('B', 'ignore-blank-lines', None,
4332 ('B', 'ignore-blank-lines', None,
4332 _('ignore changes whose lines are all blank')),
4333 _('ignore changes whose lines are all blank')),
4333 ('U', 'unified', '',
4334 ('U', 'unified', '',
4334 _('number of lines of context to show'), _('NUM')),
4335 _('number of lines of context to show'), _('NUM')),
4335 ('', 'stat', None, _('output diffstat-style summary of changes')),
4336 ('', 'stat', None, _('output diffstat-style summary of changes')),
4336 ]
4337 ]
4337
4338
4338 similarityopts = [
4339 similarityopts = [
4339 ('s', 'similarity', '',
4340 ('s', 'similarity', '',
4340 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4341 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4341 ]
4342 ]
4342
4343
4343 subrepoopts = [
4344 subrepoopts = [
4344 ('S', 'subrepos', None,
4345 ('S', 'subrepos', None,
4345 _('recurse into subrepositories'))
4346 _('recurse into subrepositories'))
4346 ]
4347 ]
4347
4348
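# --- illustrative sketch (standalone; hypothetical commands, not hg API) --
# Every entry in the table below maps a command name (with optional "|"
# aliases and a leading "^" marking commands shown in the short help list)
# to a (function, options, synopsis) tuple; each option tuple is
# (short, long, default, help[, valuename]).  A toy dispatcher over the
# same shape, independent of Mercurial:
import sys
def _sketch_greet(write, name='world'):
    write('hello %s\n' % name)
_sketch_table = {
    'greet': (_sketch_greet,
              [('n', 'name', 'world', 'whom to greet', 'NAME')],
              '[-n NAME]'),
}
def _sketch_dispatch(table, cmdname, write, **overrides):
    func, options, _synopsis = table[cmdname]
    opts = dict((opt[1], opt[2]) for opt in options)   # long name -> default
    opts.update(overrides)
    return func(write, **opts)
# _sketch_dispatch(_sketch_table, 'greet', sys.stdout.write, name='hg')
# prints "hello hg"
# --------------------------------------------------------------------------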
4348 table = {
4349 table = {
4349 "^add": (add, walkopts + subrepoopts + dryrunopts,
4350 "^add": (add, walkopts + subrepoopts + dryrunopts,
4350 _('[OPTION]... [FILE]...')),
4351 _('[OPTION]... [FILE]...')),
4351 "addremove":
4352 "addremove":
4352 (addremove, similarityopts + walkopts + dryrunopts,
4353 (addremove, similarityopts + walkopts + dryrunopts,
4353 _('[OPTION]... [FILE]...')),
4354 _('[OPTION]... [FILE]...')),
4354 "^annotate|blame":
4355 "^annotate|blame":
4355 (annotate,
4356 (annotate,
4356 [('r', 'rev', '',
4357 [('r', 'rev', '',
4357 _('annotate the specified revision'), _('REV')),
4358 _('annotate the specified revision'), _('REV')),
4358 ('', 'follow', None,
4359 ('', 'follow', None,
4359 _('follow copies/renames and list the filename (DEPRECATED)')),
4360 _('follow copies/renames and list the filename (DEPRECATED)')),
4360 ('', 'no-follow', None, _("don't follow copies and renames")),
4361 ('', 'no-follow', None, _("don't follow copies and renames")),
4361 ('a', 'text', None, _('treat all files as text')),
4362 ('a', 'text', None, _('treat all files as text')),
4362 ('u', 'user', None, _('list the author (long with -v)')),
4363 ('u', 'user', None, _('list the author (long with -v)')),
4363 ('f', 'file', None, _('list the filename')),
4364 ('f', 'file', None, _('list the filename')),
4364 ('d', 'date', None, _('list the date (short with -q)')),
4365 ('d', 'date', None, _('list the date (short with -q)')),
4365 ('n', 'number', None, _('list the revision number (default)')),
4366 ('n', 'number', None, _('list the revision number (default)')),
4366 ('c', 'changeset', None, _('list the changeset')),
4367 ('c', 'changeset', None, _('list the changeset')),
4367 ('l', 'line-number', None,
4368 ('l', 'line-number', None,
4368 _('show line number at the first appearance'))
4369 _('show line number at the first appearance'))
4369 ] + walkopts,
4370 ] + walkopts,
4370 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4371 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4371 "archive":
4372 "archive":
4372 (archive,
4373 (archive,
4373 [('', 'no-decode', None, _('do not pass files through decoders')),
4374 [('', 'no-decode', None, _('do not pass files through decoders')),
4374 ('p', 'prefix', '',
4375 ('p', 'prefix', '',
4375 _('directory prefix for files in archive'), _('PREFIX')),
4376 _('directory prefix for files in archive'), _('PREFIX')),
4376 ('r', 'rev', '',
4377 ('r', 'rev', '',
4377 _('revision to distribute'), _('REV')),
4378 _('revision to distribute'), _('REV')),
4378 ('t', 'type', '',
4379 ('t', 'type', '',
4379 _('type of distribution to create'), _('TYPE')),
4380 _('type of distribution to create'), _('TYPE')),
4380 ] + subrepoopts + walkopts,
4381 ] + subrepoopts + walkopts,
4381 _('[OPTION]... DEST')),
4382 _('[OPTION]... DEST')),
4382 "backout":
4383 "backout":
4383 (backout,
4384 (backout,
4384 [('', 'merge', None,
4385 [('', 'merge', None,
4385 _('merge with old dirstate parent after backout')),
4386 _('merge with old dirstate parent after backout')),
4386 ('', 'parent', '',
4387 ('', 'parent', '',
4387 _('parent to choose when backing out merge'), _('REV')),
4388 _('parent to choose when backing out merge'), _('REV')),
4388 ('t', 'tool', '',
4389 ('t', 'tool', '',
4389 _('specify merge tool')),
4390 _('specify merge tool')),
4390 ('r', 'rev', '',
4391 ('r', 'rev', '',
4391 _('revision to backout'), _('REV')),
4392 _('revision to backout'), _('REV')),
4392 ] + walkopts + commitopts + commitopts2,
4393 ] + walkopts + commitopts + commitopts2,
4393 _('[OPTION]... [-r] REV')),
4394 _('[OPTION]... [-r] REV')),
4394 "bisect":
4395 "bisect":
4395 (bisect,
4396 (bisect,
4396 [('r', 'reset', False, _('reset bisect state')),
4397 [('r', 'reset', False, _('reset bisect state')),
4397 ('g', 'good', False, _('mark changeset good')),
4398 ('g', 'good', False, _('mark changeset good')),
4398 ('b', 'bad', False, _('mark changeset bad')),
4399 ('b', 'bad', False, _('mark changeset bad')),
4399 ('s', 'skip', False, _('skip testing changeset')),
4400 ('s', 'skip', False, _('skip testing changeset')),
4400 ('e', 'extend', False, _('extend the bisect range')),
4401 ('e', 'extend', False, _('extend the bisect range')),
4401 ('c', 'command', '',
4402 ('c', 'command', '',
4402 _('use command to check changeset state'), _('CMD')),
4403 _('use command to check changeset state'), _('CMD')),
4403 ('U', 'noupdate', False, _('do not update to target'))],
4404 ('U', 'noupdate', False, _('do not update to target'))],
4404 _("[-gbsr] [-U] [-c CMD] [REV]")),
4405 _("[-gbsr] [-U] [-c CMD] [REV]")),
4405 "bookmarks":
4406 "bookmarks":
4406 (bookmark,
4407 (bookmark,
4407 [('f', 'force', False, _('force')),
4408 [('f', 'force', False, _('force')),
4408 ('r', 'rev', '', _('revision'), _('REV')),
4409 ('r', 'rev', '', _('revision'), _('REV')),
4409 ('d', 'delete', False, _('delete a given bookmark')),
4410 ('d', 'delete', False, _('delete a given bookmark')),
4410 ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
4411 ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
4411 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
4412 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
4412 "branch":
4413 "branch":
4413 (branch,
4414 (branch,
4414 [('f', 'force', None,
4415 [('f', 'force', None,
4415 _('set branch name even if it shadows an existing branch')),
4416 _('set branch name even if it shadows an existing branch')),
4416 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4417 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4417 _('[-fC] [NAME]')),
4418 _('[-fC] [NAME]')),
4418 "branches":
4419 "branches":
4419 (branches,
4420 (branches,
4420 [('a', 'active', False,
4421 [('a', 'active', False,
4421 _('show only branches that have unmerged heads')),
4422 _('show only branches that have unmerged heads')),
4422 ('c', 'closed', False,
4423 ('c', 'closed', False,
4423 _('show normal and closed branches'))],
4424 _('show normal and closed branches'))],
4424 _('[-ac]')),
4425 _('[-ac]')),
4425 "bundle":
4426 "bundle":
4426 (bundle,
4427 (bundle,
4427 [('f', 'force', None,
4428 [('f', 'force', None,
4428 _('run even when the destination is unrelated')),
4429 _('run even when the destination is unrelated')),
4429 ('r', 'rev', [],
4430 ('r', 'rev', [],
4430 _('a changeset intended to be added to the destination'),
4431 _('a changeset intended to be added to the destination'),
4431 _('REV')),
4432 _('REV')),
4432 ('b', 'branch', [],
4433 ('b', 'branch', [],
4433 _('a specific branch you would like to bundle'),
4434 _('a specific branch you would like to bundle'),
4434 _('BRANCH')),
4435 _('BRANCH')),
4435 ('', 'base', [],
4436 ('', 'base', [],
4436 _('a base changeset assumed to be available at the destination'),
4437 _('a base changeset assumed to be available at the destination'),
4437 _('REV')),
4438 _('REV')),
4438 ('a', 'all', None, _('bundle all changesets in the repository')),
4439 ('a', 'all', None, _('bundle all changesets in the repository')),
4439 ('t', 'type', 'bzip2',
4440 ('t', 'type', 'bzip2',
4440 _('bundle compression type to use'), _('TYPE')),
4441 _('bundle compression type to use'), _('TYPE')),
4441 ] + remoteopts,
4442 ] + remoteopts,
4442 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4443 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4443 "cat":
4444 "cat":
4444 (cat,
4445 (cat,
4445 [('o', 'output', '',
4446 [('o', 'output', '',
4446 _('print output to file with formatted name'), _('FORMAT')),
4447 _('print output to file with formatted name'), _('FORMAT')),
4447 ('r', 'rev', '',
4448 ('r', 'rev', '',
4448 _('print the given revision'), _('REV')),
4449 _('print the given revision'), _('REV')),
4449 ('', 'decode', None, _('apply any matching decode filter')),
4450 ('', 'decode', None, _('apply any matching decode filter')),
4450 ] + walkopts,
4451 ] + walkopts,
4451 _('[OPTION]... FILE...')),
4452 _('[OPTION]... FILE...')),
4452 "^clone":
4453 "^clone":
4453 (clone,
4454 (clone,
4454 [('U', 'noupdate', None,
4455 [('U', 'noupdate', None,
4455 _('the clone will include an empty working copy (only a repository)')),
4456 _('the clone will include an empty working copy (only a repository)')),
4456 ('u', 'updaterev', '',
4457 ('u', 'updaterev', '',
4457 _('revision, tag or branch to check out'), _('REV')),
4458 _('revision, tag or branch to check out'), _('REV')),
4458 ('r', 'rev', [],
4459 ('r', 'rev', [],
4459 _('include the specified changeset'), _('REV')),
4460 _('include the specified changeset'), _('REV')),
4460 ('b', 'branch', [],
4461 ('b', 'branch', [],
4461 _('clone only the specified branch'), _('BRANCH')),
4462 _('clone only the specified branch'), _('BRANCH')),
4462 ('', 'pull', None, _('use pull protocol to copy metadata')),
4463 ('', 'pull', None, _('use pull protocol to copy metadata')),
4463 ('', 'uncompressed', None,
4464 ('', 'uncompressed', None,
4464 _('use uncompressed transfer (fast over LAN)')),
4465 _('use uncompressed transfer (fast over LAN)')),
4465 ] + remoteopts,
4466 ] + remoteopts,
4466 _('[OPTION]... SOURCE [DEST]')),
4467 _('[OPTION]... SOURCE [DEST]')),
4467 "^commit|ci":
4468 "^commit|ci":
4468 (commit,
4469 (commit,
4469 [('A', 'addremove', None,
4470 [('A', 'addremove', None,
4470 _('mark new/missing files as added/removed before committing')),
4471 _('mark new/missing files as added/removed before committing')),
4471 ('', 'close-branch', None,
4472 ('', 'close-branch', None,
4472 _('mark a branch as closed, hiding it from the branch list')),
4473 _('mark a branch as closed, hiding it from the branch list')),
4473 ] + walkopts + commitopts + commitopts2,
4474 ] + walkopts + commitopts + commitopts2,
4474 _('[OPTION]... [FILE]...')),
4475 _('[OPTION]... [FILE]...')),
4475 "copy|cp":
4476 "copy|cp":
4476 (copy,
4477 (copy,
4477 [('A', 'after', None, _('record a copy that has already occurred')),
4478 [('A', 'after', None, _('record a copy that has already occurred')),
4478 ('f', 'force', None,
4479 ('f', 'force', None,
4479 _('forcibly copy over an existing managed file')),
4480 _('forcibly copy over an existing managed file')),
4480 ] + walkopts + dryrunopts,
4481 ] + walkopts + dryrunopts,
4481 _('[OPTION]... [SOURCE]... DEST')),
4482 _('[OPTION]... [SOURCE]... DEST')),
4482 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4483 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4483 "debugbuilddag":
4484 "debugbuilddag":
4484 (debugbuilddag,
4485 (debugbuilddag,
4485 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4486 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4486 ('a', 'appended-file', None, _('add single file all revs append to')),
4487 ('a', 'appended-file', None, _('add single file all revs append to')),
4487 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4488 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4488 ('n', 'new-file', None, _('add new file at each rev')),
4489 ('n', 'new-file', None, _('add new file at each rev')),
4489 ],
4490 ],
4490 _('[OPTION]... TEXT')),
4491 _('[OPTION]... TEXT')),
4491 "debugbundle":
4492 "debugbundle":
4492 (debugbundle,
4493 (debugbundle,
4493 [('a', 'all', None, _('show all details')),
4494 [('a', 'all', None, _('show all details')),
4494 ],
4495 ],
4495 _('FILE')),
4496 _('FILE')),
4496 "debugcheckstate": (debugcheckstate, [], ''),
4497 "debugcheckstate": (debugcheckstate, [], ''),
4497 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4498 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4498 "debugcomplete":
4499 "debugcomplete":
4499 (debugcomplete,
4500 (debugcomplete,
4500 [('o', 'options', None, _('show the command options'))],
4501 [('o', 'options', None, _('show the command options'))],
4501 _('[-o] CMD')),
4502 _('[-o] CMD')),
4502 "debugdag":
4503 "debugdag":
4503 (debugdag,
4504 (debugdag,
4504 [('t', 'tags', None, _('use tags as labels')),
4505 [('t', 'tags', None, _('use tags as labels')),
4505 ('b', 'branches', None, _('annotate with branch names')),
4506 ('b', 'branches', None, _('annotate with branch names')),
4506 ('', 'dots', None, _('use dots for runs')),
4507 ('', 'dots', None, _('use dots for runs')),
4507 ('s', 'spaces', None, _('separate elements by spaces')),
4508 ('s', 'spaces', None, _('separate elements by spaces')),
4508 ],
4509 ],
4509 _('[OPTION]... [FILE [REV]...]')),
4510 _('[OPTION]... [FILE [REV]...]')),
4510 "debugdate":
4511 "debugdate":
4511 (debugdate,
4512 (debugdate,
4512 [('e', 'extended', None, _('try extended date formats'))],
4513 [('e', 'extended', None, _('try extended date formats'))],
4513 _('[-e] DATE [RANGE]')),
4514 _('[-e] DATE [RANGE]')),
4514 "debugdata": (debugdata, [], _('FILE REV')),
4515 "debugdata": (debugdata, [], _('FILE REV')),
4515 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4516 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4516 "debuggetbundle":
4517 "debuggetbundle":
4517 (debuggetbundle,
4518 (debuggetbundle,
4518 [('H', 'head', [], _('id of head node'), _('ID')),
4519 [('H', 'head', [], _('id of head node'), _('ID')),
4519 ('C', 'common', [], _('id of common node'), _('ID')),
4520 ('C', 'common', [], _('id of common node'), _('ID')),
4520 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4521 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4521 ],
4522 ],
4522 _('REPO FILE [-H|-C ID]...')),
4523 _('REPO FILE [-H|-C ID]...')),
4523 "debugignore": (debugignore, [], ''),
4524 "debugignore": (debugignore, [], ''),
4524 "debugindex": (debugindex,
4525 "debugindex": (debugindex,
4525 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4526 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4526 _('FILE')),
4527 _('FILE')),
4527 "debugindexdot": (debugindexdot, [], _('FILE')),
4528 "debugindexdot": (debugindexdot, [], _('FILE')),
4528 "debuginstall": (debuginstall, [], ''),
4529 "debuginstall": (debuginstall, [], ''),
4529 "debugknown": (debugknown, [], _('REPO ID...')),
4530 "debugknown": (debugknown, [], _('REPO ID...')),
4530 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4531 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4531 "debugrebuildstate":
4532 "debugrebuildstate":
4532 (debugrebuildstate,
4533 (debugrebuildstate,
4533 [('r', 'rev', '',
4534 [('r', 'rev', '',
4534 _('revision to rebuild to'), _('REV'))],
4535 _('revision to rebuild to'), _('REV'))],
4535 _('[-r REV] [REV]')),
4536 _('[-r REV] [REV]')),
4536 "debugrename":
4537 "debugrename":
4537 (debugrename,
4538 (debugrename,
4538 [('r', 'rev', '',
4539 [('r', 'rev', '',
4539 _('revision to debug'), _('REV'))],
4540 _('revision to debug'), _('REV'))],
4540 _('[-r REV] FILE')),
4541 _('[-r REV] FILE')),
4541 "debugrevspec":
4542 "debugrevspec":
4542 (debugrevspec, [], _('REVSPEC')),
4543 (debugrevspec, [], _('REVSPEC')),
4543 "debugsetparents":
4544 "debugsetparents":
4544 (debugsetparents, [], _('REV1 [REV2]')),
4545 (debugsetparents, [], _('REV1 [REV2]')),
4545 "debugstate":
4546 "debugstate":
4546 (debugstate,
4547 (debugstate,
4547 [('', 'nodates', None, _('do not display the saved mtime')),
4548 [('', 'nodates', None, _('do not display the saved mtime')),
4548 ('', 'datesort', None, _('sort by saved mtime'))],
4549 ('', 'datesort', None, _('sort by saved mtime'))],
4549 _('[OPTION]...')),
4550 _('[OPTION]...')),
4550 "debugsub":
4551 "debugsub":
4551 (debugsub,
4552 (debugsub,
4552 [('r', 'rev', '',
4553 [('r', 'rev', '',
4553 _('revision to check'), _('REV'))],
4554 _('revision to check'), _('REV'))],
4554 _('[-r REV] [REV]')),
4555 _('[-r REV] [REV]')),
4555 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4556 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4556 "debugwireargs":
4557 "debugwireargs":
4557 (debugwireargs,
4558 (debugwireargs,
4558 [('', 'three', '', 'three'),
4559 [('', 'three', '', 'three'),
4559 ('', 'four', '', 'four'),
4560 ('', 'four', '', 'four'),
4560 ] + remoteopts,
4561 ] + remoteopts,
4561 _('REPO [OPTIONS]... [ONE [TWO]]')),
4562 _('REPO [OPTIONS]... [ONE [TWO]]')),
4562 "^diff":
4563 "^diff":
4563 (diff,
4564 (diff,
4564 [('r', 'rev', [],
4565 [('r', 'rev', [],
4565 _('revision'), _('REV')),
4566 _('revision'), _('REV')),
4566 ('c', 'change', '',
4567 ('c', 'change', '',
4567 _('change made by revision'), _('REV'))
4568 _('change made by revision'), _('REV'))
4568 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4569 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4569 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4570 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4570 "^export":
4571 "^export":
4571 (export,
4572 (export,
4572 [('o', 'output', '',
4573 [('o', 'output', '',
4573 _('print output to file with formatted name'), _('FORMAT')),
4574 _('print output to file with formatted name'), _('FORMAT')),
4574 ('', 'switch-parent', None, _('diff against the second parent')),
4575 ('', 'switch-parent', None, _('diff against the second parent')),
4575 ('r', 'rev', [],
4576 ('r', 'rev', [],
4576 _('revisions to export'), _('REV')),
4577 _('revisions to export'), _('REV')),
4577 ] + diffopts,
4578 ] + diffopts,
4578 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4579 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4579 "^forget":
4580 "^forget":
4580 (forget,
4581 (forget,
4581 [] + walkopts,
4582 [] + walkopts,
4582 _('[OPTION]... FILE...')),
4583 _('[OPTION]... FILE...')),
4583 "grep":
4584 "grep":
4584 (grep,
4585 (grep,
4585 [('0', 'print0', None, _('end fields with NUL')),
4586 [('0', 'print0', None, _('end fields with NUL')),
4586 ('', 'all', None, _('print all revisions that match')),
4587 ('', 'all', None, _('print all revisions that match')),
4587 ('a', 'text', None, _('treat all files as text')),
4588 ('a', 'text', None, _('treat all files as text')),
4588 ('f', 'follow', None,
4589 ('f', 'follow', None,
4589 _('follow changeset history,'
4590 _('follow changeset history,'
4590 ' or file history across copies and renames')),
4591 ' or file history across copies and renames')),
4591 ('i', 'ignore-case', None, _('ignore case when matching')),
4592 ('i', 'ignore-case', None, _('ignore case when matching')),
4592 ('l', 'files-with-matches', None,
4593 ('l', 'files-with-matches', None,
4593 _('print only filenames and revisions that match')),
4594 _('print only filenames and revisions that match')),
4594 ('n', 'line-number', None, _('print matching line numbers')),
4595 ('n', 'line-number', None, _('print matching line numbers')),
4595 ('r', 'rev', [],
4596 ('r', 'rev', [],
4596 _('only search files changed within revision range'), _('REV')),
4597 _('only search files changed within revision range'), _('REV')),
4597 ('u', 'user', None, _('list the author (long with -v)')),
4598 ('u', 'user', None, _('list the author (long with -v)')),
4598 ('d', 'date', None, _('list the date (short with -q)')),
4599 ('d', 'date', None, _('list the date (short with -q)')),
4599 ] + walkopts,
4600 ] + walkopts,
4600 _('[OPTION]... PATTERN [FILE]...')),
4601 _('[OPTION]... PATTERN [FILE]...')),
4601 "heads":
4602 "heads":
4602 (heads,
4603 (heads,
4603 [('r', 'rev', '',
4604 [('r', 'rev', '',
4604 _('show only heads which are descendants of STARTREV'),
4605 _('show only heads which are descendants of STARTREV'),
4605 _('STARTREV')),
4606 _('STARTREV')),
4606 ('t', 'topo', False, _('show topological heads only')),
4607 ('t', 'topo', False, _('show topological heads only')),
4607 ('a', 'active', False,
4608 ('a', 'active', False,
4608 _('show active branchheads only (DEPRECATED)')),
4609 _('show active branchheads only (DEPRECATED)')),
4609 ('c', 'closed', False,
4610 ('c', 'closed', False,
4610 _('show normal and closed branch heads')),
4611 _('show normal and closed branch heads')),
4611 ] + templateopts,
4612 ] + templateopts,
4612 _('[-ac] [-r STARTREV] [REV]...')),
4613 _('[-ac] [-r STARTREV] [REV]...')),
4613 "help": (help_, [], _('[TOPIC]')),
4614 "help": (help_, [], _('[TOPIC]')),
4614 "identify|id":
4615 "identify|id":
4615 (identify,
4616 (identify,
4616 [('r', 'rev', '',
4617 [('r', 'rev', '',
4617 _('identify the specified revision'), _('REV')),
4618 _('identify the specified revision'), _('REV')),
4618 ('n', 'num', None, _('show local revision number')),
4619 ('n', 'num', None, _('show local revision number')),
4619 ('i', 'id', None, _('show global revision id')),
4620 ('i', 'id', None, _('show global revision id')),
4620 ('b', 'branch', None, _('show branch')),
4621 ('b', 'branch', None, _('show branch')),
4621 ('t', 'tags', None, _('show tags')),
4622 ('t', 'tags', None, _('show tags')),
4622 ('B', 'bookmarks', None, _('show bookmarks'))],
4623 ('B', 'bookmarks', None, _('show bookmarks'))],
4623 _('[-nibtB] [-r REV] [SOURCE]')),
4624 _('[-nibtB] [-r REV] [SOURCE]')),
4624 "import|patch":
4625 "import|patch":
4625 (import_,
4626 (import_,
4626 [('p', 'strip', 1,
4627 [('p', 'strip', 1,
4627 _('directory strip option for patch. This has the same '
4628 _('directory strip option for patch. This has the same '
4628 'meaning as the corresponding patch option'),
4629 'meaning as the corresponding patch option'),
4629 _('NUM')),
4630 _('NUM')),
4630 ('b', 'base', '',
4631 ('b', 'base', '',
4631 _('base path'), _('PATH')),
4632 _('base path'), _('PATH')),
4632 ('f', 'force', None,
4633 ('f', 'force', None,
4633 _('skip check for outstanding uncommitted changes')),
4634 _('skip check for outstanding uncommitted changes')),
4634 ('', 'no-commit', None,
4635 ('', 'no-commit', None,
4635 _("don't commit, just update the working directory")),
4636 _("don't commit, just update the working directory")),
4636 ('', 'exact', None,
4637 ('', 'exact', None,
4637 _('apply patch to the nodes from which it was generated')),
4638 _('apply patch to the nodes from which it was generated')),
4638 ('', 'import-branch', None,
4639 ('', 'import-branch', None,
4639 _('use any branch information in patch (implied by --exact)'))] +
4640 _('use any branch information in patch (implied by --exact)'))] +
4640 commitopts + commitopts2 + similarityopts,
4641 commitopts + commitopts2 + similarityopts,
4641 _('[OPTION]... PATCH...')),
4642 _('[OPTION]... PATCH...')),
4642 "incoming|in":
4643 "incoming|in":
4643 (incoming,
4644 (incoming,
4644 [('f', 'force', None,
4645 [('f', 'force', None,
4645 _('run even if remote repository is unrelated')),
4646 _('run even if remote repository is unrelated')),
4646 ('n', 'newest-first', None, _('show newest record first')),
4647 ('n', 'newest-first', None, _('show newest record first')),
4647 ('', 'bundle', '',
4648 ('', 'bundle', '',
4648 _('file to store the bundles into'), _('FILE')),
4649 _('file to store the bundles into'), _('FILE')),
4649 ('r', 'rev', [],
4650 ('r', 'rev', [],
4650 _('a remote changeset intended to be added'), _('REV')),
4651 _('a remote changeset intended to be added'), _('REV')),
4651 ('B', 'bookmarks', False, _("compare bookmarks")),
4652 ('B', 'bookmarks', False, _("compare bookmarks")),
4652 ('b', 'branch', [],
4653 ('b', 'branch', [],
4653 _('a specific branch you would like to pull'), _('BRANCH')),
4654 _('a specific branch you would like to pull'), _('BRANCH')),
4654 ] + logopts + remoteopts + subrepoopts,
4655 ] + logopts + remoteopts + subrepoopts,
4655 _('[-p] [-n] [-M] [-f] [-r REV]...'
4656 _('[-p] [-n] [-M] [-f] [-r REV]...'
4656 ' [--bundle FILENAME] [SOURCE]')),
4657 ' [--bundle FILENAME] [SOURCE]')),
4657 "^init":
4658 "^init":
4658 (init,
4659 (init,
4659 remoteopts,
4660 remoteopts,
4660 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4661 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4661 "locate":
4662 "locate":
4662 (locate,
4663 (locate,
4663 [('r', 'rev', '',
4664 [('r', 'rev', '',
4664 _('search the repository as it is in REV'), _('REV')),
4665 _('search the repository as it is in REV'), _('REV')),
4665 ('0', 'print0', None,
4666 ('0', 'print0', None,
4666 _('end filenames with NUL, for use with xargs')),
4667 _('end filenames with NUL, for use with xargs')),
4667 ('f', 'fullpath', None,
4668 ('f', 'fullpath', None,
4668 _('print complete paths from the filesystem root')),
4669 _('print complete paths from the filesystem root')),
4669 ] + walkopts,
4670 ] + walkopts,
4670 _('[OPTION]... [PATTERN]...')),
4671 _('[OPTION]... [PATTERN]...')),
4671 "^log|history":
4672 "^log|history":
4672 (log,
4673 (log,
4673 [('f', 'follow', None,
4674 [('f', 'follow', None,
4674 _('follow changeset history,'
4675 _('follow changeset history,'
4675 ' or file history across copies and renames')),
4676 ' or file history across copies and renames')),
4676 ('', 'follow-first', None,
4677 ('', 'follow-first', None,
4677 _('only follow the first parent of merge changesets')),
4678 _('only follow the first parent of merge changesets')),
4678 ('d', 'date', '',
4679 ('d', 'date', '',
4679 _('show revisions matching date spec'), _('DATE')),
4680 _('show revisions matching date spec'), _('DATE')),
4680 ('C', 'copies', None, _('show copied files')),
4681 ('C', 'copies', None, _('show copied files')),
4681 ('k', 'keyword', [],
4682 ('k', 'keyword', [],
4682 _('do case-insensitive search for a given text'), _('TEXT')),
4683 _('do case-insensitive search for a given text'), _('TEXT')),
4683 ('r', 'rev', [],
4684 ('r', 'rev', [],
4684 _('show the specified revision or range'), _('REV')),
4685 _('show the specified revision or range'), _('REV')),
4685 ('', 'removed', None, _('include revisions where files were removed')),
4686 ('', 'removed', None, _('include revisions where files were removed')),
4686 ('m', 'only-merges', None, _('show only merges')),
4687 ('m', 'only-merges', None, _('show only merges')),
4687 ('u', 'user', [],
4688 ('u', 'user', [],
4688 _('revisions committed by user'), _('USER')),
4689 _('revisions committed by user'), _('USER')),
4689 ('', 'only-branch', [],
4690 ('', 'only-branch', [],
4690 _('show only changesets within the given named branch (DEPRECATED)'),
4691 _('show only changesets within the given named branch (DEPRECATED)'),
4691 _('BRANCH')),
4692 _('BRANCH')),
4692 ('b', 'branch', [],
4693 ('b', 'branch', [],
4693 _('show changesets within the given named branch'), _('BRANCH')),
4694 _('show changesets within the given named branch'), _('BRANCH')),
4694 ('P', 'prune', [],
4695 ('P', 'prune', [],
4695 _('do not display revision or any of its ancestors'), _('REV')),
4696 _('do not display revision or any of its ancestors'), _('REV')),
4696 ] + logopts + walkopts,
4697 ] + logopts + walkopts,
4697 _('[OPTION]... [FILE]')),
4698 _('[OPTION]... [FILE]')),
4698 "manifest":
4699 "manifest":
4699 (manifest,
4700 (manifest,
4700 [('r', 'rev', '',
4701 [('r', 'rev', '',
4701 _('revision to display'), _('REV'))],
4702 _('revision to display'), _('REV'))],
4702 _('[-r REV]')),
4703 _('[-r REV]')),
4703 "^merge":
4704 "^merge":
4704 (merge,
4705 (merge,
4705 [('f', 'force', None, _('force a merge with outstanding changes')),
4706 [('f', 'force', None, _('force a merge with outstanding changes')),
4706 ('t', 'tool', '', _('specify merge tool')),
4707 ('t', 'tool', '', _('specify merge tool')),
4707 ('r', 'rev', '',
4708 ('r', 'rev', '',
4708 _('revision to merge'), _('REV')),
4709 _('revision to merge'), _('REV')),
4709 ('P', 'preview', None,
4710 ('P', 'preview', None,
4710 _('review revisions to merge (no merge is performed)'))],
4711 _('review revisions to merge (no merge is performed)'))],
4711 _('[-P] [-f] [[-r] REV]')),
4712 _('[-P] [-f] [[-r] REV]')),
4712 "outgoing|out":
4713 "outgoing|out":
4713 (outgoing,
4714 (outgoing,
4714 [('f', 'force', None,
4715 [('f', 'force', None,
4715 _('run even when the destination is unrelated')),
4716 _('run even when the destination is unrelated')),
4716 ('r', 'rev', [],
4717 ('r', 'rev', [],
4717 _('a changeset intended to be included in the destination'),
4718 _('a changeset intended to be included in the destination'),
4718 _('REV')),
4719 _('REV')),
4719 ('n', 'newest-first', None, _('show newest record first')),
4720 ('n', 'newest-first', None, _('show newest record first')),
4720 ('B', 'bookmarks', False, _("compare bookmarks")),
4721 ('B', 'bookmarks', False, _("compare bookmarks")),
4721 ('b', 'branch', [],
4722 ('b', 'branch', [],
4722 _('a specific branch you would like to push'), _('BRANCH')),
4723 _('a specific branch you would like to push'), _('BRANCH')),
4723 ] + logopts + remoteopts + subrepoopts,
4724 ] + logopts + remoteopts + subrepoopts,
4724 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4725 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4725 "parents":
4726 "parents":
4726 (parents,
4727 (parents,
4727 [('r', 'rev', '',
4728 [('r', 'rev', '',
4728 _('show parents of the specified revision'), _('REV')),
4729 _('show parents of the specified revision'), _('REV')),
4729 ] + templateopts,
4730 ] + templateopts,
4730 _('[-r REV] [FILE]')),
4731 _('[-r REV] [FILE]')),
4731 "paths": (paths, [], _('[NAME]')),
4732 "paths": (paths, [], _('[NAME]')),
4732 "^pull":
4733 "^pull":
4733 (pull,
4734 (pull,
4734 [('u', 'update', None,
4735 [('u', 'update', None,
4735 _('update to new branch head if changesets were pulled')),
4736 _('update to new branch head if changesets were pulled')),
4736 ('f', 'force', None,
4737 ('f', 'force', None,
4737 _('run even when remote repository is unrelated')),
4738 _('run even when remote repository is unrelated')),
4738 ('r', 'rev', [],
4739 ('r', 'rev', [],
4739 _('a remote changeset intended to be added'), _('REV')),
4740 _('a remote changeset intended to be added'), _('REV')),
4740 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4741 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4741 ('b', 'branch', [],
4742 ('b', 'branch', [],
4742 _('a specific branch you would like to pull'), _('BRANCH')),
4743 _('a specific branch you would like to pull'), _('BRANCH')),
4743 ] + remoteopts,
4744 ] + remoteopts,
4744 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4745 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4745 "^push":
4746 "^push":
4746 (push,
4747 (push,
4747 [('f', 'force', None, _('force push')),
4748 [('f', 'force', None, _('force push')),
4748 ('r', 'rev', [],
4749 ('r', 'rev', [],
4749 _('a changeset intended to be included in the destination'),
4750 _('a changeset intended to be included in the destination'),
4750 _('REV')),
4751 _('REV')),
4751 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4752 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4752 ('b', 'branch', [],
4753 ('b', 'branch', [],
4753 _('a specific branch you would like to push'), _('BRANCH')),
4754 _('a specific branch you would like to push'), _('BRANCH')),
4754 ('', 'new-branch', False, _('allow pushing a new branch')),
4755 ('', 'new-branch', False, _('allow pushing a new branch')),
4755 ] + remoteopts,
4756 ] + remoteopts,
4756 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4757 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4757 "recover": (recover, []),
4758 "recover": (recover, []),
4758 "^remove|rm":
4759 "^remove|rm":
4759 (remove,
4760 (remove,
4760 [('A', 'after', None, _('record delete for missing files')),
4761 [('A', 'after', None, _('record delete for missing files')),
4761 ('f', 'force', None,
4762 ('f', 'force', None,
4762 _('remove (and delete) file even if added or modified')),
4763 _('remove (and delete) file even if added or modified')),
4763 ] + walkopts,
4764 ] + walkopts,
4764 _('[OPTION]... FILE...')),
4765 _('[OPTION]... FILE...')),
4765 "rename|move|mv":
4766 "rename|move|mv":
4766 (rename,
4767 (rename,
4767 [('A', 'after', None, _('record a rename that has already occurred')),
4768 [('A', 'after', None, _('record a rename that has already occurred')),
4768 ('f', 'force', None,
4769 ('f', 'force', None,
4769 _('forcibly copy over an existing managed file')),
4770 _('forcibly copy over an existing managed file')),
4770 ] + walkopts + dryrunopts,
4771 ] + walkopts + dryrunopts,
4771 _('[OPTION]... SOURCE... DEST')),
4772 _('[OPTION]... SOURCE... DEST')),
4772 "resolve":
4773 "resolve":
4773 (resolve,
4774 (resolve,
4774 [('a', 'all', None, _('select all unresolved files')),
4775 [('a', 'all', None, _('select all unresolved files')),
4775 ('l', 'list', None, _('list state of files needing merge')),
4776 ('l', 'list', None, _('list state of files needing merge')),
4776 ('m', 'mark', None, _('mark files as resolved')),
4777 ('m', 'mark', None, _('mark files as resolved')),
4777 ('u', 'unmark', None, _('mark files as unresolved')),
4778 ('u', 'unmark', None, _('mark files as unresolved')),
4778 ('t', 'tool', '', _('specify merge tool')),
4779 ('t', 'tool', '', _('specify merge tool')),
4779 ('n', 'no-status', None, _('hide status prefix'))]
4780 ('n', 'no-status', None, _('hide status prefix'))]
4780 + walkopts,
4781 + walkopts,
4781 _('[OPTION]... [FILE]...')),
4782 _('[OPTION]... [FILE]...')),
4782 "revert":
4783 "revert":
4783 (revert,
4784 (revert,
4784 [('a', 'all', None, _('revert all changes when no arguments given')),
4785 [('a', 'all', None, _('revert all changes when no arguments given')),
4785 ('d', 'date', '',
4786 ('d', 'date', '',
4786 _('tipmost revision matching date'), _('DATE')),
4787 _('tipmost revision matching date'), _('DATE')),
4787 ('r', 'rev', '',
4788 ('r', 'rev', '',
4788 _('revert to the specified revision'), _('REV')),
4789 _('revert to the specified revision'), _('REV')),
4789 ('', 'no-backup', None, _('do not save backup copies of files')),
4790 ('', 'no-backup', None, _('do not save backup copies of files')),
4790 ] + walkopts + dryrunopts,
4791 ] + walkopts + dryrunopts,
4791 _('[OPTION]... [-r REV] [NAME]...')),
4792 _('[OPTION]... [-r REV] [NAME]...')),
4792 "rollback": (rollback, dryrunopts),
4793 "rollback": (rollback, dryrunopts),
4793 "root": (root, []),
4794 "root": (root, []),
4794 "^serve":
4795 "^serve":
4795 (serve,
4796 (serve,
4796 [('A', 'accesslog', '',
4797 [('A', 'accesslog', '',
4797 _('name of access log file to write to'), _('FILE')),
4798 _('name of access log file to write to'), _('FILE')),
4798 ('d', 'daemon', None, _('run server in background')),
4799 ('d', 'daemon', None, _('run server in background')),
4799 ('', 'daemon-pipefds', '',
4800 ('', 'daemon-pipefds', '',
4800 _('used internally by daemon mode'), _('NUM')),
4801 _('used internally by daemon mode'), _('NUM')),
4801 ('E', 'errorlog', '',
4802 ('E', 'errorlog', '',
4802 _('name of error log file to write to'), _('FILE')),
4803 _('name of error log file to write to'), _('FILE')),
4803 # use string type, then we can check if something was passed
4804 # use string type, then we can check if something was passed
4804 ('p', 'port', '',
4805 ('p', 'port', '',
4805 _('port to listen on (default: 8000)'), _('PORT')),
4806 _('port to listen on (default: 8000)'), _('PORT')),
4806 ('a', 'address', '',
4807 ('a', 'address', '',
4807 _('address to listen on (default: all interfaces)'), _('ADDR')),
4808 _('address to listen on (default: all interfaces)'), _('ADDR')),
4808 ('', 'prefix', '',
4809 ('', 'prefix', '',
4809 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4810 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4810 ('n', 'name', '',
4811 ('n', 'name', '',
4811 _('name to show in web pages (default: working directory)'),
4812 _('name to show in web pages (default: working directory)'),
4812 _('NAME')),
4813 _('NAME')),
4813 ('', 'web-conf', '',
4814 ('', 'web-conf', '',
4814 _('name of the hgweb config file (see "hg help hgweb")'),
4815 _('name of the hgweb config file (see "hg help hgweb")'),
4815 _('FILE')),
4816 _('FILE')),
4816 ('', 'webdir-conf', '',
4817 ('', 'webdir-conf', '',
4817 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4818 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4818 ('', 'pid-file', '',
4819 ('', 'pid-file', '',
4819 _('name of file to write process ID to'), _('FILE')),
4820 _('name of file to write process ID to'), _('FILE')),
4820 ('', 'stdio', None, _('for remote clients')),
4821 ('', 'stdio', None, _('for remote clients')),
4821 ('t', 'templates', '',
4822 ('t', 'templates', '',
4822 _('web templates to use'), _('TEMPLATE')),
4823 _('web templates to use'), _('TEMPLATE')),
4823 ('', 'style', '',
4824 ('', 'style', '',
4824 _('template style to use'), _('STYLE')),
4825 _('template style to use'), _('STYLE')),
4825 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4826 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4826 ('', 'certificate', '',
4827 ('', 'certificate', '',
4827 _('SSL certificate file'), _('FILE'))],
4828 _('SSL certificate file'), _('FILE'))],
4828 _('[OPTION]...')),
4829 _('[OPTION]...')),
4829 "showconfig|debugconfig":
4830 "showconfig|debugconfig":
4830 (showconfig,
4831 (showconfig,
4831 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4832 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4832 _('[-u] [NAME]...')),
4833 _('[-u] [NAME]...')),
4833 "^summary|sum":
4834 "^summary|sum":
4834 (summary,
4835 (summary,
4835 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4836 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4836 "^status|st":
4837 "^status|st":
4837 (status,
4838 (status,
4838 [('A', 'all', None, _('show status of all files')),
4839 [('A', 'all', None, _('show status of all files')),
4839 ('m', 'modified', None, _('show only modified files')),
4840 ('m', 'modified', None, _('show only modified files')),
4840 ('a', 'added', None, _('show only added files')),
4841 ('a', 'added', None, _('show only added files')),
4841 ('r', 'removed', None, _('show only removed files')),
4842 ('r', 'removed', None, _('show only removed files')),
4842 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4843 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4843 ('c', 'clean', None, _('show only files without changes')),
4844 ('c', 'clean', None, _('show only files without changes')),
4844 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4845 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4845 ('i', 'ignored', None, _('show only ignored files')),
4846 ('i', 'ignored', None, _('show only ignored files')),
4846 ('n', 'no-status', None, _('hide status prefix')),
4847 ('n', 'no-status', None, _('hide status prefix')),
4847 ('C', 'copies', None, _('show source of copied files')),
4848 ('C', 'copies', None, _('show source of copied files')),
4848 ('0', 'print0', None,
4849 ('0', 'print0', None,
4849 _('end filenames with NUL, for use with xargs')),
4850 _('end filenames with NUL, for use with xargs')),
4850 ('', 'rev', [],
4851 ('', 'rev', [],
4851 _('show difference from revision'), _('REV')),
4852 _('show difference from revision'), _('REV')),
4852 ('', 'change', '',
4853 ('', 'change', '',
4853 _('list the changed files of a revision'), _('REV')),
4854 _('list the changed files of a revision'), _('REV')),
4854 ] + walkopts + subrepoopts,
4855 ] + walkopts + subrepoopts,
4855 _('[OPTION]... [FILE]...')),
4856 _('[OPTION]... [FILE]...')),
4856 "tag":
4857 "tag":
4857 (tag,
4858 (tag,
4858 [('f', 'force', None, _('force tag')),
4859 [('f', 'force', None, _('force tag')),
4859 ('l', 'local', None, _('make the tag local')),
4860 ('l', 'local', None, _('make the tag local')),
4860 ('r', 'rev', '',
4861 ('r', 'rev', '',
4861 _('revision to tag'), _('REV')),
4862 _('revision to tag'), _('REV')),
4862 ('', 'remove', None, _('remove a tag')),
4863 ('', 'remove', None, _('remove a tag')),
4863 # -l/--local is already there, commitopts cannot be used
4864 # -l/--local is already there, commitopts cannot be used
4864 ('e', 'edit', None, _('edit commit message')),
4865 ('e', 'edit', None, _('edit commit message')),
4865 ('m', 'message', '',
4866 ('m', 'message', '',
4866 _('use <text> as commit message'), _('TEXT')),
4867 _('use <text> as commit message'), _('TEXT')),
4867 ] + commitopts2,
4868 ] + commitopts2,
4868 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4869 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4869 "tags": (tags, [], ''),
4870 "tags": (tags, [], ''),
4870 "tip":
4871 "tip":
4871 (tip,
4872 (tip,
4872 [('p', 'patch', None, _('show patch')),
4873 [('p', 'patch', None, _('show patch')),
4873 ('g', 'git', None, _('use git extended diff format')),
4874 ('g', 'git', None, _('use git extended diff format')),
4874 ] + templateopts,
4875 ] + templateopts,
4875 _('[-p] [-g]')),
4876 _('[-p] [-g]')),
4876 "unbundle":
4877 "unbundle":
4877 (unbundle,
4878 (unbundle,
4878 [('u', 'update', None,
4879 [('u', 'update', None,
4879 _('update to new branch head if changesets were unbundled'))],
4880 _('update to new branch head if changesets were unbundled'))],
4880 _('[-u] FILE...')),
4881 _('[-u] FILE...')),
4881 "^update|up|checkout|co":
4882 "^update|up|checkout|co":
4882 (update,
4883 (update,
4883 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4884 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4884 ('c', 'check', None,
4885 ('c', 'check', None,
4885 _('update across branches if no uncommitted changes')),
4886 _('update across branches if no uncommitted changes')),
4886 ('d', 'date', '',
4887 ('d', 'date', '',
4887 _('tipmost revision matching date'), _('DATE')),
4888 _('tipmost revision matching date'), _('DATE')),
4888 ('r', 'rev', '',
4889 ('r', 'rev', '',
4889 _('revision'), _('REV'))],
4890 _('revision'), _('REV'))],
4890 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4891 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4891 "verify": (verify, []),
4892 "verify": (verify, []),
4892 "version": (version_, []),
4893 "version": (version_, []),
4893 }
4894 }
4894
4895
4895 norepo = ("clone init version help debugcommands debugcomplete"
4896 norepo = ("clone init version help debugcommands debugcomplete"
4896 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4897 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4897 " debugknown debuggetbundle debugbundle")
4898 " debugknown debuggetbundle debugbundle")
4898 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4899 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4899 " debugdata debugindex debugindexdot")
4900 " debugdata debugindex debugindexdot")
@@ -1,1935 +1,1935 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks
11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import scmutil, util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 import url as urlmod
17 import url as urlmod
18 from lock import release
18 from lock import release
19 import weakref, errno, os, time, inspect
19 import weakref, errno, os, time, inspect
20 propertycache = util.propertycache
20 propertycache = util.propertycache
21
21
22 class localrepository(repo.repository):
22 class localrepository(repo.repository):
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 'known', 'getbundle'))
24 'known', 'getbundle'))
25 supportedformats = set(('revlogv1', 'parentdelta'))
25 supportedformats = set(('revlogv1', 'parentdelta'))
26 supported = supportedformats | set(('store', 'fncache', 'shared',
26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 'dotencode'))
27 'dotencode'))
28
28
29 def __init__(self, baseui, path=None, create=0):
29 def __init__(self, baseui, path=None, create=0):
30 repo.repository.__init__(self)
30 repo.repository.__init__(self)
31 self.root = os.path.realpath(util.expandpath(path))
31 self.root = os.path.realpath(util.expandpath(path))
32 self.path = os.path.join(self.root, ".hg")
32 self.path = os.path.join(self.root, ".hg")
33 self.origroot = path
33 self.origroot = path
34 self.auditor = util.path_auditor(self.root, self._checknested)
34 self.auditor = util.path_auditor(self.root, self._checknested)
35 self.opener = util.opener(self.path)
35 self.opener = scmutil.opener(self.path)
36 self.wopener = util.opener(self.root)
36 self.wopener = scmutil.opener(self.root)
37 self.baseui = baseui
37 self.baseui = baseui
38 self.ui = baseui.copy()
38 self.ui = baseui.copy()
39
39
40 try:
40 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
41 self.ui.readconfig(self.join("hgrc"), self.root)
42 extensions.loadall(self.ui)
42 extensions.loadall(self.ui)
43 except IOError:
43 except IOError:
44 pass
44 pass
45
45
46 if not os.path.isdir(self.path):
46 if not os.path.isdir(self.path):
47 if create:
47 if create:
48 if not os.path.exists(path):
48 if not os.path.exists(path):
49 util.makedirs(path)
49 util.makedirs(path)
50 util.makedir(self.path, notindexed=True)
50 util.makedir(self.path, notindexed=True)
51 requirements = ["revlogv1"]
51 requirements = ["revlogv1"]
52 if self.ui.configbool('format', 'usestore', True):
52 if self.ui.configbool('format', 'usestore', True):
53 os.mkdir(os.path.join(self.path, "store"))
53 os.mkdir(os.path.join(self.path, "store"))
54 requirements.append("store")
54 requirements.append("store")
55 if self.ui.configbool('format', 'usefncache', True):
55 if self.ui.configbool('format', 'usefncache', True):
56 requirements.append("fncache")
56 requirements.append("fncache")
57 if self.ui.configbool('format', 'dotencode', True):
57 if self.ui.configbool('format', 'dotencode', True):
58 requirements.append('dotencode')
58 requirements.append('dotencode')
59 # create an invalid changelog
59 # create an invalid changelog
60 self.opener("00changelog.i", "a").write(
60 self.opener("00changelog.i", "a").write(
61 '\0\0\0\2' # represents revlogv2
61 '\0\0\0\2' # represents revlogv2
62 ' dummy changelog to prevent using the old repo layout'
62 ' dummy changelog to prevent using the old repo layout'
63 )
63 )
64 if self.ui.configbool('format', 'parentdelta', False):
64 if self.ui.configbool('format', 'parentdelta', False):
65 requirements.append("parentdelta")
65 requirements.append("parentdelta")
66 else:
66 else:
67 raise error.RepoError(_("repository %s not found") % path)
67 raise error.RepoError(_("repository %s not found") % path)
68 elif create:
68 elif create:
69 raise error.RepoError(_("repository %s already exists") % path)
69 raise error.RepoError(_("repository %s already exists") % path)
70 else:
70 else:
71 # find requirements
71 # find requirements
72 requirements = set()
72 requirements = set()
73 try:
73 try:
74 requirements = set(self.opener("requires").read().splitlines())
74 requirements = set(self.opener("requires").read().splitlines())
75 except IOError, inst:
75 except IOError, inst:
76 if inst.errno != errno.ENOENT:
76 if inst.errno != errno.ENOENT:
77 raise
77 raise
78 for r in requirements - self.supported:
78 for r in requirements - self.supported:
79 raise error.RequirementError(
79 raise error.RequirementError(
80 _("requirement '%s' not supported") % r)
80 _("requirement '%s' not supported") % r)
81
81
82 self.sharedpath = self.path
82 self.sharedpath = self.path
83 try:
83 try:
84 s = os.path.realpath(self.opener("sharedpath").read())
84 s = os.path.realpath(self.opener("sharedpath").read())
85 if not os.path.exists(s):
85 if not os.path.exists(s):
86 raise error.RepoError(
86 raise error.RepoError(
87 _('.hg/sharedpath points to nonexistent directory %s') % s)
87 _('.hg/sharedpath points to nonexistent directory %s') % s)
88 self.sharedpath = s
88 self.sharedpath = s
89 except IOError, inst:
89 except IOError, inst:
90 if inst.errno != errno.ENOENT:
90 if inst.errno != errno.ENOENT:
91 raise
91 raise
92
92
93 self.store = store.store(requirements, self.sharedpath, util.opener)
93 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
94 self.spath = self.store.path
94 self.spath = self.store.path
95 self.sopener = self.store.opener
95 self.sopener = self.store.opener
96 self.sjoin = self.store.join
96 self.sjoin = self.store.join
97 self.opener.createmode = self.store.createmode
97 self.opener.createmode = self.store.createmode
98 self._applyrequirements(requirements)
98 self._applyrequirements(requirements)
99 if create:
99 if create:
100 self._writerequirements()
100 self._writerequirements()
101
101
102 # These two define the set of tags for this repository. _tags
102 # These two define the set of tags for this repository. _tags
103 # maps tag name to node; _tagtypes maps tag name to 'global' or
103 # maps tag name to node; _tagtypes maps tag name to 'global' or
104 # 'local'. (Global tags are defined by .hgtags across all
104 # 'local'. (Global tags are defined by .hgtags across all
105 # heads, and local tags are defined in .hg/localtags.) They
105 # heads, and local tags are defined in .hg/localtags.) They
106 # constitute the in-memory cache of tags.
106 # constitute the in-memory cache of tags.
107 self._tags = None
107 self._tags = None
108 self._tagtypes = None
108 self._tagtypes = None
109
109
110 self._branchcache = None
110 self._branchcache = None
111 self._branchcachetip = None
111 self._branchcachetip = None
112 self.nodetagscache = None
112 self.nodetagscache = None
113 self.filterpats = {}
113 self.filterpats = {}
114 self._datafilters = {}
114 self._datafilters = {}
115 self._transref = self._lockref = self._wlockref = None
115 self._transref = self._lockref = self._wlockref = None
116
116
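
The substance of this changeset is visible in __init__ above: the opener class now comes from scmutil instead of util, but the calling convention is unchanged -- construct it with a base directory, then call it with a path relative to that base (and an optional mode) to get a file object, as the "requires" and "sharedpath" reads above do. A minimal sketch of that pattern, with the repository location invented for illustration:

# illustrative only: '/tmp/repo/.hg' stands in for the repository's .hg path
from mercurial import scmutil

opener = scmutil.opener('/tmp/repo/.hg')   # path auditing is on by default
fp = opener('requires', 'w')               # opens .hg/requires for writing
fp.write('revlogv1\n')
fp.close()
print opener('requires').read()            # mode defaults to 'r'
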
117 def _applyrequirements(self, requirements):
117 def _applyrequirements(self, requirements):
118 self.requirements = requirements
118 self.requirements = requirements
119 self.sopener.options = {}
119 self.sopener.options = {}
120 if 'parentdelta' in requirements:
120 if 'parentdelta' in requirements:
121 self.sopener.options['parentdelta'] = 1
121 self.sopener.options['parentdelta'] = 1
122
122
123 def _writerequirements(self):
123 def _writerequirements(self):
124 reqfile = self.opener("requires", "w")
124 reqfile = self.opener("requires", "w")
125 for r in self.requirements:
125 for r in self.requirements:
126 reqfile.write("%s\n" % r)
126 reqfile.write("%s\n" % r)
127 reqfile.close()
127 reqfile.close()
128
128
129 def _checknested(self, path):
129 def _checknested(self, path):
130 """Determine if path is a legal nested repository."""
130 """Determine if path is a legal nested repository."""
131 if not path.startswith(self.root):
131 if not path.startswith(self.root):
132 return False
132 return False
133 subpath = path[len(self.root) + 1:]
133 subpath = path[len(self.root) + 1:]
134
134
135 # XXX: Checking against the current working copy is wrong in
135 # XXX: Checking against the current working copy is wrong in
136 # the sense that it can reject things like
136 # the sense that it can reject things like
137 #
137 #
138 # $ hg cat -r 10 sub/x.txt
138 # $ hg cat -r 10 sub/x.txt
139 #
139 #
140 # if sub/ is no longer a subrepository in the working copy
140 # if sub/ is no longer a subrepository in the working copy
141 # parent revision.
141 # parent revision.
142 #
142 #
143 # However, it can of course also allow things that would have
143 # However, it can of course also allow things that would have
144 # been rejected before, such as the above cat command if sub/
144 # been rejected before, such as the above cat command if sub/
145 # is a subrepository now, but was a normal directory before.
145 # is a subrepository now, but was a normal directory before.
146 # The old path auditor would have rejected by mistake since it
146 # The old path auditor would have rejected by mistake since it
147 # panics when it sees sub/.hg/.
147 # panics when it sees sub/.hg/.
148 #
148 #
149 # All in all, checking against the working copy seems sensible
149 # All in all, checking against the working copy seems sensible
150 # since we want to prevent access to nested repositories on
150 # since we want to prevent access to nested repositories on
151 # the filesystem *now*.
151 # the filesystem *now*.
152 ctx = self[None]
152 ctx = self[None]
153 parts = util.splitpath(subpath)
153 parts = util.splitpath(subpath)
154 while parts:
154 while parts:
155 prefix = os.sep.join(parts)
155 prefix = os.sep.join(parts)
156 if prefix in ctx.substate:
156 if prefix in ctx.substate:
157 if prefix == subpath:
157 if prefix == subpath:
158 return True
158 return True
159 else:
159 else:
160 sub = ctx.sub(prefix)
160 sub = ctx.sub(prefix)
161 return sub.checknested(subpath[len(prefix) + 1:])
161 return sub.checknested(subpath[len(prefix) + 1:])
162 else:
162 else:
163 parts.pop()
163 parts.pop()
164 return False
164 return False
165
165
166 @util.propertycache
166 @util.propertycache
167 def _bookmarks(self):
167 def _bookmarks(self):
168 return bookmarks.read(self)
168 return bookmarks.read(self)
169
169
170 @util.propertycache
170 @util.propertycache
171 def _bookmarkcurrent(self):
171 def _bookmarkcurrent(self):
172 return bookmarks.readcurrent(self)
172 return bookmarks.readcurrent(self)
173
173
174 @propertycache
174 @propertycache
175 def changelog(self):
175 def changelog(self):
176 c = changelog.changelog(self.sopener)
176 c = changelog.changelog(self.sopener)
177 if 'HG_PENDING' in os.environ:
177 if 'HG_PENDING' in os.environ:
178 p = os.environ['HG_PENDING']
178 p = os.environ['HG_PENDING']
179 if p.startswith(self.root):
179 if p.startswith(self.root):
180 c.readpending('00changelog.i.a')
180 c.readpending('00changelog.i.a')
181 self.sopener.options['defversion'] = c.version
181 self.sopener.options['defversion'] = c.version
182 return c
182 return c
183
183
184 @propertycache
184 @propertycache
185 def manifest(self):
185 def manifest(self):
186 return manifest.manifest(self.sopener)
186 return manifest.manifest(self.sopener)
187
187
188 @propertycache
188 @propertycache
189 def dirstate(self):
189 def dirstate(self):
190 warned = [0]
190 warned = [0]
191 def validate(node):
191 def validate(node):
192 try:
192 try:
193 r = self.changelog.rev(node)
193 r = self.changelog.rev(node)
194 return node
194 return node
195 except error.LookupError:
195 except error.LookupError:
196 if not warned[0]:
196 if not warned[0]:
197 warned[0] = True
197 warned[0] = True
198 self.ui.warn(_("warning: ignoring unknown"
198 self.ui.warn(_("warning: ignoring unknown"
199 " working parent %s!\n") % short(node))
199 " working parent %s!\n") % short(node))
200 return nullid
200 return nullid
201
201
202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
203
203
204 def __getitem__(self, changeid):
204 def __getitem__(self, changeid):
205 if changeid is None:
205 if changeid is None:
206 return context.workingctx(self)
206 return context.workingctx(self)
207 return context.changectx(self, changeid)
207 return context.changectx(self, changeid)
208
208
209 def __contains__(self, changeid):
209 def __contains__(self, changeid):
210 try:
210 try:
211 return bool(self.lookup(changeid))
211 return bool(self.lookup(changeid))
212 except error.RepoLookupError:
212 except error.RepoLookupError:
213 return False
213 return False
214
214
215 def __nonzero__(self):
215 def __nonzero__(self):
216 return True
216 return True
217
217
218 def __len__(self):
218 def __len__(self):
219 return len(self.changelog)
219 return len(self.changelog)
220
220
221 def __iter__(self):
221 def __iter__(self):
222 for i in xrange(len(self)):
222 for i in xrange(len(self)):
223 yield i
223 yield i
224
224
225 def url(self):
225 def url(self):
226 return 'file:' + self.root
226 return 'file:' + self.root
227
227
228 def hook(self, name, throw=False, **args):
228 def hook(self, name, throw=False, **args):
229 return hook.hook(self.ui, self, name, throw, **args)
229 return hook.hook(self.ui, self, name, throw, **args)
230
230
231 tag_disallowed = ':\r\n'
231 tag_disallowed = ':\r\n'
232
232
233 def _tag(self, names, node, message, local, user, date, extra={}):
233 def _tag(self, names, node, message, local, user, date, extra={}):
234 if isinstance(names, str):
234 if isinstance(names, str):
235 allchars = names
235 allchars = names
236 names = (names,)
236 names = (names,)
237 else:
237 else:
238 allchars = ''.join(names)
238 allchars = ''.join(names)
239 for c in self.tag_disallowed:
239 for c in self.tag_disallowed:
240 if c in allchars:
240 if c in allchars:
241 raise util.Abort(_('%r cannot be used in a tag name') % c)
241 raise util.Abort(_('%r cannot be used in a tag name') % c)
242
242
243 branches = self.branchmap()
243 branches = self.branchmap()
244 for name in names:
244 for name in names:
245 self.hook('pretag', throw=True, node=hex(node), tag=name,
245 self.hook('pretag', throw=True, node=hex(node), tag=name,
246 local=local)
246 local=local)
247 if name in branches:
247 if name in branches:
248 self.ui.warn(_("warning: tag %s conflicts with existing"
248 self.ui.warn(_("warning: tag %s conflicts with existing"
249 " branch name\n") % name)
249 " branch name\n") % name)
250
250
251 def writetags(fp, names, munge, prevtags):
251 def writetags(fp, names, munge, prevtags):
252 fp.seek(0, 2)
252 fp.seek(0, 2)
253 if prevtags and prevtags[-1] != '\n':
253 if prevtags and prevtags[-1] != '\n':
254 fp.write('\n')
254 fp.write('\n')
255 for name in names:
255 for name in names:
256 m = munge and munge(name) or name
256 m = munge and munge(name) or name
257 if self._tagtypes and name in self._tagtypes:
257 if self._tagtypes and name in self._tagtypes:
258 old = self._tags.get(name, nullid)
258 old = self._tags.get(name, nullid)
259 fp.write('%s %s\n' % (hex(old), m))
259 fp.write('%s %s\n' % (hex(old), m))
260 fp.write('%s %s\n' % (hex(node), m))
260 fp.write('%s %s\n' % (hex(node), m))
261 fp.close()
261 fp.close()
262
262
263 prevtags = ''
263 prevtags = ''
264 if local:
264 if local:
265 try:
265 try:
266 fp = self.opener('localtags', 'r+')
266 fp = self.opener('localtags', 'r+')
267 except IOError:
267 except IOError:
268 fp = self.opener('localtags', 'a')
268 fp = self.opener('localtags', 'a')
269 else:
269 else:
270 prevtags = fp.read()
270 prevtags = fp.read()
271
271
272 # local tags are stored in the current charset
272 # local tags are stored in the current charset
273 writetags(fp, names, None, prevtags)
273 writetags(fp, names, None, prevtags)
274 for name in names:
274 for name in names:
275 self.hook('tag', node=hex(node), tag=name, local=local)
275 self.hook('tag', node=hex(node), tag=name, local=local)
276 return
276 return
277
277
278 try:
278 try:
279 fp = self.wfile('.hgtags', 'rb+')
279 fp = self.wfile('.hgtags', 'rb+')
280 except IOError:
280 except IOError:
281 fp = self.wfile('.hgtags', 'ab')
281 fp = self.wfile('.hgtags', 'ab')
282 else:
282 else:
283 prevtags = fp.read()
283 prevtags = fp.read()
284
284
285 # committed tags are stored in UTF-8
285 # committed tags are stored in UTF-8
286 writetags(fp, names, encoding.fromlocal, prevtags)
286 writetags(fp, names, encoding.fromlocal, prevtags)
287
287
288 fp.close()
288 fp.close()
289
289
290 if '.hgtags' not in self.dirstate:
290 if '.hgtags' not in self.dirstate:
291 self[None].add(['.hgtags'])
291 self[None].add(['.hgtags'])
292
292
293 m = matchmod.exact(self.root, '', ['.hgtags'])
293 m = matchmod.exact(self.root, '', ['.hgtags'])
294 tagnode = self.commit(message, user, date, extra=extra, match=m)
294 tagnode = self.commit(message, user, date, extra=extra, match=m)
295
295
296 for name in names:
296 for name in names:
297 self.hook('tag', node=hex(node), tag=name, local=local)
297 self.hook('tag', node=hex(node), tag=name, local=local)
298
298
299 return tagnode
299 return tagnode
300
300
301 def tag(self, names, node, message, local, user, date):
301 def tag(self, names, node, message, local, user, date):
302 '''tag a revision with one or more symbolic names.
302 '''tag a revision with one or more symbolic names.
303
303
304 names is a list of strings or, when adding a single tag, names may be a
304 names is a list of strings or, when adding a single tag, names may be a
305 string.
305 string.
306
306
307 if local is True, the tags are stored in a per-repository file.
307 if local is True, the tags are stored in a per-repository file.
308 otherwise, they are stored in the .hgtags file, and a new
308 otherwise, they are stored in the .hgtags file, and a new
309 changeset is committed with the change.
309 changeset is committed with the change.
310
310
311 keyword arguments:
311 keyword arguments:
312
312
313 local: whether to store tags in non-version-controlled file
313 local: whether to store tags in non-version-controlled file
314 (default False)
314 (default False)
315
315
316 message: commit message to use if committing
316 message: commit message to use if committing
317
317
318 user: name of user to use if committing
318 user: name of user to use if committing
319
319
320 date: date tuple to use if committing'''
320 date: date tuple to use if committing'''
321
321
322 if not local:
322 if not local:
323 for x in self.status()[:5]:
323 for x in self.status()[:5]:
324 if '.hgtags' in x:
324 if '.hgtags' in x:
325 raise util.Abort(_('working copy of .hgtags is changed '
325 raise util.Abort(_('working copy of .hgtags is changed '
326 '(please commit .hgtags manually)'))
326 '(please commit .hgtags manually)'))
327
327
328 self.tags() # instantiate the cache
328 self.tags() # instantiate the cache
329 self._tag(names, node, message, local, user, date)
329 self._tag(names, node, message, local, user, date)
330
330
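
As the docstring above spells out, tag() takes either a single name or a list of names, and local selects between .hg/localtags and a committed change to .hgtags. A usage sketch against the signature shown here; the repository path, tag names and user are made up for illustration:

# illustrative only
from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), '/tmp/repo')
node = repo['tip'].node()

# global tag: writes .hgtags and commits a new changeset
repo.tag('v1.0', node, 'Added tag v1.0', local=False,
         user='editor <editor@example.com>', date=None)

# local tags: stored only in .hg/localtags, nothing is committed
repo.tag(['nightly', 'tested'], node, '', local=True, user=None, date=None)
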
331 def tags(self):
331 def tags(self):
332 '''return a mapping of tag to node'''
332 '''return a mapping of tag to node'''
333 if self._tags is None:
333 if self._tags is None:
334 (self._tags, self._tagtypes) = self._findtags()
334 (self._tags, self._tagtypes) = self._findtags()
335
335
336 return self._tags
336 return self._tags
337
337
338 def _findtags(self):
338 def _findtags(self):
339 '''Do the hard work of finding tags. Return a pair of dicts
339 '''Do the hard work of finding tags. Return a pair of dicts
340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
341 maps tag name to a string like \'global\' or \'local\'.
341 maps tag name to a string like \'global\' or \'local\'.
342 Subclasses or extensions are free to add their own tags, but
342 Subclasses or extensions are free to add their own tags, but
343 should be aware that the returned dicts will be retained for the
343 should be aware that the returned dicts will be retained for the
344 duration of the localrepo object.'''
344 duration of the localrepo object.'''
345
345
346 # XXX what tagtype should subclasses/extensions use? Currently
346 # XXX what tagtype should subclasses/extensions use? Currently
347 # mq and bookmarks add tags, but do not set the tagtype at all.
347 # mq and bookmarks add tags, but do not set the tagtype at all.
348 # Should each extension invent its own tag type? Should there
348 # Should each extension invent its own tag type? Should there
349 # be one tagtype for all such "virtual" tags? Or is the status
349 # be one tagtype for all such "virtual" tags? Or is the status
350 # quo fine?
350 # quo fine?
351
351
352 alltags = {} # map tag name to (node, hist)
352 alltags = {} # map tag name to (node, hist)
353 tagtypes = {}
353 tagtypes = {}
354
354
355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
357
357
358 # Build the return dicts. Have to re-encode tag names because
358 # Build the return dicts. Have to re-encode tag names because
359 # the tags module always uses UTF-8 (in order not to lose info
359 # the tags module always uses UTF-8 (in order not to lose info
360 # writing to the cache), but the rest of Mercurial wants them in
360 # writing to the cache), but the rest of Mercurial wants them in
361 # local encoding.
361 # local encoding.
362 tags = {}
362 tags = {}
363 for (name, (node, hist)) in alltags.iteritems():
363 for (name, (node, hist)) in alltags.iteritems():
364 if node != nullid:
364 if node != nullid:
365 try:
365 try:
366 # ignore tags to unknown nodes
366 # ignore tags to unknown nodes
367 self.changelog.lookup(node)
367 self.changelog.lookup(node)
368 tags[encoding.tolocal(name)] = node
368 tags[encoding.tolocal(name)] = node
369 except error.LookupError:
369 except error.LookupError:
370 pass
370 pass
371 tags['tip'] = self.changelog.tip()
371 tags['tip'] = self.changelog.tip()
372 tagtypes = dict([(encoding.tolocal(name), value)
372 tagtypes = dict([(encoding.tolocal(name), value)
373 for (name, value) in tagtypes.iteritems()])
373 for (name, value) in tagtypes.iteritems()])
374 return (tags, tagtypes)
374 return (tags, tagtypes)
375
375
376 def tagtype(self, tagname):
376 def tagtype(self, tagname):
377 '''
377 '''
378 return the type of the given tag. result can be:
378 return the type of the given tag. result can be:
379
379
380 'local' : a local tag
380 'local' : a local tag
381 'global' : a global tag
381 'global' : a global tag
382 None : tag does not exist
382 None : tag does not exist
383 '''
383 '''
384
384
385 self.tags()
385 self.tags()
386
386
387 return self._tagtypes.get(tagname)
387 return self._tagtypes.get(tagname)
388
388
389 def tagslist(self):
389 def tagslist(self):
390 '''return a list of tags ordered by revision'''
390 '''return a list of tags ordered by revision'''
391 l = []
391 l = []
392 for t, n in self.tags().iteritems():
392 for t, n in self.tags().iteritems():
393 r = self.changelog.rev(n)
393 r = self.changelog.rev(n)
394 l.append((r, t, n))
394 l.append((r, t, n))
395 return [(t, n) for r, t, n in sorted(l)]
395 return [(t, n) for r, t, n in sorted(l)]
396
396
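
Taken together, tags(), tagtype() and tagslist() above give the read-only view of the tag table: the name-to-node mapping, the origin of each tag, and a revision-ordered listing. A short sketch of reading them back; the repository path is invented and at least one tag is assumed to exist:

# illustrative only
from mercurial import ui as uimod, hg
from mercurial.node import short

repo = hg.repository(uimod.ui(), '/tmp/repo')
for name, node in repo.tagslist():
    # tagtype() returns 'global' for .hgtags entries, 'local' for
    # .hg/localtags entries, and None for names it does not know about
    print "%-12s %s %s" % (name, short(node), repo.tagtype(name))
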
397 def nodetags(self, node):
397 def nodetags(self, node):
398 '''return the tags associated with a node'''
398 '''return the tags associated with a node'''
399 if not self.nodetagscache:
399 if not self.nodetagscache:
400 self.nodetagscache = {}
400 self.nodetagscache = {}
401 for t, n in self.tags().iteritems():
401 for t, n in self.tags().iteritems():
402 self.nodetagscache.setdefault(n, []).append(t)
402 self.nodetagscache.setdefault(n, []).append(t)
403 for tags in self.nodetagscache.itervalues():
403 for tags in self.nodetagscache.itervalues():
404 tags.sort()
404 tags.sort()
405 return self.nodetagscache.get(node, [])
405 return self.nodetagscache.get(node, [])
406
406
407 def nodebookmarks(self, node):
407 def nodebookmarks(self, node):
408 marks = []
408 marks = []
409 for bookmark, n in self._bookmarks.iteritems():
409 for bookmark, n in self._bookmarks.iteritems():
410 if n == node:
410 if n == node:
411 marks.append(bookmark)
411 marks.append(bookmark)
412 return sorted(marks)
412 return sorted(marks)
413
413
414 def _branchtags(self, partial, lrev):
414 def _branchtags(self, partial, lrev):
415 # TODO: rename this function?
415 # TODO: rename this function?
416 tiprev = len(self) - 1
416 tiprev = len(self) - 1
417 if lrev != tiprev:
417 if lrev != tiprev:
418 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
418 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
419 self._updatebranchcache(partial, ctxgen)
419 self._updatebranchcache(partial, ctxgen)
420 self._writebranchcache(partial, self.changelog.tip(), tiprev)
420 self._writebranchcache(partial, self.changelog.tip(), tiprev)
421
421
422 return partial
422 return partial
423
423
424 def updatebranchcache(self):
424 def updatebranchcache(self):
425 tip = self.changelog.tip()
425 tip = self.changelog.tip()
426 if self._branchcache is not None and self._branchcachetip == tip:
426 if self._branchcache is not None and self._branchcachetip == tip:
427 return self._branchcache
427 return self._branchcache
428
428
429 oldtip = self._branchcachetip
429 oldtip = self._branchcachetip
430 self._branchcachetip = tip
430 self._branchcachetip = tip
431 if oldtip is None or oldtip not in self.changelog.nodemap:
431 if oldtip is None or oldtip not in self.changelog.nodemap:
432 partial, last, lrev = self._readbranchcache()
432 partial, last, lrev = self._readbranchcache()
433 else:
433 else:
434 lrev = self.changelog.rev(oldtip)
434 lrev = self.changelog.rev(oldtip)
435 partial = self._branchcache
435 partial = self._branchcache
436
436
437 self._branchtags(partial, lrev)
437 self._branchtags(partial, lrev)
438 # this private cache holds all heads (not just tips)
438 # this private cache holds all heads (not just tips)
439 self._branchcache = partial
439 self._branchcache = partial
440
440
441 def branchmap(self):
441 def branchmap(self):
442 '''returns a dictionary {branch: [branchheads]}'''
442 '''returns a dictionary {branch: [branchheads]}'''
443 self.updatebranchcache()
443 self.updatebranchcache()
444 return self._branchcache
444 return self._branchcache
445
445
446 def branchtags(self):
446 def branchtags(self):
447 '''return a dict where branch names map to the tipmost head of
447 '''return a dict where branch names map to the tipmost head of
448 the branch, open heads come before closed'''
448 the branch, open heads come before closed'''
449 bt = {}
449 bt = {}
450 for bn, heads in self.branchmap().iteritems():
450 for bn, heads in self.branchmap().iteritems():
451 tip = heads[-1]
451 tip = heads[-1]
452 for h in reversed(heads):
452 for h in reversed(heads):
453 if 'close' not in self.changelog.read(h)[5]:
453 if 'close' not in self.changelog.read(h)[5]:
454 tip = h
454 tip = h
455 break
455 break
456 bt[bn] = tip
456 bt[bn] = tip
457 return bt
457 return bt
458
458
459 def _readbranchcache(self):
459 def _readbranchcache(self):
460 partial = {}
460 partial = {}
461 try:
461 try:
462 f = self.opener("cache/branchheads")
462 f = self.opener("cache/branchheads")
463 lines = f.read().split('\n')
463 lines = f.read().split('\n')
464 f.close()
464 f.close()
465 except (IOError, OSError):
465 except (IOError, OSError):
466 return {}, nullid, nullrev
466 return {}, nullid, nullrev
467
467
468 try:
468 try:
469 last, lrev = lines.pop(0).split(" ", 1)
469 last, lrev = lines.pop(0).split(" ", 1)
470 last, lrev = bin(last), int(lrev)
470 last, lrev = bin(last), int(lrev)
471 if lrev >= len(self) or self[lrev].node() != last:
471 if lrev >= len(self) or self[lrev].node() != last:
472 # invalidate the cache
472 # invalidate the cache
473 raise ValueError('invalidating branch cache (tip differs)')
473 raise ValueError('invalidating branch cache (tip differs)')
474 for l in lines:
474 for l in lines:
475 if not l:
475 if not l:
476 continue
476 continue
477 node, label = l.split(" ", 1)
477 node, label = l.split(" ", 1)
478 label = encoding.tolocal(label.strip())
478 label = encoding.tolocal(label.strip())
479 partial.setdefault(label, []).append(bin(node))
479 partial.setdefault(label, []).append(bin(node))
480 except KeyboardInterrupt:
480 except KeyboardInterrupt:
481 raise
481 raise
482 except Exception, inst:
482 except Exception, inst:
483 if self.ui.debugflag:
483 if self.ui.debugflag:
484 self.ui.warn(str(inst), '\n')
484 self.ui.warn(str(inst), '\n')
485 partial, last, lrev = {}, nullid, nullrev
485 partial, last, lrev = {}, nullid, nullrev
486 return partial, last, lrev
486 return partial, last, lrev
487
487
488 def _writebranchcache(self, branches, tip, tiprev):
488 def _writebranchcache(self, branches, tip, tiprev):
489 try:
489 try:
490 f = self.opener("cache/branchheads", "w", atomictemp=True)
490 f = self.opener("cache/branchheads", "w", atomictemp=True)
491 f.write("%s %s\n" % (hex(tip), tiprev))
491 f.write("%s %s\n" % (hex(tip), tiprev))
492 for label, nodes in branches.iteritems():
492 for label, nodes in branches.iteritems():
493 for node in nodes:
493 for node in nodes:
494 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
494 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
495 f.rename()
495 f.rename()
496 except (IOError, OSError):
496 except (IOError, OSError):
497 pass
497 pass
498
498
499 def _updatebranchcache(self, partial, ctxgen):
499 def _updatebranchcache(self, partial, ctxgen):
500 # collect new branch entries
500 # collect new branch entries
501 newbranches = {}
501 newbranches = {}
502 for c in ctxgen:
502 for c in ctxgen:
503 newbranches.setdefault(c.branch(), []).append(c.node())
503 newbranches.setdefault(c.branch(), []).append(c.node())
504 # if older branchheads are reachable from new ones, they aren't
504 # if older branchheads are reachable from new ones, they aren't
505 # really branchheads. Note checking parents is insufficient:
505 # really branchheads. Note checking parents is insufficient:
506 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
506 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
507 for branch, newnodes in newbranches.iteritems():
507 for branch, newnodes in newbranches.iteritems():
508 bheads = partial.setdefault(branch, [])
508 bheads = partial.setdefault(branch, [])
509 bheads.extend(newnodes)
509 bheads.extend(newnodes)
510 if len(bheads) <= 1:
510 if len(bheads) <= 1:
511 continue
511 continue
512 # starting from tip means fewer passes over reachable
512 # starting from tip means fewer passes over reachable
513 while newnodes:
513 while newnodes:
514 latest = newnodes.pop()
514 latest = newnodes.pop()
515 if latest not in bheads:
515 if latest not in bheads:
516 continue
516 continue
517 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
517 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
518 reachable = self.changelog.reachable(latest, minbhrev)
518 reachable = self.changelog.reachable(latest, minbhrev)
519 reachable.remove(latest)
519 reachable.remove(latest)
520 bheads = [b for b in bheads if b not in reachable]
520 bheads = [b for b in bheads if b not in reachable]
521 partial[branch] = bheads
521 partial[branch] = bheads
522
522
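
The three-revision example in the comment above deserves one more sentence: after 1 (branch a) -> 2 (branch b) -> 3 (branch a), revision 1 is the cached head of branch a, while the new head 3 has 2 -- a branch-b changeset -- as its only parent, so a parent check alone would leave both 1 and 3 recorded as heads of a. Only the reachability walk from the newest node down to the lowest cached head notices that 1 is an ancestor of 3 and prunes it. A toy, Mercurial-free rendering of that pruning step:

# toy history: rev 0 (branch a) -> rev 1 (branch b) -> rev 2 (branch a)
parents = {0: [], 1: [0], 2: [1]}

def ancestors(rev):
    """rev plus everything reachable from it through parent links"""
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(parents[r])
    return seen

bheads = [0, 2]                 # cached head of branch a plus the new node
reachable = ancestors(2)
reachable.remove(2)             # mirrors "reachable.remove(latest)" above
bheads = [b for b in bheads if b not in reachable]
print bheads                    # [2]: rev 0 was only ever an interim head
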
523 def lookup(self, key):
523 def lookup(self, key):
524 if isinstance(key, int):
524 if isinstance(key, int):
525 return self.changelog.node(key)
525 return self.changelog.node(key)
526 elif key == '.':
526 elif key == '.':
527 return self.dirstate.p1()
527 return self.dirstate.p1()
528 elif key == 'null':
528 elif key == 'null':
529 return nullid
529 return nullid
530 elif key == 'tip':
530 elif key == 'tip':
531 return self.changelog.tip()
531 return self.changelog.tip()
532 n = self.changelog._match(key)
532 n = self.changelog._match(key)
533 if n:
533 if n:
534 return n
534 return n
535 if key in self._bookmarks:
535 if key in self._bookmarks:
536 return self._bookmarks[key]
536 return self._bookmarks[key]
537 if key in self.tags():
537 if key in self.tags():
538 return self.tags()[key]
538 return self.tags()[key]
539 if key in self.branchtags():
539 if key in self.branchtags():
540 return self.branchtags()[key]
540 return self.branchtags()[key]
541 n = self.changelog._partialmatch(key)
541 n = self.changelog._partialmatch(key)
542 if n:
542 if n:
543 return n
543 return n
544
544
545 # can't find key, check if it might have come from damaged dirstate
545 # can't find key, check if it might have come from damaged dirstate
546 if key in self.dirstate.parents():
546 if key in self.dirstate.parents():
547 raise error.Abort(_("working directory has unknown parent '%s'!")
547 raise error.Abort(_("working directory has unknown parent '%s'!")
548 % short(key))
548 % short(key))
549 try:
549 try:
550 if len(key) == 20:
550 if len(key) == 20:
551 key = hex(key)
551 key = hex(key)
552 except:
552 except:
553 pass
553 pass
554 raise error.RepoLookupError(_("unknown revision '%s'") % key)
554 raise error.RepoLookupError(_("unknown revision '%s'") % key)
555
555
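
lookup() above also fixes the precedence of a bare symbol: an integer revision first, then the special names '.', 'null' and 'tip', then an exact changelog match, then bookmarks, tags and branch names, and finally a partial node match -- so, at this revision, a bookmark shadows a tag of the same name and either shadows a branch. A small sketch of using it; the repository path and the shared name 'stable' are invented for illustration:

# illustrative only
from mercurial import ui as uimod, hg
from mercurial.node import short

repo = hg.repository(uimod.ui(), '/tmp/repo')
print short(repo.lookup('tip'))      # special name, resolved early
# if both a bookmark and a tag are called 'stable', the bookmark wins here
print short(repo.lookup('stable'))
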
556 def lookupbranch(self, key, remote=None):
556 def lookupbranch(self, key, remote=None):
557 repo = remote or self
557 repo = remote or self
558 if key in repo.branchmap():
558 if key in repo.branchmap():
559 return key
559 return key
560
560
561 repo = (remote and remote.local()) and remote or self
561 repo = (remote and remote.local()) and remote or self
562 return repo[key].branch()
562 return repo[key].branch()
563
563
564 def known(self, nodes):
564 def known(self, nodes):
565 nm = self.changelog.nodemap
565 nm = self.changelog.nodemap
566 return [(n in nm) for n in nodes]
566 return [(n in nm) for n in nodes]
567
567
568 def local(self):
568 def local(self):
569 return True
569 return True
570
570
571 def join(self, f):
571 def join(self, f):
572 return os.path.join(self.path, f)
572 return os.path.join(self.path, f)
573
573
574 def wjoin(self, f):
574 def wjoin(self, f):
575 return os.path.join(self.root, f)
575 return os.path.join(self.root, f)
576
576
577 def file(self, f):
577 def file(self, f):
578 if f[0] == '/':
578 if f[0] == '/':
579 f = f[1:]
579 f = f[1:]
580 return filelog.filelog(self.sopener, f)
580 return filelog.filelog(self.sopener, f)
581
581
582 def changectx(self, changeid):
582 def changectx(self, changeid):
583 return self[changeid]
583 return self[changeid]
584
584
585 def parents(self, changeid=None):
585 def parents(self, changeid=None):
586 '''get list of changectxs for parents of changeid'''
586 '''get list of changectxs for parents of changeid'''
587 return self[changeid].parents()
587 return self[changeid].parents()
588
588
589 def filectx(self, path, changeid=None, fileid=None):
589 def filectx(self, path, changeid=None, fileid=None):
590 """changeid can be a changeset revision, node, or tag.
590 """changeid can be a changeset revision, node, or tag.
591 fileid can be a file revision or node."""
591 fileid can be a file revision or node."""
592 return context.filectx(self, path, changeid, fileid)
592 return context.filectx(self, path, changeid, fileid)
593
593
594 def getcwd(self):
594 def getcwd(self):
595 return self.dirstate.getcwd()
595 return self.dirstate.getcwd()
596
596
597 def pathto(self, f, cwd=None):
597 def pathto(self, f, cwd=None):
598 return self.dirstate.pathto(f, cwd)
598 return self.dirstate.pathto(f, cwd)
599
599
600 def wfile(self, f, mode='r'):
600 def wfile(self, f, mode='r'):
601 return self.wopener(f, mode)
601 return self.wopener(f, mode)
602
602
603 def _link(self, f):
603 def _link(self, f):
604 return os.path.islink(self.wjoin(f))
604 return os.path.islink(self.wjoin(f))
605
605
606 def _loadfilter(self, filter):
606 def _loadfilter(self, filter):
607 if filter not in self.filterpats:
607 if filter not in self.filterpats:
608 l = []
608 l = []
609 for pat, cmd in self.ui.configitems(filter):
609 for pat, cmd in self.ui.configitems(filter):
610 if cmd == '!':
610 if cmd == '!':
611 continue
611 continue
612 mf = matchmod.match(self.root, '', [pat])
612 mf = matchmod.match(self.root, '', [pat])
613 fn = None
613 fn = None
614 params = cmd
614 params = cmd
615 for name, filterfn in self._datafilters.iteritems():
615 for name, filterfn in self._datafilters.iteritems():
616 if cmd.startswith(name):
616 if cmd.startswith(name):
617 fn = filterfn
617 fn = filterfn
618 params = cmd[len(name):].lstrip()
618 params = cmd[len(name):].lstrip()
619 break
619 break
620 if not fn:
620 if not fn:
621 fn = lambda s, c, **kwargs: util.filter(s, c)
621 fn = lambda s, c, **kwargs: util.filter(s, c)
622 # Wrap old filters not supporting keyword arguments
622 # Wrap old filters not supporting keyword arguments
623 if not inspect.getargspec(fn)[2]:
623 if not inspect.getargspec(fn)[2]:
624 oldfn = fn
624 oldfn = fn
625 fn = lambda s, c, **kwargs: oldfn(s, c)
625 fn = lambda s, c, **kwargs: oldfn(s, c)
626 l.append((mf, fn, params))
626 l.append((mf, fn, params))
627 self.filterpats[filter] = l
627 self.filterpats[filter] = l
628 return self.filterpats[filter]
628 return self.filterpats[filter]
629
629
630 def _filter(self, filterpats, filename, data):
630 def _filter(self, filterpats, filename, data):
631 for mf, fn, cmd in filterpats:
631 for mf, fn, cmd in filterpats:
632 if mf(filename):
632 if mf(filename):
633 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
633 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
634 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
634 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
635 break
635 break
636
636
637 return data
637 return data
638
638
639 @propertycache
639 @propertycache
640 def _encodefilterpats(self):
640 def _encodefilterpats(self):
641 return self._loadfilter('encode')
641 return self._loadfilter('encode')
642
642
643 @propertycache
643 @propertycache
644 def _decodefilterpats(self):
644 def _decodefilterpats(self):
645 return self._loadfilter('decode')
645 return self._loadfilter('decode')
646
646
647 def adddatafilter(self, name, filter):
647 def adddatafilter(self, name, filter):
648 self._datafilters[name] = filter
648 self._datafilters[name] = filter
649
649
650 def wread(self, filename):
650 def wread(self, filename):
651 if self._link(filename):
651 if self._link(filename):
652 data = os.readlink(self.wjoin(filename))
652 data = os.readlink(self.wjoin(filename))
653 else:
653 else:
654 data = self.wopener(filename, 'r').read()
654 data = self.wopener(filename, 'r').read()
655 return self._filter(self._encodefilterpats, filename, data)
655 return self._filter(self._encodefilterpats, filename, data)
656
656
657 def wwrite(self, filename, data, flags):
657 def wwrite(self, filename, data, flags):
658 data = self._filter(self._decodefilterpats, filename, data)
658 data = self._filter(self._decodefilterpats, filename, data)
659 if 'l' in flags:
659 if 'l' in flags:
660 self.wopener.symlink(data, filename)
660 self.wopener.symlink(data, filename)
661 else:
661 else:
662 self.wopener(filename, 'w').write(data)
662 self.wopener(filename, 'w').write(data)
663 if 'x' in flags:
663 if 'x' in flags:
664 util.set_flags(self.wjoin(filename), False, True)
664 util.set_flags(self.wjoin(filename), False, True)
665
665
666 def wwritedata(self, filename, data):
666 def wwritedata(self, filename, data):
667 return self._filter(self._decodefilterpats, filename, data)
667 return self._filter(self._decodefilterpats, filename, data)
668
668
669 def transaction(self, desc):
669 def transaction(self, desc):
670 tr = self._transref and self._transref() or None
670 tr = self._transref and self._transref() or None
671 if tr and tr.running():
671 if tr and tr.running():
672 return tr.nest()
672 return tr.nest()
673
673
674 # abort here if the journal already exists
674 # abort here if the journal already exists
675 if os.path.exists(self.sjoin("journal")):
675 if os.path.exists(self.sjoin("journal")):
676 raise error.RepoError(
676 raise error.RepoError(
677 _("abandoned transaction found - run hg recover"))
677 _("abandoned transaction found - run hg recover"))
678
678
679 # save dirstate for rollback
679 # save dirstate for rollback
680 try:
680 try:
681 ds = self.opener("dirstate").read()
681 ds = self.opener("dirstate").read()
682 except IOError:
682 except IOError:
683 ds = ""
683 ds = ""
684 self.opener("journal.dirstate", "w").write(ds)
684 self.opener("journal.dirstate", "w").write(ds)
685 self.opener("journal.branch", "w").write(
685 self.opener("journal.branch", "w").write(
686 encoding.fromlocal(self.dirstate.branch()))
686 encoding.fromlocal(self.dirstate.branch()))
687 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
687 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
688
688
689 renames = [(self.sjoin("journal"), self.sjoin("undo")),
689 renames = [(self.sjoin("journal"), self.sjoin("undo")),
690 (self.join("journal.dirstate"), self.join("undo.dirstate")),
690 (self.join("journal.dirstate"), self.join("undo.dirstate")),
691 (self.join("journal.branch"), self.join("undo.branch")),
691 (self.join("journal.branch"), self.join("undo.branch")),
692 (self.join("journal.desc"), self.join("undo.desc"))]
692 (self.join("journal.desc"), self.join("undo.desc"))]
693 tr = transaction.transaction(self.ui.warn, self.sopener,
693 tr = transaction.transaction(self.ui.warn, self.sopener,
694 self.sjoin("journal"),
694 self.sjoin("journal"),
695 aftertrans(renames),
695 aftertrans(renames),
696 self.store.createmode)
696 self.store.createmode)
697 self._transref = weakref.ref(tr)
697 self._transref = weakref.ref(tr)
698 return tr
698 return tr
699
699
700 def recover(self):
700 def recover(self):
701 lock = self.lock()
701 lock = self.lock()
702 try:
702 try:
703 if os.path.exists(self.sjoin("journal")):
703 if os.path.exists(self.sjoin("journal")):
704 self.ui.status(_("rolling back interrupted transaction\n"))
704 self.ui.status(_("rolling back interrupted transaction\n"))
705 transaction.rollback(self.sopener, self.sjoin("journal"),
705 transaction.rollback(self.sopener, self.sjoin("journal"),
706 self.ui.warn)
706 self.ui.warn)
707 self.invalidate()
707 self.invalidate()
708 return True
708 return True
709 else:
709 else:
710 self.ui.warn(_("no interrupted transaction available\n"))
710 self.ui.warn(_("no interrupted transaction available\n"))
711 return False
711 return False
712 finally:
712 finally:
713 lock.release()
713 lock.release()
714
714
715 def rollback(self, dryrun=False):
715 def rollback(self, dryrun=False):
716 wlock = lock = None
716 wlock = lock = None
717 try:
717 try:
718 wlock = self.wlock()
718 wlock = self.wlock()
719 lock = self.lock()
719 lock = self.lock()
720 if os.path.exists(self.sjoin("undo")):
720 if os.path.exists(self.sjoin("undo")):
721 try:
721 try:
722 args = self.opener("undo.desc", "r").read().splitlines()
722 args = self.opener("undo.desc", "r").read().splitlines()
723 if len(args) >= 3 and self.ui.verbose:
723 if len(args) >= 3 and self.ui.verbose:
724 desc = _("repository tip rolled back to revision %s"
724 desc = _("repository tip rolled back to revision %s"
725 " (undo %s: %s)\n") % (
725 " (undo %s: %s)\n") % (
726 int(args[0]) - 1, args[1], args[2])
726 int(args[0]) - 1, args[1], args[2])
727 elif len(args) >= 2:
727 elif len(args) >= 2:
728 desc = _("repository tip rolled back to revision %s"
728 desc = _("repository tip rolled back to revision %s"
729 " (undo %s)\n") % (
729 " (undo %s)\n") % (
730 int(args[0]) - 1, args[1])
730 int(args[0]) - 1, args[1])
731 except IOError:
731 except IOError:
732 desc = _("rolling back unknown transaction\n")
732 desc = _("rolling back unknown transaction\n")
733 self.ui.status(desc)
733 self.ui.status(desc)
734 if dryrun:
734 if dryrun:
735 return
735 return
736 transaction.rollback(self.sopener, self.sjoin("undo"),
736 transaction.rollback(self.sopener, self.sjoin("undo"),
737 self.ui.warn)
737 self.ui.warn)
738 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
738 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
739 if os.path.exists(self.join('undo.bookmarks')):
739 if os.path.exists(self.join('undo.bookmarks')):
740 util.rename(self.join('undo.bookmarks'),
740 util.rename(self.join('undo.bookmarks'),
741 self.join('bookmarks'))
741 self.join('bookmarks'))
742 try:
742 try:
743 branch = self.opener("undo.branch").read()
743 branch = self.opener("undo.branch").read()
744 self.dirstate.setbranch(branch)
744 self.dirstate.setbranch(branch)
745 except IOError:
745 except IOError:
746 self.ui.warn(_("named branch could not be reset, "
746 self.ui.warn(_("named branch could not be reset, "
747 "current branch is still: %s\n")
747 "current branch is still: %s\n")
748 % self.dirstate.branch())
748 % self.dirstate.branch())
749 self.invalidate()
749 self.invalidate()
750 self.dirstate.invalidate()
750 self.dirstate.invalidate()
751 self.destroyed()
751 self.destroyed()
752 parents = tuple([p.rev() for p in self.parents()])
752 parents = tuple([p.rev() for p in self.parents()])
753 if len(parents) > 1:
753 if len(parents) > 1:
754 self.ui.status(_("working directory now based on "
754 self.ui.status(_("working directory now based on "
755 "revisions %d and %d\n") % parents)
755 "revisions %d and %d\n") % parents)
756 else:
756 else:
757 self.ui.status(_("working directory now based on "
757 self.ui.status(_("working directory now based on "
758 "revision %d\n") % parents)
758 "revision %d\n") % parents)
759 else:
759 else:
760 self.ui.warn(_("no rollback information available\n"))
760 self.ui.warn(_("no rollback information available\n"))
761 return 1
761 return 1
762 finally:
762 finally:
763 release(lock, wlock)
763 release(lock, wlock)
764
764
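The transaction/rollback machinery above follows a journal-then-rename pattern: state files are copied aside as journal.* before the transaction starts, and aftertrans() renames them to undo.* when it closes, which is what rollback() later restores. A minimal sketch of that idea in plain Python, independent of Mercurial's transaction class (with_journal and update are illustrative names, not part of the code above, and path is assumed to already exist):

    import os, shutil

    def with_journal(path, update):
        # snapshot the current state before touching the real file
        journal = path + '.journal'
        shutil.copyfile(path, journal)
        try:
            update(path)                    # mutate the real file
        except Exception:
            os.rename(journal, path)        # abort: restore the snapshot
            raise
        os.rename(journal, path + '.undo')  # success: keep it for a later rollback
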
765 def invalidatecaches(self):
765 def invalidatecaches(self):
766 self._tags = None
766 self._tags = None
767 self._tagtypes = None
767 self._tagtypes = None
768 self.nodetagscache = None
768 self.nodetagscache = None
769 self._branchcache = None # in UTF-8
769 self._branchcache = None # in UTF-8
770 self._branchcachetip = None
770 self._branchcachetip = None
771
771
772 def invalidate(self):
772 def invalidate(self):
773 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
773 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
774 if a in self.__dict__:
774 if a in self.__dict__:
775 delattr(self, a)
775 delattr(self, a)
776 self.invalidatecaches()
776 self.invalidatecaches()
777
777
778 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
778 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
779 try:
779 try:
780 l = lock.lock(lockname, 0, releasefn, desc=desc)
780 l = lock.lock(lockname, 0, releasefn, desc=desc)
781 except error.LockHeld, inst:
781 except error.LockHeld, inst:
782 if not wait:
782 if not wait:
783 raise
783 raise
784 self.ui.warn(_("waiting for lock on %s held by %r\n") %
784 self.ui.warn(_("waiting for lock on %s held by %r\n") %
785 (desc, inst.locker))
785 (desc, inst.locker))
786 # default to 600 seconds timeout
786 # default to 600 seconds timeout
787 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
787 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
788 releasefn, desc=desc)
788 releasefn, desc=desc)
789 if acquirefn:
789 if acquirefn:
790 acquirefn()
790 acquirefn()
791 return l
791 return l
792
792
793 def lock(self, wait=True):
793 def lock(self, wait=True):
794 '''Lock the repository store (.hg/store) and return a weak reference
794 '''Lock the repository store (.hg/store) and return a weak reference
795 to the lock. Use this before modifying the store (e.g. committing or
795 to the lock. Use this before modifying the store (e.g. committing or
796 stripping). If you are opening a transaction, get a lock as well.'''
796 stripping). If you are opening a transaction, get a lock as well.'''
797 l = self._lockref and self._lockref()
797 l = self._lockref and self._lockref()
798 if l is not None and l.held:
798 if l is not None and l.held:
799 l.lock()
799 l.lock()
800 return l
800 return l
801
801
802 l = self._lock(self.sjoin("lock"), wait, self.store.write,
802 l = self._lock(self.sjoin("lock"), wait, self.store.write,
803 self.invalidate, _('repository %s') % self.origroot)
803 self.invalidate, _('repository %s') % self.origroot)
804 self._lockref = weakref.ref(l)
804 self._lockref = weakref.ref(l)
805 return l
805 return l
806
806
807 def wlock(self, wait=True):
807 def wlock(self, wait=True):
808 '''Lock the non-store parts of the repository (everything under
808 '''Lock the non-store parts of the repository (everything under
809 .hg except .hg/store) and return a weak reference to the lock.
809 .hg except .hg/store) and return a weak reference to the lock.
810 Use this before modifying files in .hg.'''
810 Use this before modifying files in .hg.'''
811 l = self._wlockref and self._wlockref()
811 l = self._wlockref and self._wlockref()
812 if l is not None and l.held:
812 if l is not None and l.held:
813 l.lock()
813 l.lock()
814 return l
814 return l
815
815
816 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
816 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
817 self.dirstate.invalidate, _('working directory of %s') %
817 self.dirstate.invalidate, _('working directory of %s') %
818 self.origroot)
818 self.origroot)
819 self._wlockref = weakref.ref(l)
819 self._wlockref = weakref.ref(l)
820 return l
820 return l
821
821
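lock() and wlock() above are meant to be taken in a fixed order, working-directory lock first and store lock second, and released in reverse, as rollback() and commit() in this file do. A minimal calling sketch (it assumes repo is a localrepository instance and reuses the release() helper seen in rollback() above):

    wlock = lock = None
    try:
        wlock = repo.wlock()   # guards .hg outside the store
        lock = repo.lock()     # guards .hg/store
        # ... modify the repository while holding both locks ...
    finally:
        release(lock, wlock)   # store lock is released first, then the wlock
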
822 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
822 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
823 """
823 """
824 commit an individual file as part of a larger transaction
824 commit an individual file as part of a larger transaction
825 """
825 """
826
826
827 fname = fctx.path()
827 fname = fctx.path()
828 text = fctx.data()
828 text = fctx.data()
829 flog = self.file(fname)
829 flog = self.file(fname)
830 fparent1 = manifest1.get(fname, nullid)
830 fparent1 = manifest1.get(fname, nullid)
831 fparent2 = fparent2o = manifest2.get(fname, nullid)
831 fparent2 = fparent2o = manifest2.get(fname, nullid)
832
832
833 meta = {}
833 meta = {}
834 copy = fctx.renamed()
834 copy = fctx.renamed()
835 if copy and copy[0] != fname:
835 if copy and copy[0] != fname:
836 # Mark the new revision of this file as a copy of another
836 # Mark the new revision of this file as a copy of another
837 # file. This copy data will effectively act as a parent
837 # file. This copy data will effectively act as a parent
838 # of this new revision. If this is a merge, the first
838 # of this new revision. If this is a merge, the first
839 # parent will be the nullid (meaning "look up the copy data")
839 # parent will be the nullid (meaning "look up the copy data")
840 # and the second one will be the other parent. For example:
840 # and the second one will be the other parent. For example:
841 #
841 #
842 # 0 --- 1 --- 3 rev1 changes file foo
842 # 0 --- 1 --- 3 rev1 changes file foo
843 # \ / rev2 renames foo to bar and changes it
843 # \ / rev2 renames foo to bar and changes it
844 # \- 2 -/ rev3 should have bar with all changes and
844 # \- 2 -/ rev3 should have bar with all changes and
845 # should record that bar descends from
845 # should record that bar descends from
846 # bar in rev2 and foo in rev1
846 # bar in rev2 and foo in rev1
847 #
847 #
848 # this allows this merge to succeed:
848 # this allows this merge to succeed:
849 #
849 #
850 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
850 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
851 # \ / merging rev3 and rev4 should use bar@rev2
851 # \ / merging rev3 and rev4 should use bar@rev2
852 # \- 2 --- 4 as the merge base
852 # \- 2 --- 4 as the merge base
853 #
853 #
854
854
855 cfname = copy[0]
855 cfname = copy[0]
856 crev = manifest1.get(cfname)
856 crev = manifest1.get(cfname)
857 newfparent = fparent2
857 newfparent = fparent2
858
858
859 if manifest2: # branch merge
859 if manifest2: # branch merge
860 if fparent2 == nullid or crev is None: # copied on remote side
860 if fparent2 == nullid or crev is None: # copied on remote side
861 if cfname in manifest2:
861 if cfname in manifest2:
862 crev = manifest2[cfname]
862 crev = manifest2[cfname]
863 newfparent = fparent1
863 newfparent = fparent1
864
864
865 # find source in nearest ancestor if we've lost track
865 # find source in nearest ancestor if we've lost track
866 if not crev:
866 if not crev:
867 self.ui.debug(" %s: searching for copy revision for %s\n" %
867 self.ui.debug(" %s: searching for copy revision for %s\n" %
868 (fname, cfname))
868 (fname, cfname))
869 for ancestor in self[None].ancestors():
869 for ancestor in self[None].ancestors():
870 if cfname in ancestor:
870 if cfname in ancestor:
871 crev = ancestor[cfname].filenode()
871 crev = ancestor[cfname].filenode()
872 break
872 break
873
873
874 if crev:
874 if crev:
875 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
875 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
876 meta["copy"] = cfname
876 meta["copy"] = cfname
877 meta["copyrev"] = hex(crev)
877 meta["copyrev"] = hex(crev)
878 fparent1, fparent2 = nullid, newfparent
878 fparent1, fparent2 = nullid, newfparent
879 else:
879 else:
880 self.ui.warn(_("warning: can't find ancestor for '%s' "
880 self.ui.warn(_("warning: can't find ancestor for '%s' "
881 "copied from '%s'!\n") % (fname, cfname))
881 "copied from '%s'!\n") % (fname, cfname))
882
882
883 elif fparent2 != nullid:
883 elif fparent2 != nullid:
884 # is one parent an ancestor of the other?
884 # is one parent an ancestor of the other?
885 fparentancestor = flog.ancestor(fparent1, fparent2)
885 fparentancestor = flog.ancestor(fparent1, fparent2)
886 if fparentancestor == fparent1:
886 if fparentancestor == fparent1:
887 fparent1, fparent2 = fparent2, nullid
887 fparent1, fparent2 = fparent2, nullid
888 elif fparentancestor == fparent2:
888 elif fparentancestor == fparent2:
889 fparent2 = nullid
889 fparent2 = nullid
890
890
891 # is the file changed?
891 # is the file changed?
892 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
892 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
893 changelist.append(fname)
893 changelist.append(fname)
894 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
894 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
895
895
896 # are just the flags changed during merge?
896 # are just the flags changed during merge?
897 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
897 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
898 changelist.append(fname)
898 changelist.append(fname)
899
899
900 return fparent1
900 return fparent1
901
901
902 def commit(self, text="", user=None, date=None, match=None, force=False,
902 def commit(self, text="", user=None, date=None, match=None, force=False,
903 editor=False, extra={}):
903 editor=False, extra={}):
904 """Add a new revision to current repository.
904 """Add a new revision to current repository.
905
905
906 Revision information is gathered from the working directory;
906 Revision information is gathered from the working directory;
907 match can be used to filter the committed files. If editor is
907 match can be used to filter the committed files. If editor is
908 supplied, it is called to get a commit message.
908 supplied, it is called to get a commit message.
909 """
909 """
910
910
911 def fail(f, msg):
911 def fail(f, msg):
912 raise util.Abort('%s: %s' % (f, msg))
912 raise util.Abort('%s: %s' % (f, msg))
913
913
914 if not match:
914 if not match:
915 match = matchmod.always(self.root, '')
915 match = matchmod.always(self.root, '')
916
916
917 if not force:
917 if not force:
918 vdirs = []
918 vdirs = []
919 match.dir = vdirs.append
919 match.dir = vdirs.append
920 match.bad = fail
920 match.bad = fail
921
921
922 wlock = self.wlock()
922 wlock = self.wlock()
923 try:
923 try:
924 wctx = self[None]
924 wctx = self[None]
925 merge = len(wctx.parents()) > 1
925 merge = len(wctx.parents()) > 1
926
926
927 if (not force and merge and match and
927 if (not force and merge and match and
928 (match.files() or match.anypats())):
928 (match.files() or match.anypats())):
929 raise util.Abort(_('cannot partially commit a merge '
929 raise util.Abort(_('cannot partially commit a merge '
930 '(do not specify files or patterns)'))
930 '(do not specify files or patterns)'))
931
931
932 changes = self.status(match=match, clean=force)
932 changes = self.status(match=match, clean=force)
933 if force:
933 if force:
934 changes[0].extend(changes[6]) # mq may commit unchanged files
934 changes[0].extend(changes[6]) # mq may commit unchanged files
935
935
936 # check subrepos
936 # check subrepos
937 subs = []
937 subs = []
938 removedsubs = set()
938 removedsubs = set()
939 for p in wctx.parents():
939 for p in wctx.parents():
940 removedsubs.update(s for s in p.substate if match(s))
940 removedsubs.update(s for s in p.substate if match(s))
941 for s in wctx.substate:
941 for s in wctx.substate:
942 removedsubs.discard(s)
942 removedsubs.discard(s)
943 if match(s) and wctx.sub(s).dirty():
943 if match(s) and wctx.sub(s).dirty():
944 subs.append(s)
944 subs.append(s)
945 if (subs or removedsubs):
945 if (subs or removedsubs):
946 if (not match('.hgsub') and
946 if (not match('.hgsub') and
947 '.hgsub' in (wctx.modified() + wctx.added())):
947 '.hgsub' in (wctx.modified() + wctx.added())):
948 raise util.Abort(_("can't commit subrepos without .hgsub"))
948 raise util.Abort(_("can't commit subrepos without .hgsub"))
949 if '.hgsubstate' not in changes[0]:
949 if '.hgsubstate' not in changes[0]:
950 changes[0].insert(0, '.hgsubstate')
950 changes[0].insert(0, '.hgsubstate')
951
951
952 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
952 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
953 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
953 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
954 if changedsubs:
954 if changedsubs:
955 raise util.Abort(_("uncommitted changes in subrepo %s")
955 raise util.Abort(_("uncommitted changes in subrepo %s")
956 % changedsubs[0])
956 % changedsubs[0])
957
957
958 # make sure all explicit patterns are matched
958 # make sure all explicit patterns are matched
959 if not force and match.files():
959 if not force and match.files():
960 matched = set(changes[0] + changes[1] + changes[2])
960 matched = set(changes[0] + changes[1] + changes[2])
961
961
962 for f in match.files():
962 for f in match.files():
963 if f == '.' or f in matched or f in wctx.substate:
963 if f == '.' or f in matched or f in wctx.substate:
964 continue
964 continue
965 if f in changes[3]: # missing
965 if f in changes[3]: # missing
966 fail(f, _('file not found!'))
966 fail(f, _('file not found!'))
967 if f in vdirs: # visited directory
967 if f in vdirs: # visited directory
968 d = f + '/'
968 d = f + '/'
969 for mf in matched:
969 for mf in matched:
970 if mf.startswith(d):
970 if mf.startswith(d):
971 break
971 break
972 else:
972 else:
973 fail(f, _("no match under directory!"))
973 fail(f, _("no match under directory!"))
974 elif f not in self.dirstate:
974 elif f not in self.dirstate:
975 fail(f, _("file not tracked!"))
975 fail(f, _("file not tracked!"))
976
976
977 if (not force and not extra.get("close") and not merge
977 if (not force and not extra.get("close") and not merge
978 and not (changes[0] or changes[1] or changes[2])
978 and not (changes[0] or changes[1] or changes[2])
979 and wctx.branch() == wctx.p1().branch()):
979 and wctx.branch() == wctx.p1().branch()):
980 return None
980 return None
981
981
982 ms = mergemod.mergestate(self)
982 ms = mergemod.mergestate(self)
983 for f in changes[0]:
983 for f in changes[0]:
984 if f in ms and ms[f] == 'u':
984 if f in ms and ms[f] == 'u':
985 raise util.Abort(_("unresolved merge conflicts "
985 raise util.Abort(_("unresolved merge conflicts "
986 "(see hg help resolve)"))
986 "(see hg help resolve)"))
987
987
988 cctx = context.workingctx(self, text, user, date, extra, changes)
988 cctx = context.workingctx(self, text, user, date, extra, changes)
989 if editor:
989 if editor:
990 cctx._text = editor(self, cctx, subs)
990 cctx._text = editor(self, cctx, subs)
991 edited = (text != cctx._text)
991 edited = (text != cctx._text)
992
992
993 # commit subs
993 # commit subs
994 if subs or removedsubs:
994 if subs or removedsubs:
995 state = wctx.substate.copy()
995 state = wctx.substate.copy()
996 for s in sorted(subs):
996 for s in sorted(subs):
997 sub = wctx.sub(s)
997 sub = wctx.sub(s)
998 self.ui.status(_('committing subrepository %s\n') %
998 self.ui.status(_('committing subrepository %s\n') %
999 subrepo.subrelpath(sub))
999 subrepo.subrelpath(sub))
1000 sr = sub.commit(cctx._text, user, date)
1000 sr = sub.commit(cctx._text, user, date)
1001 state[s] = (state[s][0], sr)
1001 state[s] = (state[s][0], sr)
1002 subrepo.writestate(self, state)
1002 subrepo.writestate(self, state)
1003
1003
1004 # Save commit message in case this transaction gets rolled back
1004 # Save commit message in case this transaction gets rolled back
1005 # (e.g. by a pretxncommit hook). Leave the content alone on
1005 # (e.g. by a pretxncommit hook). Leave the content alone on
1006 # the assumption that the user will use the same editor again.
1006 # the assumption that the user will use the same editor again.
1007 msgfile = self.opener('last-message.txt', 'wb')
1007 msgfile = self.opener('last-message.txt', 'wb')
1008 msgfile.write(cctx._text)
1008 msgfile.write(cctx._text)
1009 msgfile.close()
1009 msgfile.close()
1010
1010
1011 p1, p2 = self.dirstate.parents()
1011 p1, p2 = self.dirstate.parents()
1012 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1012 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1013 try:
1013 try:
1014 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1014 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1015 ret = self.commitctx(cctx, True)
1015 ret = self.commitctx(cctx, True)
1016 except:
1016 except:
1017 if edited:
1017 if edited:
1018 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1018 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1019 self.ui.write(
1019 self.ui.write(
1020 _('note: commit message saved in %s\n') % msgfn)
1020 _('note: commit message saved in %s\n') % msgfn)
1021 raise
1021 raise
1022
1022
1023 # update bookmarks, dirstate and mergestate
1023 # update bookmarks, dirstate and mergestate
1024 bookmarks.update(self, p1, ret)
1024 bookmarks.update(self, p1, ret)
1025 for f in changes[0] + changes[1]:
1025 for f in changes[0] + changes[1]:
1026 self.dirstate.normal(f)
1026 self.dirstate.normal(f)
1027 for f in changes[2]:
1027 for f in changes[2]:
1028 self.dirstate.forget(f)
1028 self.dirstate.forget(f)
1029 self.dirstate.setparents(ret)
1029 self.dirstate.setparents(ret)
1030 ms.reset()
1030 ms.reset()
1031 finally:
1031 finally:
1032 wlock.release()
1032 wlock.release()
1033
1033
1034 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1034 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1035 return ret
1035 return ret
1036
1036
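As a usage sketch of the commit() entry point above (hedged, not part of the changeset: it assumes repo is a localrepository whose working directory has pending changes):

    node = repo.commit(text="example message", user="alice <alice@example.com>")
    if node is None:
        repo.ui.status("nothing changed\n")   # commit() returns None in that case
    else:
        repo.ui.status("new changeset %s\n" % node.encode('hex')[:12])
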
1037 def commitctx(self, ctx, error=False):
1037 def commitctx(self, ctx, error=False):
1038 """Add a new revision to current repository.
1038 """Add a new revision to current repository.
1039 Revision information is passed via the context argument.
1039 Revision information is passed via the context argument.
1040 """
1040 """
1041
1041
1042 tr = lock = None
1042 tr = lock = None
1043 removed = list(ctx.removed())
1043 removed = list(ctx.removed())
1044 p1, p2 = ctx.p1(), ctx.p2()
1044 p1, p2 = ctx.p1(), ctx.p2()
1045 m1 = p1.manifest().copy()
1045 m1 = p1.manifest().copy()
1046 m2 = p2.manifest()
1046 m2 = p2.manifest()
1047 user = ctx.user()
1047 user = ctx.user()
1048
1048
1049 lock = self.lock()
1049 lock = self.lock()
1050 try:
1050 try:
1051 tr = self.transaction("commit")
1051 tr = self.transaction("commit")
1052 trp = weakref.proxy(tr)
1052 trp = weakref.proxy(tr)
1053
1053
1054 # check in files
1054 # check in files
1055 new = {}
1055 new = {}
1056 changed = []
1056 changed = []
1057 linkrev = len(self)
1057 linkrev = len(self)
1058 for f in sorted(ctx.modified() + ctx.added()):
1058 for f in sorted(ctx.modified() + ctx.added()):
1059 self.ui.note(f + "\n")
1059 self.ui.note(f + "\n")
1060 try:
1060 try:
1061 fctx = ctx[f]
1061 fctx = ctx[f]
1062 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1062 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1063 changed)
1063 changed)
1064 m1.set(f, fctx.flags())
1064 m1.set(f, fctx.flags())
1065 except OSError, inst:
1065 except OSError, inst:
1066 self.ui.warn(_("trouble committing %s!\n") % f)
1066 self.ui.warn(_("trouble committing %s!\n") % f)
1067 raise
1067 raise
1068 except IOError, inst:
1068 except IOError, inst:
1069 errcode = getattr(inst, 'errno', errno.ENOENT)
1069 errcode = getattr(inst, 'errno', errno.ENOENT)
1070 if error or errcode and errcode != errno.ENOENT:
1070 if error or errcode and errcode != errno.ENOENT:
1071 self.ui.warn(_("trouble committing %s!\n") % f)
1071 self.ui.warn(_("trouble committing %s!\n") % f)
1072 raise
1072 raise
1073 else:
1073 else:
1074 removed.append(f)
1074 removed.append(f)
1075
1075
1076 # update manifest
1076 # update manifest
1077 m1.update(new)
1077 m1.update(new)
1078 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1078 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1079 drop = [f for f in removed if f in m1]
1079 drop = [f for f in removed if f in m1]
1080 for f in drop:
1080 for f in drop:
1081 del m1[f]
1081 del m1[f]
1082 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1082 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1083 p2.manifestnode(), (new, drop))
1083 p2.manifestnode(), (new, drop))
1084
1084
1085 # update changelog
1085 # update changelog
1086 self.changelog.delayupdate()
1086 self.changelog.delayupdate()
1087 n = self.changelog.add(mn, changed + removed, ctx.description(),
1087 n = self.changelog.add(mn, changed + removed, ctx.description(),
1088 trp, p1.node(), p2.node(),
1088 trp, p1.node(), p2.node(),
1089 user, ctx.date(), ctx.extra().copy())
1089 user, ctx.date(), ctx.extra().copy())
1090 p = lambda: self.changelog.writepending() and self.root or ""
1090 p = lambda: self.changelog.writepending() and self.root or ""
1091 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1091 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1092 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1092 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1093 parent2=xp2, pending=p)
1093 parent2=xp2, pending=p)
1094 self.changelog.finalize(trp)
1094 self.changelog.finalize(trp)
1095 tr.close()
1095 tr.close()
1096
1096
1097 if self._branchcache:
1097 if self._branchcache:
1098 self.updatebranchcache()
1098 self.updatebranchcache()
1099 return n
1099 return n
1100 finally:
1100 finally:
1101 if tr:
1101 if tr:
1102 tr.release()
1102 tr.release()
1103 lock.release()
1103 lock.release()
1104
1104
1105 def destroyed(self):
1105 def destroyed(self):
1106 '''Inform the repository that nodes have been destroyed.
1106 '''Inform the repository that nodes have been destroyed.
1107 Intended for use by strip and rollback, so there's a common
1107 Intended for use by strip and rollback, so there's a common
1108 place for anything that has to be done after destroying history.'''
1108 place for anything that has to be done after destroying history.'''
1109 # XXX it might be nice if we could take the list of destroyed
1109 # XXX it might be nice if we could take the list of destroyed
1110 # nodes, but I don't see an easy way for rollback() to do that
1110 # nodes, but I don't see an easy way for rollback() to do that
1111
1111
1112 # Ensure the persistent tag cache is updated. Doing it now
1112 # Ensure the persistent tag cache is updated. Doing it now
1113 # means that the tag cache only has to worry about destroyed
1113 # means that the tag cache only has to worry about destroyed
1114 # heads immediately after a strip/rollback. That in turn
1114 # heads immediately after a strip/rollback. That in turn
1115 # guarantees that "cachetip == currenttip" (comparing both rev
1115 # guarantees that "cachetip == currenttip" (comparing both rev
1116 # and node) always means no nodes have been added or destroyed.
1116 # and node) always means no nodes have been added or destroyed.
1117
1117
1118 # XXX this is suboptimal when qrefresh'ing: we strip the current
1118 # XXX this is suboptimal when qrefresh'ing: we strip the current
1119 # head, refresh the tag cache, then immediately add a new head.
1119 # head, refresh the tag cache, then immediately add a new head.
1120 # But I think doing it this way is necessary for the "instant
1120 # But I think doing it this way is necessary for the "instant
1121 # tag cache retrieval" case to work.
1121 # tag cache retrieval" case to work.
1122 self.invalidatecaches()
1122 self.invalidatecaches()
1123
1123
1124 def walk(self, match, node=None):
1124 def walk(self, match, node=None):
1125 '''
1125 '''
1126 walk recursively through the directory tree or a given
1126 walk recursively through the directory tree or a given
1127 changeset, finding all files matched by the match
1127 changeset, finding all files matched by the match
1128 function
1128 function
1129 '''
1129 '''
1130 return self[node].walk(match)
1130 return self[node].walk(match)
1131
1131
1132 def status(self, node1='.', node2=None, match=None,
1132 def status(self, node1='.', node2=None, match=None,
1133 ignored=False, clean=False, unknown=False,
1133 ignored=False, clean=False, unknown=False,
1134 listsubrepos=False):
1134 listsubrepos=False):
1135 """return status of files between two nodes or node and working directory
1135 """return status of files between two nodes or node and working directory
1136
1136
1137 If node1 is None, use the first dirstate parent instead.
1137 If node1 is None, use the first dirstate parent instead.
1138 If node2 is None, compare node1 with working directory.
1138 If node2 is None, compare node1 with working directory.
1139 """
1139 """
1140
1140
1141 def mfmatches(ctx):
1141 def mfmatches(ctx):
1142 mf = ctx.manifest().copy()
1142 mf = ctx.manifest().copy()
1143 for fn in mf.keys():
1143 for fn in mf.keys():
1144 if not match(fn):
1144 if not match(fn):
1145 del mf[fn]
1145 del mf[fn]
1146 return mf
1146 return mf
1147
1147
1148 if isinstance(node1, context.changectx):
1148 if isinstance(node1, context.changectx):
1149 ctx1 = node1
1149 ctx1 = node1
1150 else:
1150 else:
1151 ctx1 = self[node1]
1151 ctx1 = self[node1]
1152 if isinstance(node2, context.changectx):
1152 if isinstance(node2, context.changectx):
1153 ctx2 = node2
1153 ctx2 = node2
1154 else:
1154 else:
1155 ctx2 = self[node2]
1155 ctx2 = self[node2]
1156
1156
1157 working = ctx2.rev() is None
1157 working = ctx2.rev() is None
1158 parentworking = working and ctx1 == self['.']
1158 parentworking = working and ctx1 == self['.']
1159 match = match or matchmod.always(self.root, self.getcwd())
1159 match = match or matchmod.always(self.root, self.getcwd())
1160 listignored, listclean, listunknown = ignored, clean, unknown
1160 listignored, listclean, listunknown = ignored, clean, unknown
1161
1161
1162 # load earliest manifest first for caching reasons
1162 # load earliest manifest first for caching reasons
1163 if not working and ctx2.rev() < ctx1.rev():
1163 if not working and ctx2.rev() < ctx1.rev():
1164 ctx2.manifest()
1164 ctx2.manifest()
1165
1165
1166 if not parentworking:
1166 if not parentworking:
1167 def bad(f, msg):
1167 def bad(f, msg):
1168 if f not in ctx1:
1168 if f not in ctx1:
1169 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1169 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1170 match.bad = bad
1170 match.bad = bad
1171
1171
1172 if working: # we need to scan the working dir
1172 if working: # we need to scan the working dir
1173 subrepos = []
1173 subrepos = []
1174 if '.hgsub' in self.dirstate:
1174 if '.hgsub' in self.dirstate:
1175 subrepos = ctx1.substate.keys()
1175 subrepos = ctx1.substate.keys()
1176 s = self.dirstate.status(match, subrepos, listignored,
1176 s = self.dirstate.status(match, subrepos, listignored,
1177 listclean, listunknown)
1177 listclean, listunknown)
1178 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1178 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1179
1179
1180 # check for any possibly clean files
1180 # check for any possibly clean files
1181 if parentworking and cmp:
1181 if parentworking and cmp:
1182 fixup = []
1182 fixup = []
1183 # do a full compare of any files that might have changed
1183 # do a full compare of any files that might have changed
1184 for f in sorted(cmp):
1184 for f in sorted(cmp):
1185 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1185 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1186 or ctx1[f].cmp(ctx2[f])):
1186 or ctx1[f].cmp(ctx2[f])):
1187 modified.append(f)
1187 modified.append(f)
1188 else:
1188 else:
1189 fixup.append(f)
1189 fixup.append(f)
1190
1190
1191 # update dirstate for files that are actually clean
1191 # update dirstate for files that are actually clean
1192 if fixup:
1192 if fixup:
1193 if listclean:
1193 if listclean:
1194 clean += fixup
1194 clean += fixup
1195
1195
1196 try:
1196 try:
1197 # updating the dirstate is optional
1197 # updating the dirstate is optional
1198 # so we don't wait on the lock
1198 # so we don't wait on the lock
1199 wlock = self.wlock(False)
1199 wlock = self.wlock(False)
1200 try:
1200 try:
1201 for f in fixup:
1201 for f in fixup:
1202 self.dirstate.normal(f)
1202 self.dirstate.normal(f)
1203 finally:
1203 finally:
1204 wlock.release()
1204 wlock.release()
1205 except error.LockError:
1205 except error.LockError:
1206 pass
1206 pass
1207
1207
1208 if not parentworking:
1208 if not parentworking:
1209 mf1 = mfmatches(ctx1)
1209 mf1 = mfmatches(ctx1)
1210 if working:
1210 if working:
1211 # we are comparing working dir against non-parent
1211 # we are comparing working dir against non-parent
1212 # generate a pseudo-manifest for the working dir
1212 # generate a pseudo-manifest for the working dir
1213 mf2 = mfmatches(self['.'])
1213 mf2 = mfmatches(self['.'])
1214 for f in cmp + modified + added:
1214 for f in cmp + modified + added:
1215 mf2[f] = None
1215 mf2[f] = None
1216 mf2.set(f, ctx2.flags(f))
1216 mf2.set(f, ctx2.flags(f))
1217 for f in removed:
1217 for f in removed:
1218 if f in mf2:
1218 if f in mf2:
1219 del mf2[f]
1219 del mf2[f]
1220 else:
1220 else:
1221 # we are comparing two revisions
1221 # we are comparing two revisions
1222 deleted, unknown, ignored = [], [], []
1222 deleted, unknown, ignored = [], [], []
1223 mf2 = mfmatches(ctx2)
1223 mf2 = mfmatches(ctx2)
1224
1224
1225 modified, added, clean = [], [], []
1225 modified, added, clean = [], [], []
1226 for fn in mf2:
1226 for fn in mf2:
1227 if fn in mf1:
1227 if fn in mf1:
1228 if (fn not in deleted and
1228 if (fn not in deleted and
1229 (mf1.flags(fn) != mf2.flags(fn) or
1229 (mf1.flags(fn) != mf2.flags(fn) or
1230 (mf1[fn] != mf2[fn] and
1230 (mf1[fn] != mf2[fn] and
1231 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1231 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1232 modified.append(fn)
1232 modified.append(fn)
1233 elif listclean:
1233 elif listclean:
1234 clean.append(fn)
1234 clean.append(fn)
1235 del mf1[fn]
1235 del mf1[fn]
1236 elif fn not in deleted:
1236 elif fn not in deleted:
1237 added.append(fn)
1237 added.append(fn)
1238 removed = mf1.keys()
1238 removed = mf1.keys()
1239
1239
1240 r = modified, added, removed, deleted, unknown, ignored, clean
1240 r = modified, added, removed, deleted, unknown, ignored, clean
1241
1241
1242 if listsubrepos:
1242 if listsubrepos:
1243 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1243 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1244 if working:
1244 if working:
1245 rev2 = None
1245 rev2 = None
1246 else:
1246 else:
1247 rev2 = ctx2.substate[subpath][1]
1247 rev2 = ctx2.substate[subpath][1]
1248 try:
1248 try:
1249 submatch = matchmod.narrowmatcher(subpath, match)
1249 submatch = matchmod.narrowmatcher(subpath, match)
1250 s = sub.status(rev2, match=submatch, ignored=listignored,
1250 s = sub.status(rev2, match=submatch, ignored=listignored,
1251 clean=listclean, unknown=listunknown,
1251 clean=listclean, unknown=listunknown,
1252 listsubrepos=True)
1252 listsubrepos=True)
1253 for rfiles, sfiles in zip(r, s):
1253 for rfiles, sfiles in zip(r, s):
1254 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1254 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1255 except error.LookupError:
1255 except error.LookupError:
1256 self.ui.status(_("skipping missing subrepository: %s\n")
1256 self.ui.status(_("skipping missing subrepository: %s\n")
1257 % subpath)
1257 % subpath)
1258
1258
1259 for l in r:
1259 for l in r:
1260 l.sort()
1260 l.sort()
1261 return r
1261 return r
1262
1262
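status() above always returns the same seven lists, in the order assembled near its end: modified, added, removed, deleted, unknown, ignored, clean, with the optional ones left empty unless requested. A small consuming sketch (assumes repo is a localrepository):

    st = repo.status(ignored=True, clean=True, unknown=True)
    modified, added, removed, deleted, unknown, ignored, clean = st
    for f in modified:
        repo.ui.write("M %s\n" % f)
    for f in unknown:
        repo.ui.write("? %s\n" % f)
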
1263 def heads(self, start=None):
1263 def heads(self, start=None):
1264 heads = self.changelog.heads(start)
1264 heads = self.changelog.heads(start)
1265 # sort the output in rev descending order
1265 # sort the output in rev descending order
1266 return sorted(heads, key=self.changelog.rev, reverse=True)
1266 return sorted(heads, key=self.changelog.rev, reverse=True)
1267
1267
1268 def branchheads(self, branch=None, start=None, closed=False):
1268 def branchheads(self, branch=None, start=None, closed=False):
1269 '''return a (possibly filtered) list of heads for the given branch
1269 '''return a (possibly filtered) list of heads for the given branch
1270
1270
1271 Heads are returned in topological order, from newest to oldest.
1271 Heads are returned in topological order, from newest to oldest.
1272 If branch is None, use the dirstate branch.
1272 If branch is None, use the dirstate branch.
1273 If start is not None, return only heads reachable from start.
1273 If start is not None, return only heads reachable from start.
1274 If closed is True, return heads that are marked as closed as well.
1274 If closed is True, return heads that are marked as closed as well.
1275 '''
1275 '''
1276 if branch is None:
1276 if branch is None:
1277 branch = self[None].branch()
1277 branch = self[None].branch()
1278 branches = self.branchmap()
1278 branches = self.branchmap()
1279 if branch not in branches:
1279 if branch not in branches:
1280 return []
1280 return []
1281 # the cache returns heads ordered lowest to highest
1281 # the cache returns heads ordered lowest to highest
1282 bheads = list(reversed(branches[branch]))
1282 bheads = list(reversed(branches[branch]))
1283 if start is not None:
1283 if start is not None:
1284 # filter out the heads that cannot be reached from startrev
1284 # filter out the heads that cannot be reached from startrev
1285 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1285 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1286 bheads = [h for h in bheads if h in fbheads]
1286 bheads = [h for h in bheads if h in fbheads]
1287 if not closed:
1287 if not closed:
1288 bheads = [h for h in bheads if
1288 bheads = [h for h in bheads if
1289 ('close' not in self.changelog.read(h)[5])]
1289 ('close' not in self.changelog.read(h)[5])]
1290 return bheads
1290 return bheads
1291
1291
1292 def branches(self, nodes):
1292 def branches(self, nodes):
1293 if not nodes:
1293 if not nodes:
1294 nodes = [self.changelog.tip()]
1294 nodes = [self.changelog.tip()]
1295 b = []
1295 b = []
1296 for n in nodes:
1296 for n in nodes:
1297 t = n
1297 t = n
1298 while 1:
1298 while 1:
1299 p = self.changelog.parents(n)
1299 p = self.changelog.parents(n)
1300 if p[1] != nullid or p[0] == nullid:
1300 if p[1] != nullid or p[0] == nullid:
1301 b.append((t, n, p[0], p[1]))
1301 b.append((t, n, p[0], p[1]))
1302 break
1302 break
1303 n = p[0]
1303 n = p[0]
1304 return b
1304 return b
1305
1305
1306 def between(self, pairs):
1306 def between(self, pairs):
1307 r = []
1307 r = []
1308
1308
1309 for top, bottom in pairs:
1309 for top, bottom in pairs:
1310 n, l, i = top, [], 0
1310 n, l, i = top, [], 0
1311 f = 1
1311 f = 1
1312
1312
1313 while n != bottom and n != nullid:
1313 while n != bottom and n != nullid:
1314 p = self.changelog.parents(n)[0]
1314 p = self.changelog.parents(n)[0]
1315 if i == f:
1315 if i == f:
1316 l.append(n)
1316 l.append(n)
1317 f = f * 2
1317 f = f * 2
1318 n = p
1318 n = p
1319 i += 1
1319 i += 1
1320
1320
1321 r.append(l)
1321 r.append(l)
1322
1322
1323 return r
1323 return r
1324
1324
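between() above walks first parents from each top towards bottom and records the nodes at exponentially growing distances (1, 2, 4, 8, ... steps), which is how the old discovery protocol narrows down a common point. The same spacing reproduced on plain indices, as a sketch (sample_positions is an illustrative name):

    def sample_positions(length):
        # indices that the loop in between() would record on a chain this long
        picked, step = [], 1
        for i in range(1, length):
            if i == step:
                picked.append(i)
                step *= 2
        return picked

    # sample_positions(10) -> [1, 2, 4, 8]
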
1325 def pull(self, remote, heads=None, force=False):
1325 def pull(self, remote, heads=None, force=False):
1326 lock = self.lock()
1326 lock = self.lock()
1327 try:
1327 try:
1328 usecommon = remote.capable('getbundle')
1328 usecommon = remote.capable('getbundle')
1329 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1329 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1330 force=force, commononly=usecommon)
1330 force=force, commononly=usecommon)
1331 common, fetch, rheads = tmp
1331 common, fetch, rheads = tmp
1332 if not fetch:
1332 if not fetch:
1333 self.ui.status(_("no changes found\n"))
1333 self.ui.status(_("no changes found\n"))
1334 result = 0
1334 result = 0
1335 else:
1335 else:
1336 if heads is None and list(common) == [nullid]:
1336 if heads is None and list(common) == [nullid]:
1337 self.ui.status(_("requesting all changes\n"))
1337 self.ui.status(_("requesting all changes\n"))
1338 elif heads is None and remote.capable('changegroupsubset'):
1338 elif heads is None and remote.capable('changegroupsubset'):
1339 # issue1320, avoid a race if remote changed after discovery
1339 # issue1320, avoid a race if remote changed after discovery
1340 heads = rheads
1340 heads = rheads
1341
1341
1342 if usecommon:
1342 if usecommon:
1343 cg = remote.getbundle('pull', common=common,
1343 cg = remote.getbundle('pull', common=common,
1344 heads=heads or rheads)
1344 heads=heads or rheads)
1345 elif heads is None:
1345 elif heads is None:
1346 cg = remote.changegroup(fetch, 'pull')
1346 cg = remote.changegroup(fetch, 'pull')
1347 elif not remote.capable('changegroupsubset'):
1347 elif not remote.capable('changegroupsubset'):
1348 raise util.Abort(_("partial pull cannot be done because "
1348 raise util.Abort(_("partial pull cannot be done because "
1349 "other repository doesn't support "
1349 "other repository doesn't support "
1350 "changegroupsubset."))
1350 "changegroupsubset."))
1351 else:
1351 else:
1352 cg = remote.changegroupsubset(fetch, heads, 'pull')
1352 cg = remote.changegroupsubset(fetch, heads, 'pull')
1353 result = self.addchangegroup(cg, 'pull', remote.url(),
1353 result = self.addchangegroup(cg, 'pull', remote.url(),
1354 lock=lock)
1354 lock=lock)
1355 finally:
1355 finally:
1356 lock.release()
1356 lock.release()
1357
1357
1358 return result
1358 return result
1359
1359
1360 def checkpush(self, force, revs):
1360 def checkpush(self, force, revs):
1361 """Extensions can override this function if additional checks have
1361 """Extensions can override this function if additional checks have
1362 to be performed before pushing, or call it if they override push
1362 to be performed before pushing, or call it if they override push
1363 command.
1363 command.
1364 """
1364 """
1365 pass
1365 pass
1366
1366
1367 def push(self, remote, force=False, revs=None, newbranch=False):
1367 def push(self, remote, force=False, revs=None, newbranch=False):
1368 '''Push outgoing changesets (limited by revs) from the current
1368 '''Push outgoing changesets (limited by revs) from the current
1369 repository to remote. Return an integer:
1369 repository to remote. Return an integer:
1370 - 0 means HTTP error *or* nothing to push
1370 - 0 means HTTP error *or* nothing to push
1371 - 1 means we pushed and remote head count is unchanged *or*
1371 - 1 means we pushed and remote head count is unchanged *or*
1372 we have outgoing changesets but refused to push
1372 we have outgoing changesets but refused to push
1373 - other values as described by addchangegroup()
1373 - other values as described by addchangegroup()
1374 '''
1374 '''
1375 # there are two ways to push to remote repo:
1375 # there are two ways to push to remote repo:
1376 #
1376 #
1377 # addchangegroup assumes local user can lock remote
1377 # addchangegroup assumes local user can lock remote
1378 # repo (local filesystem, old ssh servers).
1378 # repo (local filesystem, old ssh servers).
1379 #
1379 #
1380 # unbundle assumes local user cannot lock remote repo (new ssh
1380 # unbundle assumes local user cannot lock remote repo (new ssh
1381 # servers, http servers).
1381 # servers, http servers).
1382
1382
1383 self.checkpush(force, revs)
1383 self.checkpush(force, revs)
1384 lock = None
1384 lock = None
1385 unbundle = remote.capable('unbundle')
1385 unbundle = remote.capable('unbundle')
1386 if not unbundle:
1386 if not unbundle:
1387 lock = remote.lock()
1387 lock = remote.lock()
1388 try:
1388 try:
1389 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1389 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1390 newbranch)
1390 newbranch)
1391 ret = remote_heads
1391 ret = remote_heads
1392 if cg is not None:
1392 if cg is not None:
1393 if unbundle:
1393 if unbundle:
1394 # local repo finds heads on server, finds out what
1394 # local repo finds heads on server, finds out what
1395 # revs it must push. once revs transferred, if server
1395 # revs it must push. once revs transferred, if server
1396 # finds it has different heads (someone else won
1396 # finds it has different heads (someone else won
1397 # commit/push race), server aborts.
1397 # commit/push race), server aborts.
1398 if force:
1398 if force:
1399 remote_heads = ['force']
1399 remote_heads = ['force']
1400 # ssh: return remote's addchangegroup()
1400 # ssh: return remote's addchangegroup()
1401 # http: return remote's addchangegroup() or 0 for error
1401 # http: return remote's addchangegroup() or 0 for error
1402 ret = remote.unbundle(cg, remote_heads, 'push')
1402 ret = remote.unbundle(cg, remote_heads, 'push')
1403 else:
1403 else:
1404 # we return an integer indicating remote head count change
1404 # we return an integer indicating remote head count change
1405 ret = remote.addchangegroup(cg, 'push', self.url(),
1405 ret = remote.addchangegroup(cg, 'push', self.url(),
1406 lock=lock)
1406 lock=lock)
1407 finally:
1407 finally:
1408 if lock is not None:
1408 if lock is not None:
1409 lock.release()
1409 lock.release()
1410
1410
1411 self.ui.debug("checking for updated bookmarks\n")
1411 self.ui.debug("checking for updated bookmarks\n")
1412 rb = remote.listkeys('bookmarks')
1412 rb = remote.listkeys('bookmarks')
1413 for k in rb.keys():
1413 for k in rb.keys():
1414 if k in self._bookmarks:
1414 if k in self._bookmarks:
1415 nr, nl = rb[k], hex(self._bookmarks[k])
1415 nr, nl = rb[k], hex(self._bookmarks[k])
1416 if nr in self:
1416 if nr in self:
1417 cr = self[nr]
1417 cr = self[nr]
1418 cl = self[nl]
1418 cl = self[nl]
1419 if cl in cr.descendants():
1419 if cl in cr.descendants():
1420 r = remote.pushkey('bookmarks', k, nr, nl)
1420 r = remote.pushkey('bookmarks', k, nr, nl)
1421 if r:
1421 if r:
1422 self.ui.status(_("updating bookmark %s\n") % k)
1422 self.ui.status(_("updating bookmark %s\n") % k)
1423 else:
1423 else:
1424 self.ui.warn(_('updating bookmark %s'
1424 self.ui.warn(_('updating bookmark %s'
1425 ' failed!\n') % k)
1425 ' failed!\n') % k)
1426
1426
1427 return ret
1427 return ret
1428
1428
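The integer contract documented on push() above maps naturally onto a command result; a hedged sketch (it assumes remote is an already-opened peer repository object):

    ret = repo.push(remote)
    if ret == 0:
        repo.ui.warn("push failed (HTTP error) or there was nothing to push\n")
    elif ret == 1:
        repo.ui.status("pushed with remote head count unchanged, or push was refused\n")
    # other values follow addchangegroup(), as noted in the docstring above
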
1429 def changegroupinfo(self, nodes, source):
1429 def changegroupinfo(self, nodes, source):
1430 if self.ui.verbose or source == 'bundle':
1430 if self.ui.verbose or source == 'bundle':
1431 self.ui.status(_("%d changesets found\n") % len(nodes))
1431 self.ui.status(_("%d changesets found\n") % len(nodes))
1432 if self.ui.debugflag:
1432 if self.ui.debugflag:
1433 self.ui.debug("list of changesets:\n")
1433 self.ui.debug("list of changesets:\n")
1434 for node in nodes:
1434 for node in nodes:
1435 self.ui.debug("%s\n" % hex(node))
1435 self.ui.debug("%s\n" % hex(node))
1436
1436
1437 def changegroupsubset(self, bases, heads, source):
1437 def changegroupsubset(self, bases, heads, source):
1438 """Compute a changegroup consisting of all the nodes that are
1438 """Compute a changegroup consisting of all the nodes that are
1439 descendants of any of the bases and ancestors of any of the heads.
1439 descendants of any of the bases and ancestors of any of the heads.
1440 Return a chunkbuffer object whose read() method will return
1440 Return a chunkbuffer object whose read() method will return
1441 successive changegroup chunks.
1441 successive changegroup chunks.
1442
1442
1443 It is fairly complex as determining which filenodes and which
1443 It is fairly complex as determining which filenodes and which
1444 manifest nodes need to be included for the changeset to be complete
1444 manifest nodes need to be included for the changeset to be complete
1445 is non-trivial.
1445 is non-trivial.
1446
1446
1447 Another wrinkle is doing the reverse, figuring out which changeset in
1447 Another wrinkle is doing the reverse, figuring out which changeset in
1448 the changegroup a particular filenode or manifestnode belongs to.
1448 the changegroup a particular filenode or manifestnode belongs to.
1449 """
1449 """
1450 cl = self.changelog
1450 cl = self.changelog
1451 if not bases:
1451 if not bases:
1452 bases = [nullid]
1452 bases = [nullid]
1453 csets, bases, heads = cl.nodesbetween(bases, heads)
1453 csets, bases, heads = cl.nodesbetween(bases, heads)
1454 # We assume that all ancestors of bases are known
1454 # We assume that all ancestors of bases are known
1455 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1455 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1456 return self._changegroupsubset(common, csets, heads, source)
1456 return self._changegroupsubset(common, csets, heads, source)
1457
1457
1458 def getbundle(self, source, heads=None, common=None):
1458 def getbundle(self, source, heads=None, common=None):
1459 """Like changegroupsubset, but returns the set difference between the
1459 """Like changegroupsubset, but returns the set difference between the
1460 ancestors of heads and the ancestors common.
1460 ancestors of heads and the ancestors common.
1461
1461
1462 If heads is None, use the local heads. If common is None, use [nullid].
1462 If heads is None, use the local heads. If common is None, use [nullid].
1463
1463
1464 The nodes in common might not all be known locally due to the way the
1464 The nodes in common might not all be known locally due to the way the
1465 current discovery protocol works.
1465 current discovery protocol works.
1466 """
1466 """
1467 cl = self.changelog
1467 cl = self.changelog
1468 if common:
1468 if common:
1469 nm = cl.nodemap
1469 nm = cl.nodemap
1470 common = [n for n in common if n in nm]
1470 common = [n for n in common if n in nm]
1471 else:
1471 else:
1472 common = [nullid]
1472 common = [nullid]
1473 if not heads:
1473 if not heads:
1474 heads = cl.heads()
1474 heads = cl.heads()
1475 common, missing = cl.findcommonmissing(common, heads)
1475 common, missing = cl.findcommonmissing(common, heads)
1476 return self._changegroupsubset(common, missing, heads, source)
1476 return self._changegroupsubset(common, missing, heads, source)
1477
1477
1478 def _changegroupsubset(self, commonrevs, csets, heads, source):
1478 def _changegroupsubset(self, commonrevs, csets, heads, source):
1479
1479
1480 cl = self.changelog
1480 cl = self.changelog
1481 mf = self.manifest
1481 mf = self.manifest
1482 mfs = {} # needed manifests
1482 mfs = {} # needed manifests
1483 fnodes = {} # needed file nodes
1483 fnodes = {} # needed file nodes
1484 changedfiles = set()
1484 changedfiles = set()
1485 fstate = ['', {}]
1485 fstate = ['', {}]
1486 count = [0]
1486 count = [0]
1487
1487
1488 # can we go through the fast path ?
1488 # can we go through the fast path ?
1489 heads.sort()
1489 heads.sort()
1490 if heads == sorted(self.heads()):
1490 if heads == sorted(self.heads()):
1491 return self._changegroup(csets, source)
1491 return self._changegroup(csets, source)
1492
1492
1493 # slow path
1493 # slow path
1494 self.hook('preoutgoing', throw=True, source=source)
1494 self.hook('preoutgoing', throw=True, source=source)
1495 self.changegroupinfo(csets, source)
1495 self.changegroupinfo(csets, source)
1496
1496
1497 # filter any nodes that claim to be part of the known set
1497 # filter any nodes that claim to be part of the known set
1498 def prune(revlog, missing):
1498 def prune(revlog, missing):
1499 for n in missing:
1499 for n in missing:
1500 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1500 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1501 yield n
1501 yield n
1502
1502
1503 def lookup(revlog, x):
1503 def lookup(revlog, x):
1504 if revlog == cl:
1504 if revlog == cl:
1505 c = cl.read(x)
1505 c = cl.read(x)
1506 changedfiles.update(c[3])
1506 changedfiles.update(c[3])
1507 mfs.setdefault(c[0], x)
1508 count[0] += 1
1509 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1510 return x
1511 elif revlog == mf:
1512 clnode = mfs[x]
1513 mdata = mf.readfast(x)
1514 for f in changedfiles:
1515 if f in mdata:
1516 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1517 count[0] += 1
1518 self.ui.progress(_('bundling'), count[0],
1519 unit=_('manifests'), total=len(mfs))
1520 return mfs[x]
1521 else:
1522 self.ui.progress(
1523 _('bundling'), count[0], item=fstate[0],
1524 unit=_('files'), total=len(changedfiles))
1525 return fstate[1][x]
1526
1527 bundler = changegroup.bundle10(lookup)
1528
1529 def gengroup():
1530 # Create a changenode group generator that will call our functions
1531 # back to lookup the owning changenode and collect information.
1532 for chunk in cl.group(csets, bundler):
1533 yield chunk
1534 self.ui.progress(_('bundling'), None)
1535
1536 # Create a generator for the manifestnodes that calls our lookup
1537 # and data collection functions back.
1538 count[0] = 0
1539 for chunk in mf.group(prune(mf, mfs), bundler):
1540 yield chunk
1541 self.ui.progress(_('bundling'), None)
1542
1543 mfs.clear()
1544
1545 # Go through all our files in order sorted by name.
1546 count[0] = 0
1547 for fname in sorted(changedfiles):
1548 filerevlog = self.file(fname)
1549 if not len(filerevlog):
1550 raise util.Abort(_("empty or missing revlog for %s") % fname)
1551 fstate[0] = fname
1552 fstate[1] = fnodes.pop(fname, {})
1553 first = True
1554
1555 for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
1556 bundler):
1557 if first:
1558 if chunk == bundler.close():
1559 break
1560 count[0] += 1
1561 yield bundler.fileheader(fname)
1562 first = False
1563 yield chunk
1564 # Signal that no more groups are left.
1565 yield bundler.close()
1566 self.ui.progress(_('bundling'), None)
1567
1568 if csets:
1569 self.hook('outgoing', node=hex(csets[0]), source=source)
1570
1571 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1572
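For readers following the lookup() callback above: fnodes is simply a nested mapping from file name to {file node: owning changelog node}, filled in with setdefault so that the first changeset seen for a node wins. A standalone sketch of that bookkeeping, using made-up string values rather than real nodes (not part of the changeset):

# Illustration only: mimic how lookup() records which changeset first
# introduced each file node, so the file groups can refer back to it.
fnodes = {}

def record(fname, filenode, clnode):
    # setdefault keeps the first changeset seen for a given file node
    fnodes.setdefault(fname, {}).setdefault(filenode, clnode)

record('a.txt', 'f1', 'c1')
record('a.txt', 'f1', 'c2')   # ignored: 'f1' is already mapped to 'c1'
record('b.txt', 'f2', 'c2')
assert fnodes == {'a.txt': {'f1': 'c1'}, 'b.txt': {'f2': 'c2'}}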
1573 def changegroup(self, basenodes, source):
1574 # to avoid a race we use changegroupsubset() (issue1320)
1575 return self.changegroupsubset(basenodes, self.heads(), source)
1576
1577 def _changegroup(self, nodes, source):
1578 """Compute the changegroup of all nodes that we have that a recipient
1579 doesn't. Return a chunkbuffer object whose read() method will return
1580 successive changegroup chunks.
1581
1582 This is much easier than the previous function as we can assume that
1583 the recipient has any changenode we aren't sending them.
1584
1585 nodes is the set of nodes to send"""
1586
1587 cl = self.changelog
1588 mf = self.manifest
1589 mfs = {}
1590 changedfiles = set()
1591 fstate = ['']
1592 count = [0]
1593
1594 self.hook('preoutgoing', throw=True, source=source)
1595 self.changegroupinfo(nodes, source)
1596
1597 revset = set([cl.rev(n) for n in nodes])
1598
1599 def gennodelst(log):
1600 for r in log:
1601 if log.linkrev(r) in revset:
1602 yield log.node(r)
1603
1604 def lookup(revlog, x):
1605 if revlog == cl:
1606 c = cl.read(x)
1607 changedfiles.update(c[3])
1608 mfs.setdefault(c[0], x)
1609 count[0] += 1
1610 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1611 return x
1612 elif revlog == mf:
1613 count[0] += 1
1614 self.ui.progress(_('bundling'), count[0],
1615 unit=_('manifests'), total=len(mfs))
1616 return cl.node(revlog.linkrev(revlog.rev(x)))
1617 else:
1618 self.ui.progress(
1619 _('bundling'), count[0], item=fstate[0],
1620 total=len(changedfiles), unit=_('files'))
1621 return cl.node(revlog.linkrev(revlog.rev(x)))
1622
1623 bundler = changegroup.bundle10(lookup)
1624
1625 def gengroup():
1626 '''yield a sequence of changegroup chunks (strings)'''
1627 # construct a list of all changed files
1628
1629 for chunk in cl.group(nodes, bundler):
1630 yield chunk
1631 self.ui.progress(_('bundling'), None)
1632
1633 count[0] = 0
1634 for chunk in mf.group(gennodelst(mf), bundler):
1635 yield chunk
1636 self.ui.progress(_('bundling'), None)
1637
1638 count[0] = 0
1639 for fname in sorted(changedfiles):
1640 filerevlog = self.file(fname)
1641 if not len(filerevlog):
1642 raise util.Abort(_("empty or missing revlog for %s") % fname)
1643 fstate[0] = fname
1644 first = True
1645 for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
1646 if first:
1647 if chunk == bundler.close():
1648 break
1649 count[0] += 1
1650 yield bundler.fileheader(fname)
1651 first = False
1652 yield chunk
1653 yield bundler.close()
1654 self.ui.progress(_('bundling'), None)
1655
1656 if nodes:
1657 self.hook('outgoing', node=hex(nodes[0]), source=source)
1658
1659 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1660
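gennodelst() above keeps only the revisions whose linkrev lies in the set of outgoing changeset revisions. A toy version over plain (node, linkrev) tuples, with hypothetical data instead of a real revlog, shows the same filter:

# Illustration only: yield nodes whose linking changeset is being sent.
def gennodelst(entries, revset):
    for node, linkrev in entries:
        if linkrev in revset:
            yield node

entries = [('n0', 0), ('n1', 1), ('n2', 3)]
assert list(gennodelst(entries, set([1, 3]))) == ['n1', 'n2']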
1661 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1662 """Add the changegroup returned by source.read() to this repo.
1663 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1664 the URL of the repo where this changegroup is coming from.
1665 If lock is not None, the function takes ownership of the lock
1666 and releases it after the changegroup is added.
1667
1668 Return an integer summarizing the change to this repo:
1669 - nothing changed or no source: 0
1670 - more heads than before: 1+added heads (2..n)
1671 - fewer heads than before: -1-removed heads (-2..-n)
1672 - number of heads stays the same: 1
1673 """
1674 def csmap(x):
1675 self.ui.debug("add changeset %s\n" % short(x))
1676 return len(cl)
1677
1678 def revmap(x):
1679 return cl.rev(x)
1680
1681 if not source:
1682 return 0
1683
1684 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1685
1686 changesets = files = revisions = 0
1687 efiles = set()
1688
1689 # write changelog data to temp files so concurrent readers will not see
1690 # inconsistent view
1691 cl = self.changelog
1692 cl.delayupdate()
1693 oldheads = len(cl.heads())
1694
1695 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1696 try:
1697 trp = weakref.proxy(tr)
1698 # pull off the changeset group
1699 self.ui.status(_("adding changesets\n"))
1700 clstart = len(cl)
1701 class prog(object):
1702 step = _('changesets')
1703 count = 1
1704 ui = self.ui
1705 total = None
1706 def __call__(self):
1707 self.ui.progress(self.step, self.count, unit=_('chunks'),
1708 total=self.total)
1709 self.count += 1
1710 pr = prog()
1711 source.callback = pr
1712
1713 if (cl.addgroup(source, csmap, trp) is None
1714 and not emptyok):
1715 raise util.Abort(_("received changelog group is empty"))
1716 clend = len(cl)
1717 changesets = clend - clstart
1718 for c in xrange(clstart, clend):
1719 efiles.update(self[c].files())
1720 efiles = len(efiles)
1721 self.ui.progress(_('changesets'), None)
1722
1723 # pull off the manifest group
1724 self.ui.status(_("adding manifests\n"))
1725 pr.step = _('manifests')
1726 pr.count = 1
1727 pr.total = changesets # manifests <= changesets
1728 # no need to check for empty manifest group here:
1729 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1730 # no new manifest will be created and the manifest group will
1731 # be empty during the pull
1732 self.manifest.addgroup(source, revmap, trp)
1733 self.ui.progress(_('manifests'), None)
1734
1735 needfiles = {}
1736 if self.ui.configbool('server', 'validate', default=False):
1737 # validate incoming csets have their manifests
1738 for cset in xrange(clstart, clend):
1739 mfest = self.changelog.read(self.changelog.node(cset))[0]
1740 mfest = self.manifest.readdelta(mfest)
1741 # store file nodes we must see
1742 for f, n in mfest.iteritems():
1743 needfiles.setdefault(f, set()).add(n)
1744
1745 # process the files
1746 self.ui.status(_("adding file changes\n"))
1747 pr.step = 'files'
1748 pr.count = 1
1749 pr.total = efiles
1750 source.callback = None
1751
1752 while 1:
1753 f = source.chunk()
1754 if not f:
1755 break
1756 self.ui.debug("adding %s revisions\n" % f)
1757 pr()
1758 fl = self.file(f)
1759 o = len(fl)
1760 if fl.addgroup(source, revmap, trp) is None:
1761 raise util.Abort(_("received file revlog group is empty"))
1762 revisions += len(fl) - o
1763 files += 1
1764 if f in needfiles:
1765 needs = needfiles[f]
1766 for new in xrange(o, len(fl)):
1767 n = fl.node(new)
1768 if n in needs:
1769 needs.remove(n)
1770 if not needs:
1771 del needfiles[f]
1772 self.ui.progress(_('files'), None)
1773
1774 for f, needs in needfiles.iteritems():
1775 fl = self.file(f)
1776 for n in needs:
1777 try:
1778 fl.rev(n)
1779 except error.LookupError:
1780 raise util.Abort(
1781 _('missing file data for %s:%s - run hg verify') %
1782 (f, hex(n)))
1783
1784 newheads = len(cl.heads())
1785 heads = ""
1786 if oldheads and newheads != oldheads:
1787 heads = _(" (%+d heads)") % (newheads - oldheads)
1788
1789 self.ui.status(_("added %d changesets"
1790 " with %d changes to %d files%s\n")
1791 % (changesets, revisions, files, heads))
1792
1793 if changesets > 0:
1794 p = lambda: cl.writepending() and self.root or ""
1795 self.hook('pretxnchangegroup', throw=True,
1796 node=hex(cl.node(clstart)), source=srctype,
1797 url=url, pending=p)
1798
1799 # make changelog see real files again
1800 cl.finalize(trp)
1801
1802 tr.close()
1803 finally:
1804 tr.release()
1805 if lock:
1806 lock.release()
1807
1808 if changesets > 0:
1809 # forcefully update the on-disk branch cache
1810 self.ui.debug("updating the branch cache\n")
1811 self.updatebranchcache()
1812 self.hook("changegroup", node=hex(cl.node(clstart)),
1813 source=srctype, url=url)
1814
1815 for i in xrange(clstart, clend):
1816 self.hook("incoming", node=hex(cl.node(i)),
1817 source=srctype, url=url)
1818
1819 # never return 0 here:
1820 if newheads < oldheads:
1821 return newheads - oldheads - 1
1822 else:
1823 return newheads - oldheads + 1
1824
1825
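The docstring of addchangegroup() defines the return convention: 0 only when nothing changed or there was no source, otherwise 1 plus the number of added heads, or -1 minus the number of removed heads. A small hypothetical helper (not part of localrepo) that decodes such a value:

# Illustration only: interpret the integer returned by addchangegroup().
def describe_result(ret):
    if ret == 0:
        return 'nothing changed (or no source)'
    if ret > 0:
        return '%d head(s) added' % (ret - 1)   # ret == 1 means heads unchanged
    return '%d head(s) removed' % (-ret - 1)    # ret <= -2

assert describe_result(1) == '0 head(s) added'
assert describe_result(3) == '2 head(s) added'
assert describe_result(-2) == '1 head(s) removed'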
1826 def stream_in(self, remote, requirements):
1827 lock = self.lock()
1828 try:
1829 fp = remote.stream_out()
1830 l = fp.readline()
1831 try:
1832 resp = int(l)
1833 except ValueError:
1834 raise error.ResponseError(
1835 _('Unexpected response from remote server:'), l)
1836 if resp == 1:
1837 raise util.Abort(_('operation forbidden by server'))
1838 elif resp == 2:
1839 raise util.Abort(_('locking the remote repository failed'))
1840 elif resp != 0:
1841 raise util.Abort(_('the server sent an unknown error code'))
1842 self.ui.status(_('streaming all changes\n'))
1843 l = fp.readline()
1844 try:
1845 total_files, total_bytes = map(int, l.split(' ', 1))
1846 except (ValueError, TypeError):
1847 raise error.ResponseError(
1848 _('Unexpected response from remote server:'), l)
1849 self.ui.status(_('%d files to transfer, %s of data\n') %
1850 (total_files, util.bytecount(total_bytes)))
1851 start = time.time()
1852 for i in xrange(total_files):
1853 # XXX doesn't support '\n' or '\r' in filenames
1854 l = fp.readline()
1855 try:
1856 name, size = l.split('\0', 1)
1857 size = int(size)
1858 except (ValueError, TypeError):
1859 raise error.ResponseError(
1860 _('Unexpected response from remote server:'), l)
1861 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1862 # for backwards compat, name was partially encoded
1863 ofp = self.sopener(store.decodedir(name), 'w')
1864 for chunk in util.filechunkiter(fp, limit=size):
1865 ofp.write(chunk)
1866 ofp.close()
1867 elapsed = time.time() - start
1868 if elapsed <= 0:
1869 elapsed = 0.001
1870 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1871 (util.bytecount(total_bytes), elapsed,
1872 util.bytecount(total_bytes / elapsed)))
1873
1874 # new requirements = old non-format requirements + new format-related
1875 # requirements from the streamed-in repository
1876 requirements.update(set(self.requirements) - self.supportedformats)
1877 self._applyrequirements(requirements)
1878 self._writerequirements()
1879
1880 self.invalidate()
1881 return len(self.heads()) + 1
1882 finally:
1883 lock.release()
1884
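Read straight from stream_in() above, the streamed payload is: a status line ('0' on success), a 'total_files total_bytes' line, then for each file a 'name\0size' header followed by exactly size raw bytes. A minimal parser over an in-memory buffer, with an assumed example payload rather than real wire data:

import io

# Illustration only: parse a payload shaped like the one stream_in() reads.
payload = (b'0\n'                       # status: OK
           b'2 13\n'                    # 2 files, 13 bytes in total
           b'data/a.i\x005\n' b'AAAAA'
           b'data/b.i\x008\n' b'BBBBBBBB')

fp = io.BytesIO(payload)
assert int(fp.readline()) == 0
total_files, total_bytes = map(int, fp.readline().split(b' ', 1))
assert total_files == 2 and total_bytes == 13
received = {}
for i in range(total_files):
    name, size = fp.readline().split(b'\x00', 1)
    received[name] = fp.read(int(size))
assert received == {b'data/a.i': b'AAAAA', b'data/b.i': b'BBBBBBBB'}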
1885 def clone(self, remote, heads=[], stream=False):
1886 '''clone remote repository.
1887
1888 keyword arguments:
1889 heads: list of revs to clone (forces use of pull)
1890 stream: use streaming clone if possible'''
1891
1892 # now, all clients that can request uncompressed clones can
1893 # read repo formats supported by all servers that can serve
1894 # them.
1895
1896 # if revlog format changes, client will have to check version
1897 # and format flags on "stream" capability, and use
1898 # uncompressed only if compatible.
1899
1900 if stream and not heads:
1901 # 'stream' means remote revlog format is revlogv1 only
1902 if remote.capable('stream'):
1903 return self.stream_in(remote, set(('revlogv1',)))
1904 # otherwise, 'streamreqs' contains the remote revlog format
1905 streamreqs = remote.capable('streamreqs')
1906 if streamreqs:
1907 streamreqs = set(streamreqs.split(','))
1908 # if we support it, stream in and adjust our requirements
1909 if not streamreqs - self.supportedformats:
1910 return self.stream_in(remote, streamreqs)
1911 return self.pull(remote, heads)
1912
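The branching in clone() reduces to three outcomes: use the legacy 'stream' capability, use 'streamreqs' when every advertised format is one we support, or fall back to a normal pull. A pure-function sketch of that decision, with a plain dict standing in for the remote's capabilities (hypothetical, not the real peer API):

# Illustration only: which path clone() would take for given capabilities.
def clone_strategy(stream, heads, caps, supportedformats):
    if stream and not heads:
        if 'stream' in caps:
            return 'stream_in'          # old-style capability
        streamreqs = caps.get('streamreqs', '')
        if streamreqs:
            if not set(streamreqs.split(',')) - supportedformats:
                return 'stream_in'      # all remote formats are supported
    return 'pull'

supported = set(['revlogv1'])
assert clone_strategy(True, [], {'stream': '1'}, supported) == 'stream_in'
assert clone_strategy(True, [], {'streamreqs': 'revlogv1'}, supported) == 'stream_in'
assert clone_strategy(True, [], {'streamreqs': 'futurefmt'}, supported) == 'pull'
assert clone_strategy(False, [], {'stream': '1'}, supported) == 'pull'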
1913 def pushkey(self, namespace, key, old, new):
1914 return pushkey.push(self, namespace, key, old, new)
1915
1916 def listkeys(self, namespace):
1917 return pushkey.list(self, namespace)
1918
1919 def debugwireargs(self, one, two, three=None, four=None):
1920 '''used to test argument passing over the wire'''
1921 return "%s %s %s %s" % (one, two, three, four)
1922
1923 # used to avoid circular references so destructors work
1924 def aftertrans(files):
1925 renamefiles = [tuple(t) for t in files]
1926 def a():
1927 for src, dest in renamefiles:
1928 util.rename(src, dest)
1929 return a
1930
1931 def instance(ui, path, create):
1932 return localrepository(ui, urlmod.localpath(path), create)
1933
1934 def islocal(path):
1935 return True
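aftertrans() above captures a list of (src, dest) pairs and returns a callback that performs the renames later, once the transaction machinery decides to run it. The same deferred-rename pattern in a self-contained form, using os.rename and throwaway file names invented for the example:

import os
import tempfile

# Illustration only: build a "rename these when done" callback, then run it.
def aftertrans(files):
    renamefiles = [tuple(t) for t in files]
    def a():
        for src, dest in renamefiles:
            os.rename(src, dest)
    return a

tmpdir = tempfile.mkdtemp()
src = os.path.join(tmpdir, 'journal')
dest = os.path.join(tmpdir, 'undo')
open(src, 'w').close()

callback = aftertrans([(src, dest)])   # nothing is renamed yet
callback()                             # the rename happens here
assert os.path.exists(dest) and not os.path.exists(src)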
NO CONTENT: five further modified files omitted; the requested commit is too big and their content was truncated.