##// END OF EJS Templates
replace various uses of list.reverse()
Matt Mackall -
r8210:344751cd default
parent child Browse files
Show More
@@ -1,1203 +1,1202 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
4 #
5 # Configuration options:
5 # Configuration options:
6 #
6 #
7 # convert.svn.trunk
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
10 # Relative path to tree of branches (default: "branches")
11 # convert.svn.tags
11 # convert.svn.tags
12 # Relative path to tree of tags (default: "tags")
12 # Relative path to tree of tags (default: "tags")
13 #
13 #
14 # Set these in a hgrc, or on the command line as follows:
14 # Set these in a hgrc, or on the command line as follows:
15 #
15 #
16 # hg convert --config convert.svn.trunk=wackoname [...]
16 # hg convert --config convert.svn.trunk=wackoname [...]
17
17
18 import locale
18 import locale
19 import os
19 import os
20 import re
20 import re
21 import sys
21 import sys
22 import cPickle as pickle
22 import cPickle as pickle
23 import tempfile
23 import tempfile
24 import urllib
24 import urllib
25
25
26 from mercurial import strutil, util
26 from mercurial import strutil, util
27 from mercurial.i18n import _
27 from mercurial.i18n import _
28
28
29 # Subversion stuff. Works best with very recent Python SVN bindings
29 # Subversion stuff. Works best with very recent Python SVN bindings
30 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
31 # these bindings.
31 # these bindings.
32
32
33 from cStringIO import StringIO
33 from cStringIO import StringIO
34
34
35 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
35 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
36 from common import commandline, converter_source, converter_sink, mapfile
36 from common import commandline, converter_source, converter_sink, mapfile
37
37
38 try:
38 try:
39 from svn.core import SubversionException, Pool
39 from svn.core import SubversionException, Pool
40 import svn
40 import svn
41 import svn.client
41 import svn.client
42 import svn.core
42 import svn.core
43 import svn.ra
43 import svn.ra
44 import svn.delta
44 import svn.delta
45 import transport
45 import transport
46 except ImportError:
46 except ImportError:
47 pass
47 pass
48
48
class SvnPathNotFound(Exception):
    """Raised when a Subversion path cannot be found.

    Callers that follow possibly-bogus paths (e.g. gettags()) catch it.
    """
    pass
51
51
def geturl(path):
    """Turn a path or URL into a canonical Subversion URL.

    Local directories become quoted file:// URLs; anything the bindings
    cannot resolve is returned unchanged.
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # The bindings could not resolve it, fall back to manual handling.
        pass
    if os.path.isdir(path):
        normed = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            normed = '/' + util.normpath(normed)
        return 'file://%s' % urllib.quote(normed)
    return path
63
63
def optrev(number):
    """Wrap a plain revision number into an svn_opt_revision_t."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
69
69
class changedpath(object):
    """Plain copy of a changed-path entry's attributes.

    get_log_child.receiver() substitutes these for the binding objects so
    the log entries can be pickled to the parent process.
    """
    def __init__(self, p):
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
        self.action = p.action
75
75
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Stream svn log entries for url/paths over fp as pickles.

    Each entry is pickled as an (orig_paths, revnum, author, date,
    message) tuple; the stream is terminated with a pickled None on
    success (or on IOError, when the consumer interrupted us), or with a
    pickled error number if the svn bindings raised. Runs in a child
    process and never returns: it exits via os._exit(0).
    """
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        # Replace binding objects with picklable changedpath copies
        # before shipping the entry to the parent.
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
106
106
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.

    Reads the pickled get_log_child() arguments from stdin and streams
    the pickled log entries to stdout.
    """
    # Binary mode so the pickle streams are not mangled (matters on
    # platforms that translate line endings).
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
115
115
116 class logstream:
116 class logstream:
117 """Interruptible revision log iterator."""
117 """Interruptible revision log iterator."""
118 def __init__(self, stdout):
118 def __init__(self, stdout):
119 self._stdout = stdout
119 self._stdout = stdout
120
120
121 def __iter__(self):
121 def __iter__(self):
122 while True:
122 while True:
123 entry = pickle.load(self._stdout)
123 entry = pickle.load(self._stdout)
124 try:
124 try:
125 orig_paths, revnum, author, date, message = entry
125 orig_paths, revnum, author, date, message = entry
126 except:
126 except:
127 if entry is None:
127 if entry is None:
128 break
128 break
129 raise SubversionException("child raised exception", entry)
129 raise SubversionException("child raised exception", entry)
130 yield entry
130 yield entry
131
131
132 def close(self):
132 def close(self):
133 if self._stdout:
133 if self._stdout:
134 self._stdout.close()
134 self._stdout.close()
135 self._stdout = None
135 self._stdout = None
136
136
137
137
138 # Check to see if the given path is a local Subversion repo. Verify this by
138 # Check to see if the given path is a local Subversion repo. Verify this by
139 # looking for several svn-specific files and directories in the given
139 # looking for several svn-specific files and directories in the given
140 # directory.
140 # directory.
def filecheck(path, proto):
    """Return True if path holds the entries of a local svn repository.

    proto is unused; the signature matches the other protomap checkers.
    """
    required = ('locks', 'hooks', 'format', 'db')
    for entry in required:
        if not os.path.exists(os.path.join(path, entry)):
            return False
    return True
146
146
147 # Check to see if a given path is the root of an svn repo over http. We verify
147 # Check to see if a given path is the root of an svn repo over http. We verify
148 # this by requesting a version-controlled URL we know can't exist and looking
148 # this by requesting a version-controlled URL we know can't exist and looking
149 # for the svn-specific "not found" XML.
149 # for the svn-specific "not found" XML.
def httpcheck(path, proto):
    """Return True if proto://path is the root of an svn repo over http.

    Requests a version-controlled URL known not to exist; a real svn
    server answers with its characteristic "not found" XML error.
    """
    body = urllib.urlopen('%s://%s/!svn/ver/0/.svn' % (proto, path)).read()
    return '<m:human-readable errcode="160013">' in body
153
153
# Map URL schemes to the probe used by issvnurl() to decide whether the
# target is a Subversion repository.
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
def issvnurl(url):
    """Return True if url points inside a Subversion repository.

    Probes the url itself, then each successive parent, with the checker
    registered for its scheme in protomap.
    """
    if '://' not in url:
        return False
    proto, path = url.split('://', 1)
    check = protomap.get(proto, lambda p, p2: False)
    while '/' in path:
        if check(path, proto):
            return True
        # Walk up one path component and try again.
        path = path.rsplit('/', 1)[0]
    return False
168
168
169 # SVN conversion code stolen from bzr-svn and tailor
169 # SVN conversion code stolen from bzr-svn and tailor
170 #
170 #
171 # Subversion looks like a versioned filesystem, branches structures
171 # Subversion looks like a versioned filesystem, branches structures
172 # are defined by conventions and not enforced by the tool. First,
172 # are defined by conventions and not enforced by the tool. First,
173 # we define the potential branches (modules) as "trunk" and "branches"
173 # we define the potential branches (modules) as "trunk" and "branches"
174 # children directories. Revisions are then identified by their
174 # children directories. Revisions are then identified by their
175 # module and revision number (and a repository identifier).
175 # module and revision number (and a repository identifier).
176 #
176 #
177 # The revision graph is really a tree (or a forest). By default, a
177 # The revision graph is really a tree (or a forest). By default, a
178 # revision parent is the previous revision in the same module. If the
178 # revision parent is the previous revision in the same module. If the
179 # module directory is copied/moved from another module then the
179 # module directory is copied/moved from another module then the
180 # revision is the module root and its parent the source revision in
180 # revision is the module root and its parent the source revision in
181 # the parent module. A revision has at most one parent.
181 # the parent module. A revision has at most one parent.
182 #
182 #
183 class svn_source(converter_source):
183 class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Probe url, check the svn bindings and open the RA session.

        Raises NoRepo if url does not look like a Subversion repository
        and MissingTool if usable (>= 1.4) Python bindings are missing.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(url)):
            raise NoRepo("%s does not look like a Subversion repo" % url)

        # SubversionException is only bound when the svn imports at the
        # top of the file succeeded; NameError means no bindings at all.
        try:
            SubversionException
        except NameError:
            raise MissingTool(_('Subversion python bindings could not be loaded'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Bindings so old they lack the version constants.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.encoding = locale.getpreferredencoding()
        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at+1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
        except SubversionException:
            ui.traceback()
            raise NoRepo("%s does not look like a Subversion repo" % self.url)

        # An explicit rev argument overrides any @rev suffix parsed above.
        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        try:
            self.get_blacklist()
        except IOError:
            # Best effort: no blacklist file is fine.
            pass

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s') %
                             self.module.encode(self.encoding))
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        # Remember a working-copy source so converted() can append to
        # its .svn/hg-shamap file.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
272
272
273 def setrevmap(self, revmap):
273 def setrevmap(self, revmap):
274 lastrevs = {}
274 lastrevs = {}
275 for revid in revmap.iterkeys():
275 for revid in revmap.iterkeys():
276 uuid, module, revnum = self.revsplit(revid)
276 uuid, module, revnum = self.revsplit(revid)
277 lastrevnum = lastrevs.setdefault(module, revnum)
277 lastrevnum = lastrevs.setdefault(module, revnum)
278 if revnum > lastrevnum:
278 if revnum > lastrevnum:
279 lastrevs[module] = revnum
279 lastrevs[module] = revnum
280 self.lastrevs = lastrevs
280 self.lastrevs = lastrevs
281
281
282 def exists(self, path, optrev):
282 def exists(self, path, optrev):
283 try:
283 try:
284 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
284 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
285 optrev, False, self.ctx)
285 optrev, False, self.ctx)
286 return True
286 return True
287 except SubversionException:
287 except SubversionException:
288 return False
288 return False
289
289
    def getheads(self):
        """Return the revision ids to convert from.

        The first head is the module's (trunk's) head; one more head is
        added for every non-empty branch under the configured branches
        directory. Also resolves and stores self.tags. Aborts if a start
        revision is configured together with more than one branch, or if
        it is newer than the single head.
        """

        def isdir(path, revnum):
            # True if path is a directory in the repository at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve convert.svn.<name>; an explicitly empty setting
            # disables the convention, a missing configured path aborts.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                 % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s') %
                                 self.module.encode(self.encoding))

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') %
                                 branch.encode(self.encoding))
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(_('svn: no revision found after start revision %d')
                                 % self.startrev)

        return self.heads
358
358
359 def getfile(self, file, rev):
359 def getfile(self, file, rev):
360 data, mode = self._getfile(file, rev)
360 data, mode = self._getfile(file, rev)
361 self.modecache[(file, rev)] = mode
361 self.modecache[(file, rev)] = mode
362 return data
362 return data
363
363
364 def getmode(self, file, rev):
364 def getmode(self, file, rev):
365 return self.modecache[(file, rev)]
365 return self.modecache[(file, rev)]
366
366
    def getchanges(self, rev):
        """Return ([(file, rev)...], copies) for rev.

        Consumes the one-shot cache filled by getchangedfiles() and
        resets the per-revision mode cache used by getfile()/getmode().
        """
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        self.modecache = {}
        (paths, parents) = self.paths[rev]
        if parents:
            files, copies = self.expandpaths(rev, paths, parents)
        else:
            # Perform a full checkout on roots
            uuid, module, revnum = self.revsplit(rev)
            entries = svn.client.ls(self.baseurl + urllib.quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n,e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            copies = {}

        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)
390
390
391 def getchangedfiles(self, rev, i):
391 def getchangedfiles(self, rev, i):
392 changes = self.getchanges(rev)
392 changes = self.getchanges(rev)
393 self._changescache = (rev, changes)
393 self._changescache = (rev, changes)
394 return [f[0] for f in changes[0]]
394 return [f[0] for f in changes[0]]
395
395
    def getcommit(self, rev):
        """Return (and evict from cache) the commit object for rev,
        fetching a batch of revisions from the server when needed.
        """
        if rev not in self.commits:
            uuid, module, revnum = self.revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            #   revision graph backward traversal. Cache all of them
            #   down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            #   isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
        commit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return commit
415
415
    def gettags(self):
        """Return a {tagname: revid} map built by walking the log of the
        tags directory backwards, following tag and tags-directory
        renames along the way.
        """
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        try:
            for entry in self._getlog([self.tags], start, self.startrev):
                origpaths, revnum, author, date, message = entry
                # (copy source, source rev, copy destination) triples.
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    # Rewrite a pending tag whose path was copied from
                    # elsewhere, or record a new pending tag.
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest.split('/')[-1]])

                # Tell tag renamings from tag creations
                remainings = []
                for source, sourcerev, tagname in pendings:
                    if source.startswith(srctagspath):
                        remainings.append([source, sourcerev, tagname])
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories we assumed
                        # were copied with their parents but were really created
                        # in the tag directory.
                        pass
                pendings = remainings
                tagspath = srctagspath

        except SubversionException:
            self.ui.note(_('no tags found at revision %d\n') % start)
        return tags
482
481
    def converted(self, rev, destrev):
        """Record that svn revision rev was converted as destrev.

        Only applies when converting from a working copy: the mapping is
        appended to the working copy's .svn/hg-shamap file.
        """
        if not self.wc:
            return
        if self.convertfp is None:
            # Lazily open the shamap in append mode on first use.
            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                                  'a')
        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
        self.convertfp.flush()
491
490
492 # -- helper functions --
491 # -- helper functions --
493
492
494 def revid(self, revnum, module=None):
493 def revid(self, revnum, module=None):
495 if not module:
494 if not module:
496 module = self.module
495 module = self.module
497 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
496 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
498 revnum)
497 revnum)
499
498
500 def revnum(self, rev):
499 def revnum(self, rev):
501 return int(rev.split('@')[-1])
500 return int(rev.split('@')[-1])
502
501
503 def revsplit(self, rev):
502 def revsplit(self, rev):
504 url, revnum = rev.encode(self.encoding).rsplit('@', 1)
503 url, revnum = rev.encode(self.encoding).rsplit('@', 1)
505 revnum = int(revnum)
504 revnum = int(revnum)
506 parts = url.split('/', 1)
505 parts = url.split('/', 1)
507 uuid = parts.pop(0)[4:]
506 uuid = parts.pop(0)[4:]
508 mod = ''
507 mod = ''
509 if parts:
508 if parts:
510 mod = '/' + parts[0]
509 mod = '/' + parts[0]
511 return uuid, mod, revnum
510 return uuid, mod, revnum
512
511
513 def latest(self, path, stop=0):
512 def latest(self, path, stop=0):
514 """Find the latest revid affecting path, up to stop. It may return
513 """Find the latest revid affecting path, up to stop. It may return
515 a revision in a different module, since a branch may be moved without
514 a revision in a different module, since a branch may be moved without
516 a change being reported. Return None if computed module does not
515 a change being reported. Return None if computed module does not
517 belong to rootmodule subtree.
516 belong to rootmodule subtree.
518 """
517 """
519 if not path.startswith(self.rootmodule):
518 if not path.startswith(self.rootmodule):
520 # Requests on foreign branches may be forbidden at server level
519 # Requests on foreign branches may be forbidden at server level
521 self.ui.debug(_('ignoring foreign branch %r\n') % path)
520 self.ui.debug(_('ignoring foreign branch %r\n') % path)
522 return None
521 return None
523
522
524 if not stop:
523 if not stop:
525 stop = svn.ra.get_latest_revnum(self.ra)
524 stop = svn.ra.get_latest_revnum(self.ra)
526 try:
525 try:
527 prevmodule = self.reparent('')
526 prevmodule = self.reparent('')
528 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
527 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
529 self.reparent(prevmodule)
528 self.reparent(prevmodule)
530 except SubversionException:
529 except SubversionException:
531 dirent = None
530 dirent = None
532 if not dirent:
531 if not dirent:
533 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
532 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
534
533
535 # stat() gives us the previous revision on this line of development, but
534 # stat() gives us the previous revision on this line of development, but
536 # it might be in *another module*. Fetch the log and detect renames down
535 # it might be in *another module*. Fetch the log and detect renames down
537 # to the latest revision.
536 # to the latest revision.
538 stream = self._getlog([path], stop, dirent.created_rev)
537 stream = self._getlog([path], stop, dirent.created_rev)
539 try:
538 try:
540 for entry in stream:
539 for entry in stream:
541 paths, revnum, author, date, message = entry
540 paths, revnum, author, date, message = entry
542 if revnum <= dirent.created_rev:
541 if revnum <= dirent.created_rev:
543 break
542 break
544
543
545 for p in paths:
544 for p in paths:
546 if not path.startswith(p) or not paths[p].copyfrom_path:
545 if not path.startswith(p) or not paths[p].copyfrom_path:
547 continue
546 continue
548 newpath = paths[p].copyfrom_path + path[len(p):]
547 newpath = paths[p].copyfrom_path + path[len(p):]
549 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
548 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
550 (path, newpath, revnum))
549 (path, newpath, revnum))
551 path = newpath
550 path = newpath
552 break
551 break
553 finally:
552 finally:
554 stream.close()
553 stream.close()
555
554
556 if not path.startswith(self.rootmodule):
555 if not path.startswith(self.rootmodule):
557 self.ui.debug(_('ignoring foreign branch %r\n') % path)
556 self.ui.debug(_('ignoring foreign branch %r\n') % path)
558 return None
557 return None
559 return self.revid(dirent.created_rev, path)
558 return self.revid(dirent.created_rev, path)
560
559
561 def get_blacklist(self):
560 def get_blacklist(self):
562 """Avoid certain revision numbers.
561 """Avoid certain revision numbers.
563 It is not uncommon for two nearby revisions to cancel each other
562 It is not uncommon for two nearby revisions to cancel each other
564 out, e.g. 'I copied trunk into a subdirectory of itself instead
563 out, e.g. 'I copied trunk into a subdirectory of itself instead
565 of making a branch'. The converted repository is significantly
564 of making a branch'. The converted repository is significantly
566 smaller if we ignore such revisions."""
565 smaller if we ignore such revisions."""
567 self.blacklist = set()
566 self.blacklist = set()
568 blacklist = self.blacklist
567 blacklist = self.blacklist
569 for line in file("blacklist.txt", "r"):
568 for line in file("blacklist.txt", "r"):
570 if not line.startswith("#"):
569 if not line.startswith("#"):
571 try:
570 try:
572 svn_rev = int(line.strip())
571 svn_rev = int(line.strip())
573 blacklist.add(svn_rev)
572 blacklist.add(svn_rev)
574 except ValueError:
573 except ValueError:
575 pass # not an integer or a comment
574 pass # not an integer or a comment
576
575
577 def is_blacklisted(self, svn_rev):
576 def is_blacklisted(self, svn_rev):
578 return svn_rev in self.blacklist
577 return svn_rev in self.blacklist
579
578
580 def reparent(self, module):
579 def reparent(self, module):
581 """Reparent the svn transport and return the previous parent."""
580 """Reparent the svn transport and return the previous parent."""
582 if self.prevmodule == module:
581 if self.prevmodule == module:
583 return module
582 return module
584 svnurl = self.baseurl + urllib.quote(module)
583 svnurl = self.baseurl + urllib.quote(module)
585 prevmodule = self.prevmodule
584 prevmodule = self.prevmodule
586 if prevmodule is None:
585 if prevmodule is None:
587 prevmodule = ''
586 prevmodule = ''
588 self.ui.debug(_("reparent to %s\n") % svnurl)
587 self.ui.debug(_("reparent to %s\n") % svnurl)
589 svn.ra.reparent(self.ra, svnurl)
588 svn.ra.reparent(self.ra, svnurl)
590 self.prevmodule = module
589 self.prevmodule = module
591 return prevmodule
590 return prevmodule
592
591
593 def expandpaths(self, rev, paths, parents):
592 def expandpaths(self, rev, paths, parents):
594 entries = []
593 entries = []
595 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
594 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
596 copies = {}
595 copies = {}
597
596
598 new_module, revnum = self.revsplit(rev)[1:]
597 new_module, revnum = self.revsplit(rev)[1:]
599 if new_module != self.module:
598 if new_module != self.module:
600 self.module = new_module
599 self.module = new_module
601 self.reparent(self.module)
600 self.reparent(self.module)
602
601
603 for path, ent in paths:
602 for path, ent in paths:
604 entrypath = self.getrelpath(path)
603 entrypath = self.getrelpath(path)
605 entry = entrypath.decode(self.encoding)
604 entry = entrypath.decode(self.encoding)
606
605
607 kind = self._checkpath(entrypath, revnum)
606 kind = self._checkpath(entrypath, revnum)
608 if kind == svn.core.svn_node_file:
607 if kind == svn.core.svn_node_file:
609 entries.append(self.recode(entry))
608 entries.append(self.recode(entry))
610 if not ent.copyfrom_path or not parents:
609 if not ent.copyfrom_path or not parents:
611 continue
610 continue
612 # Copy sources not in parent revisions cannot be represented,
611 # Copy sources not in parent revisions cannot be represented,
613 # ignore their origin for now
612 # ignore their origin for now
614 pmodule, prevnum = self.revsplit(parents[0])[1:]
613 pmodule, prevnum = self.revsplit(parents[0])[1:]
615 if ent.copyfrom_rev < prevnum:
614 if ent.copyfrom_rev < prevnum:
616 continue
615 continue
617 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
616 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
618 if not copyfrom_path:
617 if not copyfrom_path:
619 continue
618 continue
620 self.ui.debug(_("copied to %s from %s@%s\n") %
619 self.ui.debug(_("copied to %s from %s@%s\n") %
621 (entrypath, copyfrom_path, ent.copyfrom_rev))
620 (entrypath, copyfrom_path, ent.copyfrom_rev))
622 copies[self.recode(entry)] = self.recode(copyfrom_path)
621 copies[self.recode(entry)] = self.recode(copyfrom_path)
623 elif kind == 0: # gone, but had better be a deleted *file*
622 elif kind == 0: # gone, but had better be a deleted *file*
624 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
623 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
625
624
626 # if a branch is created but entries are removed in the same
625 # if a branch is created but entries are removed in the same
627 # changeset, get the right fromrev
626 # changeset, get the right fromrev
628 # parents cannot be empty here, you cannot remove things from
627 # parents cannot be empty here, you cannot remove things from
629 # a root revision.
628 # a root revision.
630 uuid, old_module, fromrev = self.revsplit(parents[0])
629 uuid, old_module, fromrev = self.revsplit(parents[0])
631
630
632 basepath = old_module + "/" + self.getrelpath(path)
631 basepath = old_module + "/" + self.getrelpath(path)
633 entrypath = basepath
632 entrypath = basepath
634
633
635 def lookup_parts(p):
634 def lookup_parts(p):
636 rc = None
635 rc = None
637 parts = p.split("/")
636 parts = p.split("/")
638 for i in range(len(parts)):
637 for i in range(len(parts)):
639 part = "/".join(parts[:i])
638 part = "/".join(parts[:i])
640 info = part, copyfrom.get(part, None)
639 info = part, copyfrom.get(part, None)
641 if info[1] is not None:
640 if info[1] is not None:
642 self.ui.debug(_("found parent directory %s\n") % info[1])
641 self.ui.debug(_("found parent directory %s\n") % info[1])
643 rc = info
642 rc = info
644 return rc
643 return rc
645
644
646 self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
645 self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
647
646
648 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
647 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
649
648
650 # need to remove fragment from lookup_parts and replace with copyfrom_path
649 # need to remove fragment from lookup_parts and replace with copyfrom_path
651 if frompath is not None:
650 if frompath is not None:
652 self.ui.debug(_("munge-o-matic\n"))
651 self.ui.debug(_("munge-o-matic\n"))
653 self.ui.debug(entrypath + '\n')
652 self.ui.debug(entrypath + '\n')
654 self.ui.debug(entrypath[len(frompath):] + '\n')
653 self.ui.debug(entrypath[len(frompath):] + '\n')
655 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
654 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
656 fromrev = froment.copyfrom_rev
655 fromrev = froment.copyfrom_rev
657 self.ui.debug(_("info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
656 self.ui.debug(_("info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
658
657
659 # We can avoid the reparent calls if the module has not changed
658 # We can avoid the reparent calls if the module has not changed
660 # but it probably does not worth the pain.
659 # but it probably does not worth the pain.
661 prevmodule = self.reparent('')
660 prevmodule = self.reparent('')
662 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
661 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
663 self.reparent(prevmodule)
662 self.reparent(prevmodule)
664
663
665 if fromkind == svn.core.svn_node_file: # a deleted file
664 if fromkind == svn.core.svn_node_file: # a deleted file
666 entries.append(self.recode(entry))
665 entries.append(self.recode(entry))
667 elif fromkind == svn.core.svn_node_dir:
666 elif fromkind == svn.core.svn_node_dir:
668 # print "Deleted/moved non-file:", revnum, path, ent
667 # print "Deleted/moved non-file:", revnum, path, ent
669 # children = self._find_children(path, revnum - 1)
668 # children = self._find_children(path, revnum - 1)
670 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
669 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
671 # Sometimes this is tricky. For example: in
670 # Sometimes this is tricky. For example: in
672 # The Subversion Repository revision 6940 a dir
671 # The Subversion Repository revision 6940 a dir
673 # was copied and one of its files was deleted
672 # was copied and one of its files was deleted
674 # from the new location in the same commit. This
673 # from the new location in the same commit. This
675 # code can't deal with that yet.
674 # code can't deal with that yet.
676 if ent.action == 'C':
675 if ent.action == 'C':
677 children = self._find_children(path, fromrev)
676 children = self._find_children(path, fromrev)
678 else:
677 else:
679 oroot = entrypath.strip('/')
678 oroot = entrypath.strip('/')
680 nroot = path.strip('/')
679 nroot = path.strip('/')
681 children = self._find_children(oroot, fromrev)
680 children = self._find_children(oroot, fromrev)
682 children = [s.replace(oroot,nroot) for s in children]
681 children = [s.replace(oroot,nroot) for s in children]
683 # Mark all [files, not directories] as deleted.
682 # Mark all [files, not directories] as deleted.
684 for child in children:
683 for child in children:
685 # Can we move a child directory and its
684 # Can we move a child directory and its
686 # parent in the same commit? (probably can). Could
685 # parent in the same commit? (probably can). Could
687 # cause problems if instead of revnum -1,
686 # cause problems if instead of revnum -1,
688 # we have to look in (copyfrom_path, revnum - 1)
687 # we have to look in (copyfrom_path, revnum - 1)
689 entrypath = self.getrelpath("/" + child, module=old_module)
688 entrypath = self.getrelpath("/" + child, module=old_module)
690 if entrypath:
689 if entrypath:
691 entry = self.recode(entrypath.decode(self.encoding))
690 entry = self.recode(entrypath.decode(self.encoding))
692 if entry in copies:
691 if entry in copies:
693 # deleted file within a copy
692 # deleted file within a copy
694 del copies[entry]
693 del copies[entry]
695 else:
694 else:
696 entries.append(entry)
695 entries.append(entry)
697 else:
696 else:
698 self.ui.debug(_('unknown path in revision %d: %s\n') % \
697 self.ui.debug(_('unknown path in revision %d: %s\n') % \
699 (revnum, path))
698 (revnum, path))
700 elif kind == svn.core.svn_node_dir:
699 elif kind == svn.core.svn_node_dir:
701 # Should probably synthesize normal file entries
700 # Should probably synthesize normal file entries
702 # and handle as above to clean up copy/rename handling.
701 # and handle as above to clean up copy/rename handling.
703
702
704 # If the directory just had a prop change,
703 # If the directory just had a prop change,
705 # then we shouldn't need to look for its children.
704 # then we shouldn't need to look for its children.
706 if ent.action == 'M':
705 if ent.action == 'M':
707 continue
706 continue
708
707
709 # Also this could create duplicate entries. Not sure
708 # Also this could create duplicate entries. Not sure
710 # whether this will matter. Maybe should make entries a set.
709 # whether this will matter. Maybe should make entries a set.
711 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
710 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
712 # This will fail if a directory was copied
711 # This will fail if a directory was copied
713 # from another branch and then some of its files
712 # from another branch and then some of its files
714 # were deleted in the same transaction.
713 # were deleted in the same transaction.
715 children = sorted(self._find_children(path, revnum))
714 children = sorted(self._find_children(path, revnum))
716 for child in children:
715 for child in children:
717 # Can we move a child directory and its
716 # Can we move a child directory and its
718 # parent in the same commit? (probably can). Could
717 # parent in the same commit? (probably can). Could
719 # cause problems if instead of revnum -1,
718 # cause problems if instead of revnum -1,
720 # we have to look in (copyfrom_path, revnum - 1)
719 # we have to look in (copyfrom_path, revnum - 1)
721 entrypath = self.getrelpath("/" + child)
720 entrypath = self.getrelpath("/" + child)
722 # print child, self.module, entrypath
721 # print child, self.module, entrypath
723 if entrypath:
722 if entrypath:
724 # Need to filter out directories here...
723 # Need to filter out directories here...
725 kind = self._checkpath(entrypath, revnum)
724 kind = self._checkpath(entrypath, revnum)
726 if kind != svn.core.svn_node_dir:
725 if kind != svn.core.svn_node_dir:
727 entries.append(self.recode(entrypath))
726 entries.append(self.recode(entrypath))
728
727
729 # Copies here (must copy all from source)
728 # Copies here (must copy all from source)
730 # Probably not a real problem for us if
729 # Probably not a real problem for us if
731 # source does not exist
730 # source does not exist
732 if not ent.copyfrom_path or not parents:
731 if not ent.copyfrom_path or not parents:
733 continue
732 continue
734 # Copy sources not in parent revisions cannot be represented,
733 # Copy sources not in parent revisions cannot be represented,
735 # ignore their origin for now
734 # ignore their origin for now
736 pmodule, prevnum = self.revsplit(parents[0])[1:]
735 pmodule, prevnum = self.revsplit(parents[0])[1:]
737 if ent.copyfrom_rev < prevnum:
736 if ent.copyfrom_rev < prevnum:
738 continue
737 continue
739 copyfrompath = ent.copyfrom_path.decode(self.encoding)
738 copyfrompath = ent.copyfrom_path.decode(self.encoding)
740 copyfrompath = self.getrelpath(copyfrompath, pmodule)
739 copyfrompath = self.getrelpath(copyfrompath, pmodule)
741 if not copyfrompath:
740 if not copyfrompath:
742 continue
741 continue
743 copyfrom[path] = ent
742 copyfrom[path] = ent
744 self.ui.debug(_("mark %s came from %s:%d\n")
743 self.ui.debug(_("mark %s came from %s:%d\n")
745 % (path, copyfrompath, ent.copyfrom_rev))
744 % (path, copyfrompath, ent.copyfrom_rev))
746 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
745 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
747 children.sort()
746 children.sort()
748 for child in children:
747 for child in children:
749 entrypath = self.getrelpath("/" + child, pmodule)
748 entrypath = self.getrelpath("/" + child, pmodule)
750 if not entrypath:
749 if not entrypath:
751 continue
750 continue
752 entry = entrypath.decode(self.encoding)
751 entry = entrypath.decode(self.encoding)
753 copytopath = path + entry[len(copyfrompath):]
752 copytopath = path + entry[len(copyfrompath):]
754 copytopath = self.getrelpath(copytopath)
753 copytopath = self.getrelpath(copytopath)
755 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
754 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
756
755
757 return (list(set(entries)), copies)
756 return (list(set(entries)), copies)
758
757
759 def _fetch_revisions(self, from_revnum, to_revnum):
758 def _fetch_revisions(self, from_revnum, to_revnum):
760 if from_revnum < to_revnum:
759 if from_revnum < to_revnum:
761 from_revnum, to_revnum = to_revnum, from_revnum
760 from_revnum, to_revnum = to_revnum, from_revnum
762
761
763 self.child_cset = None
762 self.child_cset = None
764
763
765 def parselogentry(orig_paths, revnum, author, date, message):
764 def parselogentry(orig_paths, revnum, author, date, message):
766 """Return the parsed commit object or None, and True if
765 """Return the parsed commit object or None, and True if
767 the revision is a branch root.
766 the revision is a branch root.
768 """
767 """
769 self.ui.debug(_("parsing revision %d (%d changes)\n") %
768 self.ui.debug(_("parsing revision %d (%d changes)\n") %
770 (revnum, len(orig_paths)))
769 (revnum, len(orig_paths)))
771
770
772 branched = False
771 branched = False
773 rev = self.revid(revnum)
772 rev = self.revid(revnum)
774 # branch log might return entries for a parent we already have
773 # branch log might return entries for a parent we already have
775
774
776 if rev in self.commits or revnum < to_revnum:
775 if rev in self.commits or revnum < to_revnum:
777 return None, branched
776 return None, branched
778
777
779 parents = []
778 parents = []
780 # check whether this revision is the start of a branch or part
779 # check whether this revision is the start of a branch or part
781 # of a branch renaming
780 # of a branch renaming
782 orig_paths = sorted(orig_paths.iteritems())
781 orig_paths = sorted(orig_paths.iteritems())
783 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
782 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
784 if root_paths:
783 if root_paths:
785 path, ent = root_paths[-1]
784 path, ent = root_paths[-1]
786 if ent.copyfrom_path:
785 if ent.copyfrom_path:
787 branched = True
786 branched = True
788 newpath = ent.copyfrom_path + self.module[len(path):]
787 newpath = ent.copyfrom_path + self.module[len(path):]
789 # ent.copyfrom_rev may not be the actual last revision
788 # ent.copyfrom_rev may not be the actual last revision
790 previd = self.latest(newpath, ent.copyfrom_rev)
789 previd = self.latest(newpath, ent.copyfrom_rev)
791 if previd is not None:
790 if previd is not None:
792 prevmodule, prevnum = self.revsplit(previd)[1:]
791 prevmodule, prevnum = self.revsplit(previd)[1:]
793 if prevnum >= self.startrev:
792 if prevnum >= self.startrev:
794 parents = [previd]
793 parents = [previd]
795 self.ui.note(_('found parent of branch %s at %d: %s\n') %
794 self.ui.note(_('found parent of branch %s at %d: %s\n') %
796 (self.module, prevnum, prevmodule))
795 (self.module, prevnum, prevmodule))
797 else:
796 else:
798 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
797 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
799
798
800 paths = []
799 paths = []
801 # filter out unrelated paths
800 # filter out unrelated paths
802 for path, ent in orig_paths:
801 for path, ent in orig_paths:
803 if self.getrelpath(path) is None:
802 if self.getrelpath(path) is None:
804 continue
803 continue
805 paths.append((path, ent))
804 paths.append((path, ent))
806
805
807 # Example SVN datetime. Includes microseconds.
806 # Example SVN datetime. Includes microseconds.
808 # ISO-8601 conformant
807 # ISO-8601 conformant
809 # '2007-01-04T17:35:00.902377Z'
808 # '2007-01-04T17:35:00.902377Z'
810 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
809 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
811
810
812 log = message and self.recode(message) or ''
811 log = message and self.recode(message) or ''
813 author = author and self.recode(author) or ''
812 author = author and self.recode(author) or ''
814 try:
813 try:
815 branch = self.module.split("/")[-1]
814 branch = self.module.split("/")[-1]
816 if branch == 'trunk':
815 if branch == 'trunk':
817 branch = ''
816 branch = ''
818 except IndexError:
817 except IndexError:
819 branch = None
818 branch = None
820
819
821 cset = commit(author=author,
820 cset = commit(author=author,
822 date=util.datestr(date),
821 date=util.datestr(date),
823 desc=log,
822 desc=log,
824 parents=parents,
823 parents=parents,
825 branch=branch,
824 branch=branch,
826 rev=rev.encode('utf-8'))
825 rev=rev.encode('utf-8'))
827
826
828 self.commits[rev] = cset
827 self.commits[rev] = cset
829 # The parents list is *shared* among self.paths and the
828 # The parents list is *shared* among self.paths and the
830 # commit object. Both will be updated below.
829 # commit object. Both will be updated below.
831 self.paths[rev] = (paths, cset.parents)
830 self.paths[rev] = (paths, cset.parents)
832 if self.child_cset and not self.child_cset.parents:
831 if self.child_cset and not self.child_cset.parents:
833 self.child_cset.parents[:] = [rev]
832 self.child_cset.parents[:] = [rev]
834 self.child_cset = cset
833 self.child_cset = cset
835 return cset, branched
834 return cset, branched
836
835
837 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
836 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
838 (self.module, from_revnum, to_revnum))
837 (self.module, from_revnum, to_revnum))
839
838
840 try:
839 try:
841 firstcset = None
840 firstcset = None
842 lastonbranch = False
841 lastonbranch = False
843 stream = self._getlog([self.module], from_revnum, to_revnum)
842 stream = self._getlog([self.module], from_revnum, to_revnum)
844 try:
843 try:
845 for entry in stream:
844 for entry in stream:
846 paths, revnum, author, date, message = entry
845 paths, revnum, author, date, message = entry
847 if revnum < self.startrev:
846 if revnum < self.startrev:
848 lastonbranch = True
847 lastonbranch = True
849 break
848 break
850 if self.is_blacklisted(revnum):
849 if self.is_blacklisted(revnum):
851 self.ui.note(_('skipping blacklisted revision %d\n')
850 self.ui.note(_('skipping blacklisted revision %d\n')
852 % revnum)
851 % revnum)
853 continue
852 continue
854 if not paths:
853 if not paths:
855 self.ui.debug(_('revision %d has no entries\n') % revnum)
854 self.ui.debug(_('revision %d has no entries\n') % revnum)
856 continue
855 continue
857 cset, lastonbranch = parselogentry(paths, revnum, author,
856 cset, lastonbranch = parselogentry(paths, revnum, author,
858 date, message)
857 date, message)
859 if cset:
858 if cset:
860 firstcset = cset
859 firstcset = cset
861 if lastonbranch:
860 if lastonbranch:
862 break
861 break
863 finally:
862 finally:
864 stream.close()
863 stream.close()
865
864
866 if not lastonbranch and firstcset and not firstcset.parents:
865 if not lastonbranch and firstcset and not firstcset.parents:
867 # The first revision of the sequence (the last fetched one)
866 # The first revision of the sequence (the last fetched one)
868 # has invalid parents if not a branch root. Find the parent
867 # has invalid parents if not a branch root. Find the parent
869 # revision now, if any.
868 # revision now, if any.
870 try:
869 try:
871 firstrevnum = self.revnum(firstcset.rev)
870 firstrevnum = self.revnum(firstcset.rev)
872 if firstrevnum > 1:
871 if firstrevnum > 1:
873 latest = self.latest(self.module, firstrevnum - 1)
872 latest = self.latest(self.module, firstrevnum - 1)
874 if latest:
873 if latest:
875 firstcset.parents.append(latest)
874 firstcset.parents.append(latest)
876 except SvnPathNotFound:
875 except SvnPathNotFound:
877 pass
876 pass
878 except SubversionException, (inst, num):
877 except SubversionException, (inst, num):
879 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
878 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
880 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
879 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
881 raise
880 raise
882
881
883 def _getfile(self, file, rev):
882 def _getfile(self, file, rev):
884 # TODO: ra.get_file transmits the whole file instead of diffs.
883 # TODO: ra.get_file transmits the whole file instead of diffs.
885 mode = ''
884 mode = ''
886 try:
885 try:
887 new_module, revnum = self.revsplit(rev)[1:]
886 new_module, revnum = self.revsplit(rev)[1:]
888 if self.module != new_module:
887 if self.module != new_module:
889 self.module = new_module
888 self.module = new_module
890 self.reparent(self.module)
889 self.reparent(self.module)
891 io = StringIO()
890 io = StringIO()
892 info = svn.ra.get_file(self.ra, file, revnum, io)
891 info = svn.ra.get_file(self.ra, file, revnum, io)
893 data = io.getvalue()
892 data = io.getvalue()
894 # ra.get_files() seems to keep a reference on the input buffer
893 # ra.get_files() seems to keep a reference on the input buffer
895 # preventing collection. Release it explicitely.
894 # preventing collection. Release it explicitely.
896 io.close()
895 io.close()
897 if isinstance(info, list):
896 if isinstance(info, list):
898 info = info[-1]
897 info = info[-1]
899 mode = ("svn:executable" in info) and 'x' or ''
898 mode = ("svn:executable" in info) and 'x' or ''
900 mode = ("svn:special" in info) and 'l' or mode
899 mode = ("svn:special" in info) and 'l' or mode
901 except SubversionException, e:
900 except SubversionException, e:
902 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
901 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
903 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
902 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
904 if e.apr_err in notfound: # File not found
903 if e.apr_err in notfound: # File not found
905 raise IOError()
904 raise IOError()
906 raise
905 raise
907 if mode == 'l':
906 if mode == 'l':
908 link_prefix = "link "
907 link_prefix = "link "
909 if data.startswith(link_prefix):
908 if data.startswith(link_prefix):
910 data = data[len(link_prefix):]
909 data = data[len(link_prefix):]
911 return data, mode
910 return data, mode
912
911
913 def _find_children(self, path, revnum):
912 def _find_children(self, path, revnum):
914 path = path.strip('/')
913 path = path.strip('/')
915 pool = Pool()
914 pool = Pool()
916 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
915 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
917 return ['%s/%s' % (path, x) for x in
916 return ['%s/%s' % (path, x) for x in
918 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
917 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
919
918
920 def getrelpath(self, path, module=None):
919 def getrelpath(self, path, module=None):
921 if module is None:
920 if module is None:
922 module = self.module
921 module = self.module
923 # Given the repository url of this wc, say
922 # Given the repository url of this wc, say
924 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
923 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
925 # extract the "entry" portion (a relative path) from what
924 # extract the "entry" portion (a relative path) from what
926 # svn log --xml says, ie
925 # svn log --xml says, ie
927 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
926 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
928 # that is to say "tests/PloneTestCase.py"
927 # that is to say "tests/PloneTestCase.py"
929 if path.startswith(module):
928 if path.startswith(module):
930 relative = path.rstrip('/')[len(module):]
929 relative = path.rstrip('/')[len(module):]
931 if relative.startswith('/'):
930 if relative.startswith('/'):
932 return relative[1:]
931 return relative[1:]
933 elif relative == '':
932 elif relative == '':
934 return relative
933 return relative
935
934
936 # The path is outside our tracked tree...
935 # The path is outside our tracked tree...
937 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
936 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
938 return None
937 return None
939
938
940 def _checkpath(self, path, revnum):
939 def _checkpath(self, path, revnum):
941 # ra.check_path does not like leading slashes very much, it leads
940 # ra.check_path does not like leading slashes very much, it leads
942 # to PROPFIND subversion errors
941 # to PROPFIND subversion errors
943 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
942 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
944
943
945 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
944 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
946 strict_node_history=False):
945 strict_node_history=False):
947 # Normalize path names, svn >= 1.5 only wants paths relative to
946 # Normalize path names, svn >= 1.5 only wants paths relative to
948 # supplied URL
947 # supplied URL
949 relpaths = []
948 relpaths = []
950 for p in paths:
949 for p in paths:
951 if not p.startswith('/'):
950 if not p.startswith('/'):
952 p = self.module + '/' + p
951 p = self.module + '/' + p
953 relpaths.append(p.strip('/'))
952 relpaths.append(p.strip('/'))
954 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
953 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
955 strict_node_history]
954 strict_node_history]
956 arg = encodeargs(args)
955 arg = encodeargs(args)
957 hgexe = util.hgexecutable()
956 hgexe = util.hgexecutable()
958 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
957 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
959 stdin, stdout = util.popen2(cmd, 'b')
958 stdin, stdout = util.popen2(cmd, 'b')
960 stdin.write(arg)
959 stdin.write(arg)
961 stdin.close()
960 stdin.close()
962 return logstream(stdout)
961 return logstream(stdout)
963
962
964 pre_revprop_change = '''#!/bin/sh
963 pre_revprop_change = '''#!/bin/sh
965
964
966 REPOS="$1"
965 REPOS="$1"
967 REV="$2"
966 REV="$2"
968 USER="$3"
967 USER="$3"
969 PROPNAME="$4"
968 PROPNAME="$4"
970 ACTION="$5"
969 ACTION="$5"
971
970
972 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
971 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
973 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
972 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
974 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
973 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
975
974
976 echo "Changing prohibited revision property" >&2
975 echo "Changing prohibited revision property" >&2
977 exit 1
976 exit 1
978 '''
977 '''
979
978
980 class svn_sink(converter_sink, commandline):
979 class svn_sink(converter_sink, commandline):
981 commit_re = re.compile(r'Committed revision (\d+).', re.M)
980 commit_re = re.compile(r'Committed revision (\d+).', re.M)
982
981
983 def prerun(self):
982 def prerun(self):
984 if self.wc:
983 if self.wc:
985 os.chdir(self.wc)
984 os.chdir(self.wc)
986
985
987 def postrun(self):
986 def postrun(self):
988 if self.wc:
987 if self.wc:
989 os.chdir(self.cwd)
988 os.chdir(self.cwd)
990
989
991 def join(self, name):
990 def join(self, name):
992 return os.path.join(self.wc, '.svn', name)
991 return os.path.join(self.wc, '.svn', name)
993
992
994 def revmapfile(self):
993 def revmapfile(self):
995 return self.join('hg-shamap')
994 return self.join('hg-shamap')
996
995
997 def authorfile(self):
996 def authorfile(self):
998 return self.join('hg-authormap')
997 return self.join('hg-authormap')
999
998
1000 def __init__(self, ui, path):
999 def __init__(self, ui, path):
1001 converter_sink.__init__(self, ui, path)
1000 converter_sink.__init__(self, ui, path)
1002 commandline.__init__(self, ui, 'svn')
1001 commandline.__init__(self, ui, 'svn')
1003 self.delete = []
1002 self.delete = []
1004 self.setexec = []
1003 self.setexec = []
1005 self.delexec = []
1004 self.delexec = []
1006 self.copies = []
1005 self.copies = []
1007 self.wc = None
1006 self.wc = None
1008 self.cwd = os.getcwd()
1007 self.cwd = os.getcwd()
1009
1008
1010 path = os.path.realpath(path)
1009 path = os.path.realpath(path)
1011
1010
1012 created = False
1011 created = False
1013 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1012 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1014 self.wc = path
1013 self.wc = path
1015 self.run0('update')
1014 self.run0('update')
1016 else:
1015 else:
1017 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1016 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1018
1017
1019 if os.path.isdir(os.path.dirname(path)):
1018 if os.path.isdir(os.path.dirname(path)):
1020 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1019 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1021 ui.status(_('initializing svn repo %r\n') %
1020 ui.status(_('initializing svn repo %r\n') %
1022 os.path.basename(path))
1021 os.path.basename(path))
1023 commandline(ui, 'svnadmin').run0('create', path)
1022 commandline(ui, 'svnadmin').run0('create', path)
1024 created = path
1023 created = path
1025 path = util.normpath(path)
1024 path = util.normpath(path)
1026 if not path.startswith('/'):
1025 if not path.startswith('/'):
1027 path = '/' + path
1026 path = '/' + path
1028 path = 'file://' + path
1027 path = 'file://' + path
1029
1028
1030 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
1029 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
1031 self.run0('checkout', path, wcpath)
1030 self.run0('checkout', path, wcpath)
1032
1031
1033 self.wc = wcpath
1032 self.wc = wcpath
1034 self.opener = util.opener(self.wc)
1033 self.opener = util.opener(self.wc)
1035 self.wopener = util.opener(self.wc)
1034 self.wopener = util.opener(self.wc)
1036 self.childmap = mapfile(ui, self.join('hg-childmap'))
1035 self.childmap = mapfile(ui, self.join('hg-childmap'))
1037 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1036 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1038
1037
1039 if created:
1038 if created:
1040 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1039 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1041 fp = open(hook, 'w')
1040 fp = open(hook, 'w')
1042 fp.write(pre_revprop_change)
1041 fp.write(pre_revprop_change)
1043 fp.close()
1042 fp.close()
1044 util.set_flags(hook, False, True)
1043 util.set_flags(hook, False, True)
1045
1044
1046 xport = transport.SvnRaTransport(url=geturl(path))
1045 xport = transport.SvnRaTransport(url=geturl(path))
1047 self.uuid = svn.ra.get_uuid(xport.ra)
1046 self.uuid = svn.ra.get_uuid(xport.ra)
1048
1047
1049 def wjoin(self, *names):
1048 def wjoin(self, *names):
1050 return os.path.join(self.wc, *names)
1049 return os.path.join(self.wc, *names)
1051
1050
1052 def putfile(self, filename, flags, data):
1051 def putfile(self, filename, flags, data):
1053 if 'l' in flags:
1052 if 'l' in flags:
1054 self.wopener.symlink(data, filename)
1053 self.wopener.symlink(data, filename)
1055 else:
1054 else:
1056 try:
1055 try:
1057 if os.path.islink(self.wjoin(filename)):
1056 if os.path.islink(self.wjoin(filename)):
1058 os.unlink(filename)
1057 os.unlink(filename)
1059 except OSError:
1058 except OSError:
1060 pass
1059 pass
1061 self.wopener(filename, 'w').write(data)
1060 self.wopener(filename, 'w').write(data)
1062
1061
1063 if self.is_exec:
1062 if self.is_exec:
1064 was_exec = self.is_exec(self.wjoin(filename))
1063 was_exec = self.is_exec(self.wjoin(filename))
1065 else:
1064 else:
1066 # On filesystems not supporting execute-bit, there is no way
1065 # On filesystems not supporting execute-bit, there is no way
1067 # to know if it is set but asking subversion. Setting it
1066 # to know if it is set but asking subversion. Setting it
1068 # systematically is just as expensive and much simpler.
1067 # systematically is just as expensive and much simpler.
1069 was_exec = 'x' not in flags
1068 was_exec = 'x' not in flags
1070
1069
1071 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1070 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1072 if was_exec:
1071 if was_exec:
1073 if 'x' not in flags:
1072 if 'x' not in flags:
1074 self.delexec.append(filename)
1073 self.delexec.append(filename)
1075 else:
1074 else:
1076 if 'x' in flags:
1075 if 'x' in flags:
1077 self.setexec.append(filename)
1076 self.setexec.append(filename)
1078
1077
1079 def _copyfile(self, source, dest):
1078 def _copyfile(self, source, dest):
1080 # SVN's copy command pukes if the destination file exists, but
1079 # SVN's copy command pukes if the destination file exists, but
1081 # our copyfile method expects to record a copy that has
1080 # our copyfile method expects to record a copy that has
1082 # already occurred. Cross the semantic gap.
1081 # already occurred. Cross the semantic gap.
1083 wdest = self.wjoin(dest)
1082 wdest = self.wjoin(dest)
1084 exists = os.path.exists(wdest)
1083 exists = os.path.exists(wdest)
1085 if exists:
1084 if exists:
1086 fd, tempname = tempfile.mkstemp(
1085 fd, tempname = tempfile.mkstemp(
1087 prefix='hg-copy-', dir=os.path.dirname(wdest))
1086 prefix='hg-copy-', dir=os.path.dirname(wdest))
1088 os.close(fd)
1087 os.close(fd)
1089 os.unlink(tempname)
1088 os.unlink(tempname)
1090 os.rename(wdest, tempname)
1089 os.rename(wdest, tempname)
1091 try:
1090 try:
1092 self.run0('copy', source, dest)
1091 self.run0('copy', source, dest)
1093 finally:
1092 finally:
1094 if exists:
1093 if exists:
1095 try:
1094 try:
1096 os.unlink(wdest)
1095 os.unlink(wdest)
1097 except OSError:
1096 except OSError:
1098 pass
1097 pass
1099 os.rename(tempname, wdest)
1098 os.rename(tempname, wdest)
1100
1099
1101 def dirs_of(self, files):
1100 def dirs_of(self, files):
1102 dirs = set()
1101 dirs = set()
1103 for f in files:
1102 for f in files:
1104 if os.path.isdir(self.wjoin(f)):
1103 if os.path.isdir(self.wjoin(f)):
1105 dirs.add(f)
1104 dirs.add(f)
1106 for i in strutil.rfindall(f, '/'):
1105 for i in strutil.rfindall(f, '/'):
1107 dirs.add(f[:i])
1106 dirs.add(f[:i])
1108 return dirs
1107 return dirs
1109
1108
1110 def add_dirs(self, files):
1109 def add_dirs(self, files):
1111 add_dirs = [d for d in sorted(self.dirs_of(files))
1110 add_dirs = [d for d in sorted(self.dirs_of(files))
1112 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1111 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1113 if add_dirs:
1112 if add_dirs:
1114 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1113 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1115 return add_dirs
1114 return add_dirs
1116
1115
1117 def add_files(self, files):
1116 def add_files(self, files):
1118 if files:
1117 if files:
1119 self.xargs(files, 'add', quiet=True)
1118 self.xargs(files, 'add', quiet=True)
1120 return files
1119 return files
1121
1120
1122 def tidy_dirs(self, names):
1121 def tidy_dirs(self, names):
1123 deleted = []
1122 deleted = []
1124 for d in sorted(self.dirs_of(names), reverse=True):
1123 for d in sorted(self.dirs_of(names), reverse=True):
1125 wd = self.wjoin(d)
1124 wd = self.wjoin(d)
1126 if os.listdir(wd) == '.svn':
1125 if os.listdir(wd) == '.svn':
1127 self.run0('delete', d)
1126 self.run0('delete', d)
1128 deleted.append(d)
1127 deleted.append(d)
1129 return deleted
1128 return deleted
1130
1129
1131 def addchild(self, parent, child):
1130 def addchild(self, parent, child):
1132 self.childmap[parent] = child
1131 self.childmap[parent] = child
1133
1132
1134 def revid(self, rev):
1133 def revid(self, rev):
1135 return u"svn:%s@%s" % (self.uuid, rev)
1134 return u"svn:%s@%s" % (self.uuid, rev)
1136
1135
1137 def putcommit(self, files, copies, parents, commit, source):
1136 def putcommit(self, files, copies, parents, commit, source):
1138 # Apply changes to working copy
1137 # Apply changes to working copy
1139 for f, v in files:
1138 for f, v in files:
1140 try:
1139 try:
1141 data = source.getfile(f, v)
1140 data = source.getfile(f, v)
1142 except IOError:
1141 except IOError:
1143 self.delete.append(f)
1142 self.delete.append(f)
1144 else:
1143 else:
1145 e = source.getmode(f, v)
1144 e = source.getmode(f, v)
1146 self.putfile(f, e, data)
1145 self.putfile(f, e, data)
1147 if f in copies:
1146 if f in copies:
1148 self.copies.append([copies[f], f])
1147 self.copies.append([copies[f], f])
1149 files = [f[0] for f in files]
1148 files = [f[0] for f in files]
1150
1149
1151 for parent in parents:
1150 for parent in parents:
1152 try:
1151 try:
1153 return self.revid(self.childmap[parent])
1152 return self.revid(self.childmap[parent])
1154 except KeyError:
1153 except KeyError:
1155 pass
1154 pass
1156 entries = set(self.delete)
1155 entries = set(self.delete)
1157 files = frozenset(files)
1156 files = frozenset(files)
1158 entries.update(self.add_dirs(files.difference(entries)))
1157 entries.update(self.add_dirs(files.difference(entries)))
1159 if self.copies:
1158 if self.copies:
1160 for s, d in self.copies:
1159 for s, d in self.copies:
1161 self._copyfile(s, d)
1160 self._copyfile(s, d)
1162 self.copies = []
1161 self.copies = []
1163 if self.delete:
1162 if self.delete:
1164 self.xargs(self.delete, 'delete')
1163 self.xargs(self.delete, 'delete')
1165 self.delete = []
1164 self.delete = []
1166 entries.update(self.add_files(files.difference(entries)))
1165 entries.update(self.add_files(files.difference(entries)))
1167 entries.update(self.tidy_dirs(entries))
1166 entries.update(self.tidy_dirs(entries))
1168 if self.delexec:
1167 if self.delexec:
1169 self.xargs(self.delexec, 'propdel', 'svn:executable')
1168 self.xargs(self.delexec, 'propdel', 'svn:executable')
1170 self.delexec = []
1169 self.delexec = []
1171 if self.setexec:
1170 if self.setexec:
1172 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1171 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1173 self.setexec = []
1172 self.setexec = []
1174
1173
1175 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1174 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1176 fp = os.fdopen(fd, 'w')
1175 fp = os.fdopen(fd, 'w')
1177 fp.write(commit.desc)
1176 fp.write(commit.desc)
1178 fp.close()
1177 fp.close()
1179 try:
1178 try:
1180 output = self.run0('commit',
1179 output = self.run0('commit',
1181 username=util.shortuser(commit.author),
1180 username=util.shortuser(commit.author),
1182 file=messagefile,
1181 file=messagefile,
1183 encoding='utf-8')
1182 encoding='utf-8')
1184 try:
1183 try:
1185 rev = self.commit_re.search(output).group(1)
1184 rev = self.commit_re.search(output).group(1)
1186 except AttributeError:
1185 except AttributeError:
1187 self.ui.warn(_('unexpected svn output:\n'))
1186 self.ui.warn(_('unexpected svn output:\n'))
1188 self.ui.warn(output)
1187 self.ui.warn(output)
1189 raise util.Abort(_('unable to cope with svn output'))
1188 raise util.Abort(_('unable to cope with svn output'))
1190 if commit.rev:
1189 if commit.rev:
1191 self.run('propset', 'hg:convert-rev', commit.rev,
1190 self.run('propset', 'hg:convert-rev', commit.rev,
1192 revprop=True, revision=rev)
1191 revprop=True, revision=rev)
1193 if commit.branch and commit.branch != 'default':
1192 if commit.branch and commit.branch != 'default':
1194 self.run('propset', 'hg:convert-branch', commit.branch,
1193 self.run('propset', 'hg:convert-branch', commit.branch,
1195 revprop=True, revision=rev)
1194 revprop=True, revision=rev)
1196 for parent in parents:
1195 for parent in parents:
1197 self.addchild(parent, rev)
1196 self.addchild(parent, rev)
1198 return self.revid(rev)
1197 return self.revid(rev)
1199 finally:
1198 finally:
1200 os.unlink(messagefile)
1199 os.unlink(messagefile)
1201
1200
1202 def puttags(self, tags):
1201 def puttags(self, tags):
1203 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
1202 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,286 +1,282 b''
1 # GnuPG signing extension for Mercurial
1 # GnuPG signing extension for Mercurial
2 #
2 #
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands
9 from mercurial import util, commands
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 class gpg:
13 class gpg:
14 def __init__(self, path, key=None):
14 def __init__(self, path, key=None):
15 self.path = path
15 self.path = path
16 self.key = (key and " --local-user \"%s\"" % key) or ""
16 self.key = (key and " --local-user \"%s\"" % key) or ""
17
17
18 def sign(self, data):
18 def sign(self, data):
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 return util.filter(data, gpgcmd)
20 return util.filter(data, gpgcmd)
21
21
22 def verify(self, data, sig):
22 def verify(self, data, sig):
23 """ returns of the good and bad signatures"""
23 """ returns of the good and bad signatures"""
24 sigfile = datafile = None
24 sigfile = datafile = None
25 try:
25 try:
26 # create temporary files
26 # create temporary files
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 fp = os.fdopen(fd, 'wb')
28 fp = os.fdopen(fd, 'wb')
29 fp.write(sig)
29 fp.write(sig)
30 fp.close()
30 fp.close()
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 fp = os.fdopen(fd, 'wb')
32 fp = os.fdopen(fd, 'wb')
33 fp.write(data)
33 fp.write(data)
34 fp.close()
34 fp.close()
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 ret = util.filter("", gpgcmd)
37 ret = util.filter("", gpgcmd)
38 finally:
38 finally:
39 for f in (sigfile, datafile):
39 for f in (sigfile, datafile):
40 try:
40 try:
41 if f: os.unlink(f)
41 if f: os.unlink(f)
42 except: pass
42 except: pass
43 keys = []
43 keys = []
44 key, fingerprint = None, None
44 key, fingerprint = None, None
45 err = ""
45 err = ""
46 for l in ret.splitlines():
46 for l in ret.splitlines():
47 # see DETAILS in the gnupg documentation
47 # see DETAILS in the gnupg documentation
48 # filter the logger output
48 # filter the logger output
49 if not l.startswith("[GNUPG:]"):
49 if not l.startswith("[GNUPG:]"):
50 continue
50 continue
51 l = l[9:]
51 l = l[9:]
52 if l.startswith("ERRSIG"):
52 if l.startswith("ERRSIG"):
53 err = _("error while verifying signature")
53 err = _("error while verifying signature")
54 break
54 break
55 elif l.startswith("VALIDSIG"):
55 elif l.startswith("VALIDSIG"):
56 # fingerprint of the primary key
56 # fingerprint of the primary key
57 fingerprint = l.split()[10]
57 fingerprint = l.split()[10]
58 elif (l.startswith("GOODSIG") or
58 elif (l.startswith("GOODSIG") or
59 l.startswith("EXPSIG") or
59 l.startswith("EXPSIG") or
60 l.startswith("EXPKEYSIG") or
60 l.startswith("EXPKEYSIG") or
61 l.startswith("BADSIG")):
61 l.startswith("BADSIG")):
62 if key is not None:
62 if key is not None:
63 keys.append(key + [fingerprint])
63 keys.append(key + [fingerprint])
64 key = l.split(" ", 2)
64 key = l.split(" ", 2)
65 fingerprint = None
65 fingerprint = None
66 if err:
66 if err:
67 return err, []
67 return err, []
68 if key is not None:
68 if key is not None:
69 keys.append(key + [fingerprint])
69 keys.append(key + [fingerprint])
70 return err, keys
70 return err, keys
71
71
72 def newgpg(ui, **opts):
72 def newgpg(ui, **opts):
73 """create a new gpg instance"""
73 """create a new gpg instance"""
74 gpgpath = ui.config("gpg", "cmd", "gpg")
74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 gpgkey = opts.get('key')
75 gpgkey = opts.get('key')
76 if not gpgkey:
76 if not gpgkey:
77 gpgkey = ui.config("gpg", "key", None)
77 gpgkey = ui.config("gpg", "key", None)
78 return gpg(gpgpath, gpgkey)
78 return gpg(gpgpath, gpgkey)
79
79
80 def sigwalk(repo):
80 def sigwalk(repo):
81 """
81 """
82 walk over every sigs, yields a couple
82 walk over every sigs, yields a couple
83 ((node, version, sig), (filename, linenumber))
83 ((node, version, sig), (filename, linenumber))
84 """
84 """
85 def parsefile(fileiter, context):
85 def parsefile(fileiter, context):
86 ln = 1
86 ln = 1
87 for l in fileiter:
87 for l in fileiter:
88 if not l:
88 if not l:
89 continue
89 continue
90 yield (l.split(" ", 2), (context, ln))
90 yield (l.split(" ", 2), (context, ln))
91 ln +=1
91 ln +=1
92
92
93 # read the heads
93 fl = repo.file(".hgsigs")
94 fl = repo.file(".hgsigs")
94 h = fl.heads()
95 for r in reversed(fl.heads()):
95 h.reverse()
96 # read the heads
97 for r in h:
98 fn = ".hgsigs|%s" % hgnode.short(r)
96 fn = ".hgsigs|%s" % hgnode.short(r)
99 for item in parsefile(fl.read(r).splitlines(), fn):
97 for item in parsefile(fl.read(r).splitlines(), fn):
100 yield item
98 yield item
101 try:
99 try:
102 # read local signatures
100 # read local signatures
103 fn = "localsigs"
101 fn = "localsigs"
104 for item in parsefile(repo.opener(fn), fn):
102 for item in parsefile(repo.opener(fn), fn):
105 yield item
103 yield item
106 except IOError:
104 except IOError:
107 pass
105 pass
108
106
109 def getkeys(ui, repo, mygpg, sigdata, context):
107 def getkeys(ui, repo, mygpg, sigdata, context):
110 """get the keys who signed a data"""
108 """get the keys who signed a data"""
111 fn, ln = context
109 fn, ln = context
112 node, version, sig = sigdata
110 node, version, sig = sigdata
113 prefix = "%s:%d" % (fn, ln)
111 prefix = "%s:%d" % (fn, ln)
114 node = hgnode.bin(node)
112 node = hgnode.bin(node)
115
113
116 data = node2txt(repo, node, version)
114 data = node2txt(repo, node, version)
117 sig = binascii.a2b_base64(sig)
115 sig = binascii.a2b_base64(sig)
118 err, keys = mygpg.verify(data, sig)
116 err, keys = mygpg.verify(data, sig)
119 if err:
117 if err:
120 ui.warn("%s:%d %s\n" % (fn, ln , err))
118 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 return None
119 return None
122
120
123 validkeys = []
121 validkeys = []
124 # warn for expired key and/or sigs
122 # warn for expired key and/or sigs
125 for key in keys:
123 for key in keys:
126 if key[0] == "BADSIG":
124 if key[0] == "BADSIG":
127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
125 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 continue
126 continue
129 if key[0] == "EXPSIG":
127 if key[0] == "EXPSIG":
130 ui.write(_("%s Note: Signature has expired"
128 ui.write(_("%s Note: Signature has expired"
131 " (signed by: \"%s\")\n") % (prefix, key[2]))
129 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 elif key[0] == "EXPKEYSIG":
130 elif key[0] == "EXPKEYSIG":
133 ui.write(_("%s Note: This key has expired"
131 ui.write(_("%s Note: This key has expired"
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 validkeys.append((key[1], key[2], key[3]))
133 validkeys.append((key[1], key[2], key[3]))
136 return validkeys
134 return validkeys
137
135
138 def sigs(ui, repo):
136 def sigs(ui, repo):
139 """list signed changesets"""
137 """list signed changesets"""
140 mygpg = newgpg(ui)
138 mygpg = newgpg(ui)
141 revs = {}
139 revs = {}
142
140
143 for data, context in sigwalk(repo):
141 for data, context in sigwalk(repo):
144 node, version, sig = data
142 node, version, sig = data
145 fn, ln = context
143 fn, ln = context
146 try:
144 try:
147 n = repo.lookup(node)
145 n = repo.lookup(node)
148 except KeyError:
146 except KeyError:
149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
147 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 continue
148 continue
151 r = repo.changelog.rev(n)
149 r = repo.changelog.rev(n)
152 keys = getkeys(ui, repo, mygpg, data, context)
150 keys = getkeys(ui, repo, mygpg, data, context)
153 if not keys:
151 if not keys:
154 continue
152 continue
155 revs.setdefault(r, [])
153 revs.setdefault(r, [])
156 revs[r].extend(keys)
154 revs[r].extend(keys)
157 nodes = list(revs)
155 for rev in reversed(revs):
158 nodes.reverse()
159 for rev in nodes:
160 for k in revs[rev]:
156 for k in revs[rev]:
161 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
157 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
162 ui.write("%-30s %s\n" % (keystr(ui, k), r))
158 ui.write("%-30s %s\n" % (keystr(ui, k), r))
163
159
164 def check(ui, repo, rev):
160 def check(ui, repo, rev):
165 """verify all the signatures there may be for a particular revision"""
161 """verify all the signatures there may be for a particular revision"""
166 mygpg = newgpg(ui)
162 mygpg = newgpg(ui)
167 rev = repo.lookup(rev)
163 rev = repo.lookup(rev)
168 hexrev = hgnode.hex(rev)
164 hexrev = hgnode.hex(rev)
169 keys = []
165 keys = []
170
166
171 for data, context in sigwalk(repo):
167 for data, context in sigwalk(repo):
172 node, version, sig = data
168 node, version, sig = data
173 if node == hexrev:
169 if node == hexrev:
174 k = getkeys(ui, repo, mygpg, data, context)
170 k = getkeys(ui, repo, mygpg, data, context)
175 if k:
171 if k:
176 keys.extend(k)
172 keys.extend(k)
177
173
178 if not keys:
174 if not keys:
179 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
175 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
180 return
176 return
181
177
182 # print summary
178 # print summary
183 ui.write("%s is signed by:\n" % hgnode.short(rev))
179 ui.write("%s is signed by:\n" % hgnode.short(rev))
184 for key in keys:
180 for key in keys:
185 ui.write(" %s\n" % keystr(ui, key))
181 ui.write(" %s\n" % keystr(ui, key))
186
182
187 def keystr(ui, key):
183 def keystr(ui, key):
188 """associate a string to a key (username, comment)"""
184 """associate a string to a key (username, comment)"""
189 keyid, user, fingerprint = key
185 keyid, user, fingerprint = key
190 comment = ui.config("gpg", fingerprint, None)
186 comment = ui.config("gpg", fingerprint, None)
191 if comment:
187 if comment:
192 return "%s (%s)" % (user, comment)
188 return "%s (%s)" % (user, comment)
193 else:
189 else:
194 return user
190 return user
195
191
196 def sign(ui, repo, *revs, **opts):
192 def sign(ui, repo, *revs, **opts):
197 """add a signature for the current or given revision
193 """add a signature for the current or given revision
198
194
199 If no revision is given, the parent of the working directory is used,
195 If no revision is given, the parent of the working directory is used,
200 or tip if no revision is checked out.
196 or tip if no revision is checked out.
201
197
202 See 'hg help dates' for a list of formats valid for -d/--date.
198 See 'hg help dates' for a list of formats valid for -d/--date.
203 """
199 """
204
200
205 mygpg = newgpg(ui, **opts)
201 mygpg = newgpg(ui, **opts)
206 sigver = "0"
202 sigver = "0"
207 sigmessage = ""
203 sigmessage = ""
208
204
209 date = opts.get('date')
205 date = opts.get('date')
210 if date:
206 if date:
211 opts['date'] = util.parsedate(date)
207 opts['date'] = util.parsedate(date)
212
208
213 if revs:
209 if revs:
214 nodes = [repo.lookup(n) for n in revs]
210 nodes = [repo.lookup(n) for n in revs]
215 else:
211 else:
216 nodes = [node for node in repo.dirstate.parents()
212 nodes = [node for node in repo.dirstate.parents()
217 if node != hgnode.nullid]
213 if node != hgnode.nullid]
218 if len(nodes) > 1:
214 if len(nodes) > 1:
219 raise util.Abort(_('uncommitted merge - please provide a '
215 raise util.Abort(_('uncommitted merge - please provide a '
220 'specific revision'))
216 'specific revision'))
221 if not nodes:
217 if not nodes:
222 nodes = [repo.changelog.tip()]
218 nodes = [repo.changelog.tip()]
223
219
224 for n in nodes:
220 for n in nodes:
225 hexnode = hgnode.hex(n)
221 hexnode = hgnode.hex(n)
226 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
222 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
227 hgnode.short(n)))
223 hgnode.short(n)))
228 # build data
224 # build data
229 data = node2txt(repo, n, sigver)
225 data = node2txt(repo, n, sigver)
230 sig = mygpg.sign(data)
226 sig = mygpg.sign(data)
231 if not sig:
227 if not sig:
232 raise util.Abort(_("Error while signing"))
228 raise util.Abort(_("Error while signing"))
233 sig = binascii.b2a_base64(sig)
229 sig = binascii.b2a_base64(sig)
234 sig = sig.replace("\n", "")
230 sig = sig.replace("\n", "")
235 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
231 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
236
232
237 # write it
233 # write it
238 if opts['local']:
234 if opts['local']:
239 repo.opener("localsigs", "ab").write(sigmessage)
235 repo.opener("localsigs", "ab").write(sigmessage)
240 return
236 return
241
237
242 for x in repo.status(unknown=True)[:5]:
238 for x in repo.status(unknown=True)[:5]:
243 if ".hgsigs" in x and not opts["force"]:
239 if ".hgsigs" in x and not opts["force"]:
244 raise util.Abort(_("working copy of .hgsigs is changed "
240 raise util.Abort(_("working copy of .hgsigs is changed "
245 "(please commit .hgsigs manually "
241 "(please commit .hgsigs manually "
246 "or use --force)"))
242 "or use --force)"))
247
243
248 repo.wfile(".hgsigs", "ab").write(sigmessage)
244 repo.wfile(".hgsigs", "ab").write(sigmessage)
249
245
250 if '.hgsigs' not in repo.dirstate:
246 if '.hgsigs' not in repo.dirstate:
251 repo.add([".hgsigs"])
247 repo.add([".hgsigs"])
252
248
253 if opts["no_commit"]:
249 if opts["no_commit"]:
254 return
250 return
255
251
256 message = opts['message']
252 message = opts['message']
257 if not message:
253 if not message:
258 message = "\n".join([_("Added signature for changeset %s")
254 message = "\n".join([_("Added signature for changeset %s")
259 % hgnode.short(n)
255 % hgnode.short(n)
260 for n in nodes])
256 for n in nodes])
261 try:
257 try:
262 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
258 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
263 except ValueError, inst:
259 except ValueError, inst:
264 raise util.Abort(str(inst))
260 raise util.Abort(str(inst))
265
261
266 def node2txt(repo, node, ver):
262 def node2txt(repo, node, ver):
267 """map a manifest into some text"""
263 """map a manifest into some text"""
268 if ver == "0":
264 if ver == "0":
269 return "%s\n" % hgnode.hex(node)
265 return "%s\n" % hgnode.hex(node)
270 else:
266 else:
271 raise util.Abort(_("unknown signature version"))
267 raise util.Abort(_("unknown signature version"))
272
268
273 cmdtable = {
269 cmdtable = {
274 "sign":
270 "sign":
275 (sign,
271 (sign,
276 [('l', 'local', None, _('make the signature local')),
272 [('l', 'local', None, _('make the signature local')),
277 ('f', 'force', None, _('sign even if the sigfile is modified')),
273 ('f', 'force', None, _('sign even if the sigfile is modified')),
278 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
274 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
279 ('k', 'key', '', _('the key id to sign with')),
275 ('k', 'key', '', _('the key id to sign with')),
280 ('m', 'message', '', _('commit message')),
276 ('m', 'message', '', _('commit message')),
281 ] + commands.commitopts2,
277 ] + commands.commitopts2,
282 _('hg sign [OPTION]... [REVISION]...')),
278 _('hg sign [OPTION]... [REVISION]...')),
283 "sigcheck": (check, [], _('hg sigcheck REVISION')),
279 "sigcheck": (check, [], _('hg sigcheck REVISION')),
284 "sigs": (sigs, [], _('hg sigs')),
280 "sigs": (sigs, [], _('hg sigs')),
285 }
281 }
286
282
@@ -1,416 +1,415 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7 '''show revision graphs in terminal windows
7 '''show revision graphs in terminal windows
8
8
9 This extension adds a --graph option to the incoming, outgoing and log
9 This extension adds a --graph option to the incoming, outgoing and log
10 commands. When this options is given, an ascii representation of the
10 commands. When this options is given, an ascii representation of the
11 revision graph is also shown.
11 revision graph is also shown.
12 '''
12 '''
13
13
14 import os
14 import os
15 from mercurial.cmdutil import revrange, show_changeset
15 from mercurial.cmdutil import revrange, show_changeset
16 from mercurial.commands import templateopts
16 from mercurial.commands import templateopts
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import nullrev
18 from mercurial.node import nullrev
19 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
19 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
20 from mercurial import hg, url, util
20 from mercurial import hg, url, util
21
21
22 def revisions(repo, start, stop):
22 def revisions(repo, start, stop):
23 """cset DAG generator yielding (rev, node, [parents]) tuples
23 """cset DAG generator yielding (rev, node, [parents]) tuples
24
24
25 This generator function walks through the revision history from revision
25 This generator function walks through the revision history from revision
26 start to revision stop (which must be less than or equal to start).
26 start to revision stop (which must be less than or equal to start).
27 """
27 """
28 assert start >= stop
28 assert start >= stop
29 cur = start
29 cur = start
30 while cur >= stop:
30 while cur >= stop:
31 ctx = repo[cur]
31 ctx = repo[cur]
32 parents = [p.rev() for p in ctx.parents() if p.rev() != nullrev]
32 parents = [p.rev() for p in ctx.parents() if p.rev() != nullrev]
33 parents.sort()
33 parents.sort()
34 yield (ctx, parents)
34 yield (ctx, parents)
35 cur -= 1
35 cur -= 1
36
36
37 def filerevs(repo, path, start, stop):
37 def filerevs(repo, path, start, stop):
38 """file cset DAG generator yielding (rev, node, [parents]) tuples
38 """file cset DAG generator yielding (rev, node, [parents]) tuples
39
39
40 This generator function walks through the revision history of a single
40 This generator function walks through the revision history of a single
41 file from revision start to revision stop (which must be less than or
41 file from revision start to revision stop (which must be less than or
42 equal to start).
42 equal to start).
43 """
43 """
44 assert start >= stop
44 assert start >= stop
45 filerev = len(repo.file(path)) - 1
45 filerev = len(repo.file(path)) - 1
46 while filerev >= 0:
46 while filerev >= 0:
47 fctx = repo.filectx(path, fileid=filerev)
47 fctx = repo.filectx(path, fileid=filerev)
48 parents = [f.linkrev() for f in fctx.parents() if f.path() == path]
48 parents = [f.linkrev() for f in fctx.parents() if f.path() == path]
49 parents.sort()
49 parents.sort()
50 if fctx.rev() <= start:
50 if fctx.rev() <= start:
51 yield (fctx, parents)
51 yield (fctx, parents)
52 if fctx.rev() <= stop:
52 if fctx.rev() <= stop:
53 break
53 break
54 filerev -= 1
54 filerev -= 1
55
55
56 def grapher(nodes):
56 def grapher(nodes):
57 """grapher for asciigraph on a list of nodes and their parents
57 """grapher for asciigraph on a list of nodes and their parents
58
58
59 nodes must generate tuples (node, parents, char, lines) where
59 nodes must generate tuples (node, parents, char, lines) where
60 - parents must generate the parents of node, in sorted order,
60 - parents must generate the parents of node, in sorted order,
61 and max length 2,
61 and max length 2,
62 - char is the char to print as the node symbol, and
62 - char is the char to print as the node symbol, and
63 - lines are the lines to display next to the node.
63 - lines are the lines to display next to the node.
64 """
64 """
65 seen = []
65 seen = []
66 for node, parents, char, lines in nodes:
66 for node, parents, char, lines in nodes:
67 if node not in seen:
67 if node not in seen:
68 seen.append(node)
68 seen.append(node)
69 nodeidx = seen.index(node)
69 nodeidx = seen.index(node)
70
70
71 knownparents = []
71 knownparents = []
72 newparents = []
72 newparents = []
73 for parent in parents:
73 for parent in parents:
74 if parent in seen:
74 if parent in seen:
75 knownparents.append(parent)
75 knownparents.append(parent)
76 else:
76 else:
77 newparents.append(parent)
77 newparents.append(parent)
78
78
79 ncols = len(seen)
79 ncols = len(seen)
80 nextseen = seen[:]
80 nextseen = seen[:]
81 nextseen[nodeidx:nodeidx + 1] = newparents
81 nextseen[nodeidx:nodeidx + 1] = newparents
82 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
82 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
83
83
84 if len(newparents) > 0:
84 if len(newparents) > 0:
85 edges.append((nodeidx, nodeidx))
85 edges.append((nodeidx, nodeidx))
86 if len(newparents) > 1:
86 if len(newparents) > 1:
87 edges.append((nodeidx, nodeidx + 1))
87 edges.append((nodeidx, nodeidx + 1))
88 nmorecols = len(nextseen) - ncols
88 nmorecols = len(nextseen) - ncols
89 seen = nextseen
89 seen = nextseen
90 yield (char, lines, nodeidx, edges, ncols, nmorecols)
90 yield (char, lines, nodeidx, edges, ncols, nmorecols)
91
91
92 def fix_long_right_edges(edges):
92 def fix_long_right_edges(edges):
93 for (i, (start, end)) in enumerate(edges):
93 for (i, (start, end)) in enumerate(edges):
94 if end > start:
94 if end > start:
95 edges[i] = (start, end + 1)
95 edges[i] = (start, end + 1)
96
96
97 def get_nodeline_edges_tail(
97 def get_nodeline_edges_tail(
98 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
98 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
99 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
99 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
100 # Still going in the same non-vertical direction.
100 # Still going in the same non-vertical direction.
101 if n_columns_diff == -1:
101 if n_columns_diff == -1:
102 start = max(node_index + 1, p_node_index)
102 start = max(node_index + 1, p_node_index)
103 tail = ["|", " "] * (start - node_index - 1)
103 tail = ["|", " "] * (start - node_index - 1)
104 tail.extend(["/", " "] * (n_columns - start))
104 tail.extend(["/", " "] * (n_columns - start))
105 return tail
105 return tail
106 else:
106 else:
107 return ["\\", " "] * (n_columns - node_index - 1)
107 return ["\\", " "] * (n_columns - node_index - 1)
108 else:
108 else:
109 return ["|", " "] * (n_columns - node_index - 1)
109 return ["|", " "] * (n_columns - node_index - 1)
110
110
111 def draw_edges(edges, nodeline, interline):
111 def draw_edges(edges, nodeline, interline):
112 for (start, end) in edges:
112 for (start, end) in edges:
113 if start == end + 1:
113 if start == end + 1:
114 interline[2 * end + 1] = "/"
114 interline[2 * end + 1] = "/"
115 elif start == end - 1:
115 elif start == end - 1:
116 interline[2 * start + 1] = "\\"
116 interline[2 * start + 1] = "\\"
117 elif start == end:
117 elif start == end:
118 interline[2 * start] = "|"
118 interline[2 * start] = "|"
119 else:
119 else:
120 nodeline[2 * end] = "+"
120 nodeline[2 * end] = "+"
121 if start > end:
121 if start > end:
122 (start, end) = (end,start)
122 (start, end) = (end,start)
123 for i in range(2 * start + 1, 2 * end):
123 for i in range(2 * start + 1, 2 * end):
124 if nodeline[i] != "+":
124 if nodeline[i] != "+":
125 nodeline[i] = "-"
125 nodeline[i] = "-"
126
126
127 def get_padding_line(ni, n_columns, edges):
127 def get_padding_line(ni, n_columns, edges):
128 line = []
128 line = []
129 line.extend(["|", " "] * ni)
129 line.extend(["|", " "] * ni)
130 if (ni, ni - 1) in edges or (ni, ni) in edges:
130 if (ni, ni - 1) in edges or (ni, ni) in edges:
131 # (ni, ni - 1) (ni, ni)
131 # (ni, ni - 1) (ni, ni)
132 # | | | | | | | |
132 # | | | | | | | |
133 # +---o | | o---+
133 # +---o | | o---+
134 # | | c | | c | |
134 # | | c | | c | |
135 # | |/ / | |/ /
135 # | |/ / | |/ /
136 # | | | | | |
136 # | | | | | |
137 c = "|"
137 c = "|"
138 else:
138 else:
139 c = " "
139 c = " "
140 line.extend([c, " "])
140 line.extend([c, " "])
141 line.extend(["|", " "] * (n_columns - ni - 1))
141 line.extend(["|", " "] * (n_columns - ni - 1))
142 return line
142 return line
143
143
144 def ascii(ui, grapher):
144 def ascii(ui, grapher):
145 """prints an ASCII graph of the DAG returned by the grapher
145 """prints an ASCII graph of the DAG returned by the grapher
146
146
147 grapher is a generator that emits tuples with the following elements:
147 grapher is a generator that emits tuples with the following elements:
148
148
149 - Character to use as node's symbol.
149 - Character to use as node's symbol.
150 - List of lines to display as the node's text.
150 - List of lines to display as the node's text.
151 - Column of the current node in the set of ongoing edges.
151 - Column of the current node in the set of ongoing edges.
152 - Edges; a list of (col, next_col) indicating the edges between
152 - Edges; a list of (col, next_col) indicating the edges between
153 the current node and its parents.
153 the current node and its parents.
154 - Number of columns (ongoing edges) in the current revision.
154 - Number of columns (ongoing edges) in the current revision.
155 - The difference between the number of columns (ongoing edges)
155 - The difference between the number of columns (ongoing edges)
156 in the next revision and the number of columns (ongoing edges)
156 in the next revision and the number of columns (ongoing edges)
157 in the current revision. That is: -1 means one column removed;
157 in the current revision. That is: -1 means one column removed;
158 0 means no columns added or removed; 1 means one column added.
158 0 means no columns added or removed; 1 means one column added.
159 """
159 """
160 prev_n_columns_diff = 0
160 prev_n_columns_diff = 0
161 prev_node_index = 0
161 prev_node_index = 0
162 for (node_ch, node_lines, node_index, edges, n_columns, n_columns_diff) in grapher:
162 for (node_ch, node_lines, node_index, edges, n_columns, n_columns_diff) in grapher:
163
163
164 assert -2 < n_columns_diff < 2
164 assert -2 < n_columns_diff < 2
165 if n_columns_diff == -1:
165 if n_columns_diff == -1:
166 # Transform
166 # Transform
167 #
167 #
168 # | | | | | |
168 # | | | | | |
169 # o | | into o---+
169 # o | | into o---+
170 # |X / |/ /
170 # |X / |/ /
171 # | | | |
171 # | | | |
172 fix_long_right_edges(edges)
172 fix_long_right_edges(edges)
173
173
174 # add_padding_line says whether to rewrite
174 # add_padding_line says whether to rewrite
175 #
175 #
176 # | | | | | | | |
176 # | | | | | | | |
177 # | o---+ into | o---+
177 # | o---+ into | o---+
178 # | / / | | | # <--- padding line
178 # | / / | | | # <--- padding line
179 # o | | | / /
179 # o | | | / /
180 # o | |
180 # o | |
181 add_padding_line = (len(node_lines) > 2 and
181 add_padding_line = (len(node_lines) > 2 and
182 n_columns_diff == -1 and
182 n_columns_diff == -1 and
183 [x for (x, y) in edges if x + 1 < y])
183 [x for (x, y) in edges if x + 1 < y])
184
184
185 # fix_nodeline_tail says whether to rewrite
185 # fix_nodeline_tail says whether to rewrite
186 #
186 #
187 # | | o | | | | o | |
187 # | | o | | | | o | |
188 # | | |/ / | | |/ /
188 # | | |/ / | | |/ /
189 # | o | | into | o / / # <--- fixed nodeline tail
189 # | o | | into | o / / # <--- fixed nodeline tail
190 # | |/ / | |/ /
190 # | |/ / | |/ /
191 # o | | o | |
191 # o | | o | |
192 fix_nodeline_tail = len(node_lines) <= 2 and not add_padding_line
192 fix_nodeline_tail = len(node_lines) <= 2 and not add_padding_line
193
193
194 # nodeline is the line containing the node character (typically o)
194 # nodeline is the line containing the node character (typically o)
195 nodeline = ["|", " "] * node_index
195 nodeline = ["|", " "] * node_index
196 nodeline.extend([node_ch, " "])
196 nodeline.extend([node_ch, " "])
197
197
198 nodeline.extend(
198 nodeline.extend(
199 get_nodeline_edges_tail(
199 get_nodeline_edges_tail(
200 node_index, prev_node_index, n_columns, n_columns_diff,
200 node_index, prev_node_index, n_columns, n_columns_diff,
201 prev_n_columns_diff, fix_nodeline_tail))
201 prev_n_columns_diff, fix_nodeline_tail))
202
202
203 # shift_interline is the line containing the non-vertical
203 # shift_interline is the line containing the non-vertical
204 # edges between this entry and the next
204 # edges between this entry and the next
205 shift_interline = ["|", " "] * node_index
205 shift_interline = ["|", " "] * node_index
206 if n_columns_diff == -1:
206 if n_columns_diff == -1:
207 n_spaces = 1
207 n_spaces = 1
208 edge_ch = "/"
208 edge_ch = "/"
209 elif n_columns_diff == 0:
209 elif n_columns_diff == 0:
210 n_spaces = 2
210 n_spaces = 2
211 edge_ch = "|"
211 edge_ch = "|"
212 else:
212 else:
213 n_spaces = 3
213 n_spaces = 3
214 edge_ch = "\\"
214 edge_ch = "\\"
215 shift_interline.extend(n_spaces * [" "])
215 shift_interline.extend(n_spaces * [" "])
216 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
216 shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
217
217
218 # draw edges from the current node to its parents
218 # draw edges from the current node to its parents
219 draw_edges(edges, nodeline, shift_interline)
219 draw_edges(edges, nodeline, shift_interline)
220
220
221 # lines is the list of all graph lines to print
221 # lines is the list of all graph lines to print
222 lines = [nodeline]
222 lines = [nodeline]
223 if add_padding_line:
223 if add_padding_line:
224 lines.append(get_padding_line(node_index, n_columns, edges))
224 lines.append(get_padding_line(node_index, n_columns, edges))
225 lines.append(shift_interline)
225 lines.append(shift_interline)
226
226
227 # make sure that there are as many graph lines as there are
227 # make sure that there are as many graph lines as there are
228 # log strings
228 # log strings
229 while len(node_lines) < len(lines):
229 while len(node_lines) < len(lines):
230 node_lines.append("")
230 node_lines.append("")
231 if len(lines) < len(node_lines):
231 if len(lines) < len(node_lines):
232 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
232 extra_interline = ["|", " "] * (n_columns + n_columns_diff)
233 while len(lines) < len(node_lines):
233 while len(lines) < len(node_lines):
234 lines.append(extra_interline)
234 lines.append(extra_interline)
235
235
236 # print lines
236 # print lines
237 indentation_level = max(n_columns, n_columns + n_columns_diff)
237 indentation_level = max(n_columns, n_columns + n_columns_diff)
238 for (line, logstr) in zip(lines, node_lines):
238 for (line, logstr) in zip(lines, node_lines):
239 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
239 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
240 ui.write(ln.rstrip() + '\n')
240 ui.write(ln.rstrip() + '\n')
241
241
242 # ... and start over
242 # ... and start over
243 prev_node_index = node_index
243 prev_node_index = node_index
244 prev_n_columns_diff = n_columns_diff
244 prev_n_columns_diff = n_columns_diff
245
245
246 def get_revs(repo, rev_opt):
246 def get_revs(repo, rev_opt):
247 if rev_opt:
247 if rev_opt:
248 revs = revrange(repo, rev_opt)
248 revs = revrange(repo, rev_opt)
249 return (max(revs), min(revs))
249 return (max(revs), min(revs))
250 else:
250 else:
251 return (len(repo) - 1, 0)
251 return (len(repo) - 1, 0)
252
252
253 def check_unsupported_flags(opts):
253 def check_unsupported_flags(opts):
254 for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
254 for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
255 "only_merges", "user", "only_branch", "prune", "newest_first",
255 "only_merges", "user", "only_branch", "prune", "newest_first",
256 "no_merges", "include", "exclude"]:
256 "no_merges", "include", "exclude"]:
257 if op in opts and opts[op]:
257 if op in opts and opts[op]:
258 raise util.Abort(_("--graph option is incompatible with --%s") % op)
258 raise util.Abort(_("--graph option is incompatible with --%s") % op)
259
259
260 def graphlog(ui, repo, path=None, **opts):
260 def graphlog(ui, repo, path=None, **opts):
261 """show revision history alongside an ASCII revision graph
261 """show revision history alongside an ASCII revision graph
262
262
263 Print a revision history alongside a revision graph drawn with
263 Print a revision history alongside a revision graph drawn with
264 ASCII characters.
264 ASCII characters.
265
265
266 Nodes printed as an @ character are parents of the working
266 Nodes printed as an @ character are parents of the working
267 directory.
267 directory.
268 """
268 """
269
269
270 check_unsupported_flags(opts)
270 check_unsupported_flags(opts)
271 limit = cmdutil.loglimit(opts)
271 limit = cmdutil.loglimit(opts)
272 start, stop = get_revs(repo, opts["rev"])
272 start, stop = get_revs(repo, opts["rev"])
273 stop = max(stop, start - limit + 1)
273 stop = max(stop, start - limit + 1)
274 if start == nullrev:
274 if start == nullrev:
275 return
275 return
276
276
277 if path:
277 if path:
278 path = util.canonpath(repo.root, os.getcwd(), path)
278 path = util.canonpath(repo.root, os.getcwd(), path)
279 if path: # could be reset in canonpath
279 if path: # could be reset in canonpath
280 revdag = filerevs(repo, path, start, stop)
280 revdag = filerevs(repo, path, start, stop)
281 else:
281 else:
282 revdag = revisions(repo, start, stop)
282 revdag = revisions(repo, start, stop)
283
283
284 graphdag = graphabledag(ui, repo, revdag, opts)
284 graphdag = graphabledag(ui, repo, revdag, opts)
285 ascii(ui, grapher(graphdag))
285 ascii(ui, grapher(graphdag))
286
286
287 def graphrevs(repo, nodes, opts):
287 def graphrevs(repo, nodes, opts):
288 nodes.reverse()
289 include = set(nodes)
288 include = set(nodes)
290 limit = cmdutil.loglimit(opts)
289 limit = cmdutil.loglimit(opts)
291 count = 0
290 count = 0
292 for node in nodes:
291 for node in reversed(nodes):
293 if count >= limit:
292 if count >= limit:
294 break
293 break
295 ctx = repo[node]
294 ctx = repo[node]
296 parents = [p.rev() for p in ctx.parents() if p.node() in include]
295 parents = [p.rev() for p in ctx.parents() if p.node() in include]
297 parents.sort()
296 parents.sort()
298 yield (ctx, parents)
297 yield (ctx, parents)
299 count += 1
298 count += 1
300
299
301 def graphabledag(ui, repo, revdag, opts):
300 def graphabledag(ui, repo, revdag, opts):
302 showparents = [ctx.node() for ctx in repo[None].parents()]
301 showparents = [ctx.node() for ctx in repo[None].parents()]
303 displayer = show_changeset(ui, repo, opts, buffered=True)
302 displayer = show_changeset(ui, repo, opts, buffered=True)
304 for (ctx, parents) in revdag:
303 for (ctx, parents) in revdag:
305 displayer.show(ctx)
304 displayer.show(ctx)
306 lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
305 lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
307 char = ctx.node() in showparents and '@' or 'o'
306 char = ctx.node() in showparents and '@' or 'o'
308 yield (ctx.rev(), parents, char, lines)
307 yield (ctx.rev(), parents, char, lines)
309
308
310 def goutgoing(ui, repo, dest=None, **opts):
309 def goutgoing(ui, repo, dest=None, **opts):
311 """show the outgoing changesets alongside an ASCII revision graph
310 """show the outgoing changesets alongside an ASCII revision graph
312
311
313 Print the outgoing changesets alongside a revision graph drawn with
312 Print the outgoing changesets alongside a revision graph drawn with
314 ASCII characters.
313 ASCII characters.
315
314
316 Nodes printed as an @ character are parents of the working
315 Nodes printed as an @ character are parents of the working
317 directory.
316 directory.
318 """
317 """
319
318
320 check_unsupported_flags(opts)
319 check_unsupported_flags(opts)
321 dest, revs, checkout = hg.parseurl(
320 dest, revs, checkout = hg.parseurl(
322 ui.expandpath(dest or 'default-push', dest or 'default'),
321 ui.expandpath(dest or 'default-push', dest or 'default'),
323 opts.get('rev'))
322 opts.get('rev'))
324 if revs:
323 if revs:
325 revs = [repo.lookup(rev) for rev in revs]
324 revs = [repo.lookup(rev) for rev in revs]
326 other = hg.repository(cmdutil.remoteui(ui, opts), dest)
325 other = hg.repository(cmdutil.remoteui(ui, opts), dest)
327 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
326 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
328 o = repo.findoutgoing(other, force=opts.get('force'))
327 o = repo.findoutgoing(other, force=opts.get('force'))
329 if not o:
328 if not o:
330 ui.status(_("no changes found\n"))
329 ui.status(_("no changes found\n"))
331 return
330 return
332
331
333 o = repo.changelog.nodesbetween(o, revs)[0]
332 o = repo.changelog.nodesbetween(o, revs)[0]
334 revdag = graphrevs(repo, o, opts)
333 revdag = graphrevs(repo, o, opts)
335 graphdag = graphabledag(ui, repo, revdag, opts)
334 graphdag = graphabledag(ui, repo, revdag, opts)
336 ascii(ui, grapher(graphdag))
335 ascii(ui, grapher(graphdag))
337
336
338 def gincoming(ui, repo, source="default", **opts):
337 def gincoming(ui, repo, source="default", **opts):
339 """show the incoming changesets alongside an ASCII revision graph
338 """show the incoming changesets alongside an ASCII revision graph
340
339
341 Print the incoming changesets alongside a revision graph drawn with
340 Print the incoming changesets alongside a revision graph drawn with
342 ASCII characters.
341 ASCII characters.
343
342
344 Nodes printed as an @ character are parents of the working
343 Nodes printed as an @ character are parents of the working
345 directory.
344 directory.
346 """
345 """
347
346
348 check_unsupported_flags(opts)
347 check_unsupported_flags(opts)
349 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
348 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
350 other = hg.repository(cmdutil.remoteui(repo, opts), source)
349 other = hg.repository(cmdutil.remoteui(repo, opts), source)
351 ui.status(_('comparing with %s\n') % url.hidepassword(source))
350 ui.status(_('comparing with %s\n') % url.hidepassword(source))
352 if revs:
351 if revs:
353 revs = [other.lookup(rev) for rev in revs]
352 revs = [other.lookup(rev) for rev in revs]
354 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
353 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
355 if not incoming:
354 if not incoming:
356 try:
355 try:
357 os.unlink(opts["bundle"])
356 os.unlink(opts["bundle"])
358 except:
357 except:
359 pass
358 pass
360 ui.status(_("no changes found\n"))
359 ui.status(_("no changes found\n"))
361 return
360 return
362
361
363 cleanup = None
362 cleanup = None
364 try:
363 try:
365
364
366 fname = opts["bundle"]
365 fname = opts["bundle"]
367 if fname or not other.local():
366 if fname or not other.local():
368 # create a bundle (uncompressed if other repo is not local)
367 # create a bundle (uncompressed if other repo is not local)
369 if revs is None:
368 if revs is None:
370 cg = other.changegroup(incoming, "incoming")
369 cg = other.changegroup(incoming, "incoming")
371 else:
370 else:
372 cg = other.changegroupsubset(incoming, revs, 'incoming')
371 cg = other.changegroupsubset(incoming, revs, 'incoming')
373 bundletype = other.local() and "HG10BZ" or "HG10UN"
372 bundletype = other.local() and "HG10BZ" or "HG10UN"
374 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
373 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
375 # keep written bundle?
374 # keep written bundle?
376 if opts["bundle"]:
375 if opts["bundle"]:
377 cleanup = None
376 cleanup = None
378 if not other.local():
377 if not other.local():
379 # use the created uncompressed bundlerepo
378 # use the created uncompressed bundlerepo
380 other = bundlerepo.bundlerepository(ui, repo.root, fname)
379 other = bundlerepo.bundlerepository(ui, repo.root, fname)
381
380
382 chlist = other.changelog.nodesbetween(incoming, revs)[0]
381 chlist = other.changelog.nodesbetween(incoming, revs)[0]
383 revdag = graphrevs(other, chlist, opts)
382 revdag = graphrevs(other, chlist, opts)
384 graphdag = graphabledag(ui, repo, revdag, opts)
383 graphdag = graphabledag(ui, repo, revdag, opts)
385 ascii(ui, grapher(graphdag))
384 ascii(ui, grapher(graphdag))
386
385
387 finally:
386 finally:
388 if hasattr(other, 'close'):
387 if hasattr(other, 'close'):
389 other.close()
388 other.close()
390 if cleanup:
389 if cleanup:
391 os.unlink(cleanup)
390 os.unlink(cleanup)
392
391
393 def uisetup(ui):
392 def uisetup(ui):
394 '''Initialize the extension.'''
393 '''Initialize the extension.'''
395 _wrapcmd(ui, 'log', commands.table, graphlog)
394 _wrapcmd(ui, 'log', commands.table, graphlog)
396 _wrapcmd(ui, 'incoming', commands.table, gincoming)
395 _wrapcmd(ui, 'incoming', commands.table, gincoming)
397 _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
396 _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
398
397
399 def _wrapcmd(ui, cmd, table, wrapfn):
398 def _wrapcmd(ui, cmd, table, wrapfn):
400 '''wrap the command'''
399 '''wrap the command'''
401 def graph(orig, *args, **kwargs):
400 def graph(orig, *args, **kwargs):
402 if kwargs['graph']:
401 if kwargs['graph']:
403 return wrapfn(*args, **kwargs)
402 return wrapfn(*args, **kwargs)
404 return orig(*args, **kwargs)
403 return orig(*args, **kwargs)
405 entry = extensions.wrapcommand(table, cmd, graph)
404 entry = extensions.wrapcommand(table, cmd, graph)
406 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
405 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
407
406
408 cmdtable = {
407 cmdtable = {
409 "glog":
408 "glog":
410 (graphlog,
409 (graphlog,
411 [('l', 'limit', '', _('limit number of changes displayed')),
410 [('l', 'limit', '', _('limit number of changes displayed')),
412 ('p', 'patch', False, _('show patch')),
411 ('p', 'patch', False, _('show patch')),
413 ('r', 'rev', [], _('show the specified revision or range')),
412 ('r', 'rev', [], _('show the specified revision or range')),
414 ] + templateopts,
413 ] + templateopts,
415 _('hg glog [OPTION]... [FILE]')),
414 _('hg glog [OPTION]... [FILE]')),
416 }
415 }
@@ -1,472 +1,469 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''move sets of revisions to a different ancestor
8 '''move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 http://www.selenic.com/mercurial/wiki/index.cgi/RebaseProject
14 http://www.selenic.com/mercurial/wiki/index.cgi/RebaseProject
15 '''
15 '''
16
16
17 from mercurial import util, repair, merge, cmdutil, commands, error
17 from mercurial import util, repair, merge, cmdutil, commands, error
18 from mercurial import extensions, ancestor, copies, patch
18 from mercurial import extensions, ancestor, copies, patch
19 from mercurial.commands import templateopts
19 from mercurial.commands import templateopts
20 from mercurial.node import nullrev
20 from mercurial.node import nullrev
21 from mercurial.lock import release
21 from mercurial.lock import release
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 import os, errno
23 import os, errno
24
24
25 def rebasemerge(repo, rev, first=False):
25 def rebasemerge(repo, rev, first=False):
26 'return the correct ancestor'
26 'return the correct ancestor'
27 oldancestor = ancestor.ancestor
27 oldancestor = ancestor.ancestor
28
28
29 def newancestor(a, b, pfunc):
29 def newancestor(a, b, pfunc):
30 ancestor.ancestor = oldancestor
30 ancestor.ancestor = oldancestor
31 if b == rev:
31 if b == rev:
32 return repo[rev].parents()[0].rev()
32 return repo[rev].parents()[0].rev()
33 return ancestor.ancestor(a, b, pfunc)
33 return ancestor.ancestor(a, b, pfunc)
34
34
35 if not first:
35 if not first:
36 ancestor.ancestor = newancestor
36 ancestor.ancestor = newancestor
37 else:
37 else:
38 repo.ui.debug(_("first revision, do not change ancestor\n"))
38 repo.ui.debug(_("first revision, do not change ancestor\n"))
39 stats = merge.update(repo, rev, True, True, False)
39 stats = merge.update(repo, rev, True, True, False)
40 return stats
40 return stats
41
41
42 def rebase(ui, repo, **opts):
42 def rebase(ui, repo, **opts):
43 """move changeset (and descendants) to a different branch
43 """move changeset (and descendants) to a different branch
44
44
45 Rebase uses repeated merging to graft changesets from one part of
45 Rebase uses repeated merging to graft changesets from one part of
46 history onto another. This can be useful for linearizing local
46 history onto another. This can be useful for linearizing local
47 changes relative to a master development tree.
47 changes relative to a master development tree.
48
48
49 If a rebase is interrupted to manually resolve a merge, it can be
49 If a rebase is interrupted to manually resolve a merge, it can be
50 continued with --continue/-c or aborted with --abort/-a.
50 continued with --continue/-c or aborted with --abort/-a.
51 """
51 """
52 originalwd = target = None
52 originalwd = target = None
53 external = nullrev
53 external = nullrev
54 state = skipped = {}
54 state = skipped = {}
55
55
56 lock = wlock = None
56 lock = wlock = None
57 try:
57 try:
58 lock = repo.lock()
58 lock = repo.lock()
59 wlock = repo.wlock()
59 wlock = repo.wlock()
60
60
61 # Validate input and define rebasing points
61 # Validate input and define rebasing points
62 destf = opts.get('dest', None)
62 destf = opts.get('dest', None)
63 srcf = opts.get('source', None)
63 srcf = opts.get('source', None)
64 basef = opts.get('base', None)
64 basef = opts.get('base', None)
65 contf = opts.get('continue')
65 contf = opts.get('continue')
66 abortf = opts.get('abort')
66 abortf = opts.get('abort')
67 collapsef = opts.get('collapse', False)
67 collapsef = opts.get('collapse', False)
68 extrafn = opts.get('extrafn')
68 extrafn = opts.get('extrafn')
69 keepf = opts.get('keep', False)
69 keepf = opts.get('keep', False)
70 keepbranchesf = opts.get('keepbranches', False)
70 keepbranchesf = opts.get('keepbranches', False)
71
71
72 if contf or abortf:
72 if contf or abortf:
73 if contf and abortf:
73 if contf and abortf:
74 raise error.ParseError('rebase',
74 raise error.ParseError('rebase',
75 _('cannot use both abort and continue'))
75 _('cannot use both abort and continue'))
76 if collapsef:
76 if collapsef:
77 raise error.ParseError(
77 raise error.ParseError(
78 'rebase', _('cannot use collapse with continue or abort'))
78 'rebase', _('cannot use collapse with continue or abort'))
79
79
80 if srcf or basef or destf:
80 if srcf or basef or destf:
81 raise error.ParseError('rebase',
81 raise error.ParseError('rebase',
82 _('abort and continue do not allow specifying revisions'))
82 _('abort and continue do not allow specifying revisions'))
83
83
84 (originalwd, target, state, collapsef, keepf,
84 (originalwd, target, state, collapsef, keepf,
85 keepbranchesf, external) = restorestatus(repo)
85 keepbranchesf, external) = restorestatus(repo)
86 if abortf:
86 if abortf:
87 abort(repo, originalwd, target, state)
87 abort(repo, originalwd, target, state)
88 return
88 return
89 else:
89 else:
90 if srcf and basef:
90 if srcf and basef:
91 raise error.ParseError('rebase', _('cannot specify both a '
91 raise error.ParseError('rebase', _('cannot specify both a '
92 'revision and a base'))
92 'revision and a base'))
93 cmdutil.bail_if_changed(repo)
93 cmdutil.bail_if_changed(repo)
94 result = buildstate(repo, destf, srcf, basef, collapsef)
94 result = buildstate(repo, destf, srcf, basef, collapsef)
95 if result:
95 if result:
96 originalwd, target, state, external = result
96 originalwd, target, state, external = result
97 else: # Empty state built, nothing to rebase
97 else: # Empty state built, nothing to rebase
98 repo.ui.status(_('nothing to rebase\n'))
98 repo.ui.status(_('nothing to rebase\n'))
99 return
99 return
100
100
101 if keepbranchesf:
101 if keepbranchesf:
102 if extrafn:
102 if extrafn:
103 raise error.ParseError(
103 raise error.ParseError(
104 'rebase', _('cannot use both keepbranches and extrafn'))
104 'rebase', _('cannot use both keepbranches and extrafn'))
105 def extrafn(ctx, extra):
105 def extrafn(ctx, extra):
106 extra['branch'] = ctx.branch()
106 extra['branch'] = ctx.branch()
107
107
108 # Rebase
108 # Rebase
109 targetancestors = list(repo.changelog.ancestors(target))
109 targetancestors = list(repo.changelog.ancestors(target))
110 targetancestors.append(target)
110 targetancestors.append(target)
111
111
112 for rev in sorted(state):
112 for rev in sorted(state):
113 if state[rev] == -1:
113 if state[rev] == -1:
114 storestatus(repo, originalwd, target, state, collapsef, keepf,
114 storestatus(repo, originalwd, target, state, collapsef, keepf,
115 keepbranchesf, external)
115 keepbranchesf, external)
116 rebasenode(repo, rev, target, state, skipped, targetancestors,
116 rebasenode(repo, rev, target, state, skipped, targetancestors,
117 collapsef, extrafn)
117 collapsef, extrafn)
118 ui.note(_('rebase merging completed\n'))
118 ui.note(_('rebase merging completed\n'))
119
119
120 if collapsef:
120 if collapsef:
121 p1, p2 = defineparents(repo, min(state), target,
121 p1, p2 = defineparents(repo, min(state), target,
122 state, targetancestors)
122 state, targetancestors)
123 concludenode(repo, rev, p1, external, state, collapsef,
123 concludenode(repo, rev, p1, external, state, collapsef,
124 last=True, skipped=skipped, extrafn=extrafn)
124 last=True, skipped=skipped, extrafn=extrafn)
125
125
126 if 'qtip' in repo.tags():
126 if 'qtip' in repo.tags():
127 updatemq(repo, state, skipped, **opts)
127 updatemq(repo, state, skipped, **opts)
128
128
129 if not keepf:
129 if not keepf:
130 # Remove no more useful revisions
130 # Remove no more useful revisions
131 if set(repo.changelog.descendants(min(state))) - set(state):
131 if set(repo.changelog.descendants(min(state))) - set(state):
132 ui.warn(_("warning: new changesets detected on source branch, "
132 ui.warn(_("warning: new changesets detected on source branch, "
133 "not stripping\n"))
133 "not stripping\n"))
134 else:
134 else:
135 repair.strip(repo.ui, repo, repo[min(state)].node(), "strip")
135 repair.strip(repo.ui, repo, repo[min(state)].node(), "strip")
136
136
137 clearstatus(repo)
137 clearstatus(repo)
138 ui.status(_("rebase completed\n"))
138 ui.status(_("rebase completed\n"))
139 if os.path.exists(repo.sjoin('undo')):
139 if os.path.exists(repo.sjoin('undo')):
140 util.unlink(repo.sjoin('undo'))
140 util.unlink(repo.sjoin('undo'))
141 if skipped:
141 if skipped:
142 ui.note(_("%d revisions have been skipped\n") % len(skipped))
142 ui.note(_("%d revisions have been skipped\n") % len(skipped))
143 finally:
143 finally:
144 release(lock, wlock)
144 release(lock, wlock)
145
145
146 def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped={},
146 def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped={},
147 extrafn=None):
147 extrafn=None):
148 """Skip commit if collapsing has been required and rev is not the last
148 """Skip commit if collapsing has been required and rev is not the last
149 revision, commit otherwise
149 revision, commit otherwise
150 """
150 """
151 repo.ui.debug(_(" set parents\n"))
151 repo.ui.debug(_(" set parents\n"))
152 if collapse and not last:
152 if collapse and not last:
153 repo.dirstate.setparents(repo[p1].node())
153 repo.dirstate.setparents(repo[p1].node())
154 return None
154 return None
155
155
156 repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
156 repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
157
157
158 # Commit, record the old nodeid
158 # Commit, record the old nodeid
159 m, a, r = repo.status()[:3]
159 m, a, r = repo.status()[:3]
160 newrev = nullrev
160 newrev = nullrev
161 try:
161 try:
162 if last:
162 if last:
163 commitmsg = 'Collapsed revision'
163 commitmsg = 'Collapsed revision'
164 for rebased in state:
164 for rebased in state:
165 if rebased not in skipped:
165 if rebased not in skipped:
166 commitmsg += '\n* %s' % repo[rebased].description()
166 commitmsg += '\n* %s' % repo[rebased].description()
167 commitmsg = repo.ui.edit(commitmsg, repo.ui.username())
167 commitmsg = repo.ui.edit(commitmsg, repo.ui.username())
168 else:
168 else:
169 commitmsg = repo[rev].description()
169 commitmsg = repo[rev].description()
170 # Commit might fail if unresolved files exist
170 # Commit might fail if unresolved files exist
171 extra = {'rebase_source': repo[rev].hex()}
171 extra = {'rebase_source': repo[rev].hex()}
172 if extrafn:
172 if extrafn:
173 extrafn(repo[rev], extra)
173 extrafn(repo[rev], extra)
174 newrev = repo.commit(m+a+r,
174 newrev = repo.commit(m+a+r,
175 text=commitmsg,
175 text=commitmsg,
176 user=repo[rev].user(),
176 user=repo[rev].user(),
177 date=repo[rev].date(),
177 date=repo[rev].date(),
178 extra=extra)
178 extra=extra)
179 return newrev
179 return newrev
180 except util.Abort:
180 except util.Abort:
181 # Invalidate the previous setparents
181 # Invalidate the previous setparents
182 repo.dirstate.invalidate()
182 repo.dirstate.invalidate()
183 raise
183 raise
184
184
185 def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
185 def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
186 extrafn):
186 extrafn):
187 'Rebase a single revision'
187 'Rebase a single revision'
188 repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev]))
188 repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev]))
189
189
190 p1, p2 = defineparents(repo, rev, target, state, targetancestors)
190 p1, p2 = defineparents(repo, rev, target, state, targetancestors)
191
191
192 repo.ui.debug(_(" future parents are %d and %d\n") % (repo[p1].rev(),
192 repo.ui.debug(_(" future parents are %d and %d\n") % (repo[p1].rev(),
193 repo[p2].rev()))
193 repo[p2].rev()))
194
194
195 # Merge phase
195 # Merge phase
196 if len(repo.parents()) != 2:
196 if len(repo.parents()) != 2:
197 # Update to target and merge it with local
197 # Update to target and merge it with local
198 if repo['.'].rev() != repo[p1].rev():
198 if repo['.'].rev() != repo[p1].rev():
199 repo.ui.debug(_(" update to %d:%s\n") % (repo[p1].rev(), repo[p1]))
199 repo.ui.debug(_(" update to %d:%s\n") % (repo[p1].rev(), repo[p1]))
200 merge.update(repo, p1, False, True, False)
200 merge.update(repo, p1, False, True, False)
201 else:
201 else:
202 repo.ui.debug(_(" already in target\n"))
202 repo.ui.debug(_(" already in target\n"))
203 repo.dirstate.write()
203 repo.dirstate.write()
204 repo.ui.debug(_(" merge against %d:%s\n") % (repo[rev].rev(), repo[rev]))
204 repo.ui.debug(_(" merge against %d:%s\n") % (repo[rev].rev(), repo[rev]))
205 first = repo[rev].rev() == repo[min(state)].rev()
205 first = repo[rev].rev() == repo[min(state)].rev()
206 stats = rebasemerge(repo, rev, first)
206 stats = rebasemerge(repo, rev, first)
207
207
208 if stats[3] > 0:
208 if stats[3] > 0:
209 raise util.Abort(_('fix unresolved conflicts with hg resolve then '
209 raise util.Abort(_('fix unresolved conflicts with hg resolve then '
210 'run hg rebase --continue'))
210 'run hg rebase --continue'))
211 else: # we have an interrupted rebase
211 else: # we have an interrupted rebase
212 repo.ui.debug(_('resuming interrupted rebase\n'))
212 repo.ui.debug(_('resuming interrupted rebase\n'))
213
213
214 # Keep track of renamed files in the revision that is going to be rebased
214 # Keep track of renamed files in the revision that is going to be rebased
215 # Here we simulate the copies and renames in the source changeset
215 # Here we simulate the copies and renames in the source changeset
216 cop, diver = copies.copies(repo, repo[rev], repo[target], repo[p2], True)
216 cop, diver = copies.copies(repo, repo[rev], repo[target], repo[p2], True)
217 m1 = repo[rev].manifest()
217 m1 = repo[rev].manifest()
218 m2 = repo[target].manifest()
218 m2 = repo[target].manifest()
219 for k, v in cop.iteritems():
219 for k, v in cop.iteritems():
220 if k in m1:
220 if k in m1:
221 if v in m1 or v in m2:
221 if v in m1 or v in m2:
222 repo.dirstate.copy(v, k)
222 repo.dirstate.copy(v, k)
223 if v in m2 and v not in m1:
223 if v in m2 and v not in m1:
224 repo.dirstate.remove(v)
224 repo.dirstate.remove(v)
225
225
226 newrev = concludenode(repo, rev, p1, p2, state, collapse,
226 newrev = concludenode(repo, rev, p1, p2, state, collapse,
227 extrafn=extrafn)
227 extrafn=extrafn)
228
228
229 # Update the state
229 # Update the state
230 if newrev is not None:
230 if newrev is not None:
231 state[rev] = repo[newrev].rev()
231 state[rev] = repo[newrev].rev()
232 else:
232 else:
233 if not collapse:
233 if not collapse:
234 repo.ui.note(_('no changes, revision %d skipped\n') % rev)
234 repo.ui.note(_('no changes, revision %d skipped\n') % rev)
235 repo.ui.debug(_('next revision set to %s\n') % p1)
235 repo.ui.debug(_('next revision set to %s\n') % p1)
236 skipped[rev] = True
236 skipped[rev] = True
237 state[rev] = p1
237 state[rev] = p1
238
238
239 def defineparents(repo, rev, target, state, targetancestors):
239 def defineparents(repo, rev, target, state, targetancestors):
240 'Return the new parent relationship of the revision that will be rebased'
240 'Return the new parent relationship of the revision that will be rebased'
241 parents = repo[rev].parents()
241 parents = repo[rev].parents()
242 p1 = p2 = nullrev
242 p1 = p2 = nullrev
243
243
244 P1n = parents[0].rev()
244 P1n = parents[0].rev()
245 if P1n in targetancestors:
245 if P1n in targetancestors:
246 p1 = target
246 p1 = target
247 elif P1n in state:
247 elif P1n in state:
248 p1 = state[P1n]
248 p1 = state[P1n]
249 else: # P1n external
249 else: # P1n external
250 p1 = target
250 p1 = target
251 p2 = P1n
251 p2 = P1n
252
252
253 if len(parents) == 2 and parents[1].rev() not in targetancestors:
253 if len(parents) == 2 and parents[1].rev() not in targetancestors:
254 P2n = parents[1].rev()
254 P2n = parents[1].rev()
255 # interesting second parent
255 # interesting second parent
256 if P2n in state:
256 if P2n in state:
257 if p1 == target: # P1n in targetancestors or external
257 if p1 == target: # P1n in targetancestors or external
258 p1 = state[P2n]
258 p1 = state[P2n]
259 else:
259 else:
260 p2 = state[P2n]
260 p2 = state[P2n]
261 else: # P2n external
261 else: # P2n external
262 if p2 != nullrev: # P1n external too => rev is a merged revision
262 if p2 != nullrev: # P1n external too => rev is a merged revision
263 raise util.Abort(_('cannot use revision %d as base, result '
263 raise util.Abort(_('cannot use revision %d as base, result '
264 'would have 3 parents') % rev)
264 'would have 3 parents') % rev)
265 p2 = P2n
265 p2 = P2n
266 return p1, p2
266 return p1, p2
267
267
268 def isagitpatch(repo, patchname):
268 def isagitpatch(repo, patchname):
269 'Return true if the given patch is in git format'
269 'Return true if the given patch is in git format'
270 mqpatch = os.path.join(repo.mq.path, patchname)
270 mqpatch = os.path.join(repo.mq.path, patchname)
271 for line in patch.linereader(file(mqpatch, 'rb')):
271 for line in patch.linereader(file(mqpatch, 'rb')):
272 if line.startswith('diff --git'):
272 if line.startswith('diff --git'):
273 return True
273 return True
274 return False
274 return False
275
275
276 def updatemq(repo, state, skipped, **opts):
276 def updatemq(repo, state, skipped, **opts):
277 'Update rebased mq patches - finalize and then import them'
277 'Update rebased mq patches - finalize and then import them'
278 mqrebase = {}
278 mqrebase = {}
279 for p in repo.mq.applied:
279 for p in repo.mq.applied:
280 if repo[p.rev].rev() in state:
280 if repo[p.rev].rev() in state:
281 repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') %
281 repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') %
282 (repo[p.rev].rev(), p.name))
282 (repo[p.rev].rev(), p.name))
283 mqrebase[repo[p.rev].rev()] = (p.name, isagitpatch(repo, p.name))
283 mqrebase[repo[p.rev].rev()] = (p.name, isagitpatch(repo, p.name))
284
284
285 if mqrebase:
285 if mqrebase:
286 repo.mq.finish(repo, mqrebase.keys())
286 repo.mq.finish(repo, mqrebase.keys())
287
287
288 # We must start import from the newest revision
288 # We must start import from the newest revision
289 mq = mqrebase.keys()
289 for rev in sorted(mqrebase, reverse=True):
290 mq.sort()
291 mq.reverse()
292 for rev in mq:
293 if rev not in skipped:
290 if rev not in skipped:
294 repo.ui.debug(_('import mq patch %d (%s)\n')
291 repo.ui.debug(_('import mq patch %d (%s)\n')
295 % (state[rev], mqrebase[rev][0]))
292 % (state[rev], mqrebase[rev][0]))
296 repo.mq.qimport(repo, (), patchname=mqrebase[rev][0],
293 repo.mq.qimport(repo, (), patchname=mqrebase[rev][0],
297 git=mqrebase[rev][1],rev=[str(state[rev])])
294 git=mqrebase[rev][1],rev=[str(state[rev])])
298 repo.mq.save_dirty()
295 repo.mq.save_dirty()
299
296
300 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
297 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
301 external):
298 external):
302 'Store the current status to allow recovery'
299 'Store the current status to allow recovery'
303 f = repo.opener("rebasestate", "w")
300 f = repo.opener("rebasestate", "w")
304 f.write(repo[originalwd].hex() + '\n')
301 f.write(repo[originalwd].hex() + '\n')
305 f.write(repo[target].hex() + '\n')
302 f.write(repo[target].hex() + '\n')
306 f.write(repo[external].hex() + '\n')
303 f.write(repo[external].hex() + '\n')
307 f.write('%d\n' % int(collapse))
304 f.write('%d\n' % int(collapse))
308 f.write('%d\n' % int(keep))
305 f.write('%d\n' % int(keep))
309 f.write('%d\n' % int(keepbranches))
306 f.write('%d\n' % int(keepbranches))
310 for d, v in state.iteritems():
307 for d, v in state.iteritems():
311 oldrev = repo[d].hex()
308 oldrev = repo[d].hex()
312 newrev = repo[v].hex()
309 newrev = repo[v].hex()
313 f.write("%s:%s\n" % (oldrev, newrev))
310 f.write("%s:%s\n" % (oldrev, newrev))
314 f.close()
311 f.close()
315 repo.ui.debug(_('rebase status stored\n'))
312 repo.ui.debug(_('rebase status stored\n'))
316
313
317 def clearstatus(repo):
314 def clearstatus(repo):
318 'Remove the status files'
315 'Remove the status files'
319 if os.path.exists(repo.join("rebasestate")):
316 if os.path.exists(repo.join("rebasestate")):
320 util.unlink(repo.join("rebasestate"))
317 util.unlink(repo.join("rebasestate"))
321
318
322 def restorestatus(repo):
319 def restorestatus(repo):
323 'Restore a previously stored status'
320 'Restore a previously stored status'
324 try:
321 try:
325 target = None
322 target = None
326 collapse = False
323 collapse = False
327 external = nullrev
324 external = nullrev
328 state = {}
325 state = {}
329 f = repo.opener("rebasestate")
326 f = repo.opener("rebasestate")
330 for i, l in enumerate(f.read().splitlines()):
327 for i, l in enumerate(f.read().splitlines()):
331 if i == 0:
328 if i == 0:
332 originalwd = repo[l].rev()
329 originalwd = repo[l].rev()
333 elif i == 1:
330 elif i == 1:
334 target = repo[l].rev()
331 target = repo[l].rev()
335 elif i == 2:
332 elif i == 2:
336 external = repo[l].rev()
333 external = repo[l].rev()
337 elif i == 3:
334 elif i == 3:
338 collapse = bool(int(l))
335 collapse = bool(int(l))
339 elif i == 4:
336 elif i == 4:
340 keep = bool(int(l))
337 keep = bool(int(l))
341 elif i == 5:
338 elif i == 5:
342 keepbranches = bool(int(l))
339 keepbranches = bool(int(l))
343 else:
340 else:
344 oldrev, newrev = l.split(':')
341 oldrev, newrev = l.split(':')
345 state[repo[oldrev].rev()] = repo[newrev].rev()
342 state[repo[oldrev].rev()] = repo[newrev].rev()
346 repo.ui.debug(_('rebase status resumed\n'))
343 repo.ui.debug(_('rebase status resumed\n'))
347 return originalwd, target, state, collapse, keep, keepbranches, external
344 return originalwd, target, state, collapse, keep, keepbranches, external
348 except IOError, err:
345 except IOError, err:
349 if err.errno != errno.ENOENT:
346 if err.errno != errno.ENOENT:
350 raise
347 raise
351 raise util.Abort(_('no rebase in progress'))
348 raise util.Abort(_('no rebase in progress'))
352
349
353 def abort(repo, originalwd, target, state):
350 def abort(repo, originalwd, target, state):
354 'Restore the repository to its original state'
351 'Restore the repository to its original state'
355 if set(repo.changelog.descendants(target)) - set(state.values()):
352 if set(repo.changelog.descendants(target)) - set(state.values()):
356 repo.ui.warn(_("warning: new changesets detected on target branch, "
353 repo.ui.warn(_("warning: new changesets detected on target branch, "
357 "not stripping\n"))
354 "not stripping\n"))
358 else:
355 else:
359 # Strip from the first rebased revision
356 # Strip from the first rebased revision
360 merge.update(repo, repo[originalwd].rev(), False, True, False)
357 merge.update(repo, repo[originalwd].rev(), False, True, False)
361 rebased = filter(lambda x: x > -1, state.values())
358 rebased = filter(lambda x: x > -1, state.values())
362 if rebased:
359 if rebased:
363 strippoint = min(rebased)
360 strippoint = min(rebased)
364 repair.strip(repo.ui, repo, repo[strippoint].node(), "strip")
361 repair.strip(repo.ui, repo, repo[strippoint].node(), "strip")
365 clearstatus(repo)
362 clearstatus(repo)
366 repo.ui.status(_('rebase aborted\n'))
363 repo.ui.status(_('rebase aborted\n'))
367
364
368 def buildstate(repo, dest, src, base, collapse):
365 def buildstate(repo, dest, src, base, collapse):
369 'Define which revisions are going to be rebased and where'
366 'Define which revisions are going to be rebased and where'
370 targetancestors = set()
367 targetancestors = set()
371
368
372 if not dest:
369 if not dest:
373 # Destination defaults to the latest revision in the current branch
370 # Destination defaults to the latest revision in the current branch
374 branch = repo[None].branch()
371 branch = repo[None].branch()
375 dest = repo[branch].rev()
372 dest = repo[branch].rev()
376 else:
373 else:
377 if 'qtip' in repo.tags() and (repo[dest].hex() in
374 if 'qtip' in repo.tags() and (repo[dest].hex() in
378 [s.rev for s in repo.mq.applied]):
375 [s.rev for s in repo.mq.applied]):
379 raise util.Abort(_('cannot rebase onto an applied mq patch'))
376 raise util.Abort(_('cannot rebase onto an applied mq patch'))
380 dest = repo[dest].rev()
377 dest = repo[dest].rev()
381
378
382 if src:
379 if src:
383 commonbase = repo[src].ancestor(repo[dest])
380 commonbase = repo[src].ancestor(repo[dest])
384 if commonbase == repo[src]:
381 if commonbase == repo[src]:
385 raise util.Abort(_('cannot rebase an ancestor'))
382 raise util.Abort(_('cannot rebase an ancestor'))
386 if commonbase == repo[dest]:
383 if commonbase == repo[dest]:
387 raise util.Abort(_('cannot rebase a descendant'))
384 raise util.Abort(_('cannot rebase a descendant'))
388 source = repo[src].rev()
385 source = repo[src].rev()
389 else:
386 else:
390 if base:
387 if base:
391 cwd = repo[base].rev()
388 cwd = repo[base].rev()
392 else:
389 else:
393 cwd = repo['.'].rev()
390 cwd = repo['.'].rev()
394
391
395 if cwd == dest:
392 if cwd == dest:
396 repo.ui.debug(_('already working on current\n'))
393 repo.ui.debug(_('already working on current\n'))
397 return None
394 return None
398
395
399 targetancestors = set(repo.changelog.ancestors(dest))
396 targetancestors = set(repo.changelog.ancestors(dest))
400 if cwd in targetancestors:
397 if cwd in targetancestors:
401 repo.ui.debug(_('already working on the current branch\n'))
398 repo.ui.debug(_('already working on the current branch\n'))
402 return None
399 return None
403
400
404 cwdancestors = set(repo.changelog.ancestors(cwd))
401 cwdancestors = set(repo.changelog.ancestors(cwd))
405 cwdancestors.add(cwd)
402 cwdancestors.add(cwd)
406 rebasingbranch = cwdancestors - targetancestors
403 rebasingbranch = cwdancestors - targetancestors
407 source = min(rebasingbranch)
404 source = min(rebasingbranch)
408
405
409 repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source))
406 repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source))
410 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
407 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
411 external = nullrev
408 external = nullrev
412 if collapse:
409 if collapse:
413 if not targetancestors:
410 if not targetancestors:
414 targetancestors = set(repo.changelog.ancestors(dest))
411 targetancestors = set(repo.changelog.ancestors(dest))
415 for rev in state:
412 for rev in state:
416 # Check externals and fail if there are more than one
413 # Check externals and fail if there are more than one
417 for p in repo[rev].parents():
414 for p in repo[rev].parents():
418 if (p.rev() not in state and p.rev() != source
415 if (p.rev() not in state and p.rev() != source
419 and p.rev() not in targetancestors):
416 and p.rev() not in targetancestors):
420 if external != nullrev:
417 if external != nullrev:
421 raise util.Abort(_('unable to collapse, there is more '
418 raise util.Abort(_('unable to collapse, there is more '
422 'than one external parent'))
419 'than one external parent'))
423 external = p.rev()
420 external = p.rev()
424
421
425 state[source] = nullrev
422 state[source] = nullrev
426 return repo['.'].rev(), repo[dest].rev(), state, external
423 return repo['.'].rev(), repo[dest].rev(), state, external
427
424
428 def pullrebase(orig, ui, repo, *args, **opts):
425 def pullrebase(orig, ui, repo, *args, **opts):
429 'Call rebase after pull if the latter has been invoked with --rebase'
426 'Call rebase after pull if the latter has been invoked with --rebase'
430 if opts.get('rebase'):
427 if opts.get('rebase'):
431 if opts.get('update'):
428 if opts.get('update'):
432 del opts.get['update']
429 del opts.get['update']
433 ui.debug(_('--update and --rebase are not compatible, ignoring '
430 ui.debug(_('--update and --rebase are not compatible, ignoring '
434 'the update flag\n'))
431 'the update flag\n'))
435
432
436 cmdutil.bail_if_changed(repo)
433 cmdutil.bail_if_changed(repo)
437 revsprepull = len(repo)
434 revsprepull = len(repo)
438 orig(ui, repo, *args, **opts)
435 orig(ui, repo, *args, **opts)
439 revspostpull = len(repo)
436 revspostpull = len(repo)
440 if revspostpull > revsprepull:
437 if revspostpull > revsprepull:
441 rebase(ui, repo, **opts)
438 rebase(ui, repo, **opts)
442 branch = repo[None].branch()
439 branch = repo[None].branch()
443 dest = repo[branch].rev()
440 dest = repo[branch].rev()
444 if dest != repo['.'].rev():
441 if dest != repo['.'].rev():
445 # there was nothing to rebase we force an update
442 # there was nothing to rebase we force an update
446 merge.update(repo, dest, False, False, False)
443 merge.update(repo, dest, False, False, False)
447 else:
444 else:
448 orig(ui, repo, *args, **opts)
445 orig(ui, repo, *args, **opts)
449
446
450 def uisetup(ui):
447 def uisetup(ui):
451 'Replace pull with a decorator to provide --rebase option'
448 'Replace pull with a decorator to provide --rebase option'
452 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
449 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
453 entry[1].append(('', 'rebase', None,
450 entry[1].append(('', 'rebase', None,
454 _("rebase working directory to branch head"))
451 _("rebase working directory to branch head"))
455 )
452 )
456
453
457 cmdtable = {
454 cmdtable = {
458 "rebase":
455 "rebase":
459 (rebase,
456 (rebase,
460 [
457 [
461 ('s', 'source', '', _('rebase from a given revision')),
458 ('s', 'source', '', _('rebase from a given revision')),
462 ('b', 'base', '', _('rebase from the base of a given revision')),
459 ('b', 'base', '', _('rebase from the base of a given revision')),
463 ('d', 'dest', '', _('rebase onto a given revision')),
460 ('d', 'dest', '', _('rebase onto a given revision')),
464 ('', 'collapse', False, _('collapse the rebased revisions')),
461 ('', 'collapse', False, _('collapse the rebased revisions')),
465 ('', 'keep', False, _('keep original revisions')),
462 ('', 'keep', False, _('keep original revisions')),
466 ('', 'keepbranches', False, _('keep original branches')),
463 ('', 'keepbranches', False, _('keep original branches')),
467 ('c', 'continue', False, _('continue an interrupted rebase')),
464 ('c', 'continue', False, _('continue an interrupted rebase')),
468 ('a', 'abort', False, _('abort an interrupted rebase')),] +
465 ('a', 'abort', False, _('abort an interrupted rebase')),] +
469 templateopts,
466 templateopts,
470 _('hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--keep] '
467 _('hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--keep] '
471 '[--keepbranches] | [-c] | [-a]')),
468 '[--keepbranches] | [-c] | [-a]')),
472 }
469 }
@@ -1,1226 +1,1225 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat, encoding
10 import os, sys, bisect, stat, encoding
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno, error
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno, error
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
104 dst = src.baseui # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src # keep all global options
107 dst = src # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = {}, []
157 seen, l = {}, []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen[rev] = 1
167 seen[rev] = 1
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen[rev] = 1
173 seen[rev] = 1
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 inst.args[0])
221 inst.args[0])
222
222
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 if not globbed and default == 'relpath':
239 if not globbed and default == 'relpath':
240 pats = util.expand_glob(pats or [])
240 pats = util.expand_glob(pats or [])
241 m = _match.match(repo.root, repo.getcwd(), pats,
241 m = _match.match(repo.root, repo.getcwd(), pats,
242 opts.get('include'), opts.get('exclude'), default)
242 opts.get('include'), opts.get('exclude'), default)
243 def badfn(f, msg):
243 def badfn(f, msg):
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 return False
245 return False
246 m.bad = badfn
246 m.bad = badfn
247 return m
247 return m
248
248
249 def matchall(repo):
249 def matchall(repo):
250 return _match.always(repo.root, repo.getcwd())
250 return _match.always(repo.root, repo.getcwd())
251
251
252 def matchfiles(repo, files):
252 def matchfiles(repo, files):
253 return _match.exact(repo.root, repo.getcwd(), files)
253 return _match.exact(repo.root, repo.getcwd(), files)
254
254
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
256 '''find renamed files -- yields (before, after, score) tuples'''
256 '''find renamed files -- yields (before, after, score) tuples'''
257 if added is None or removed is None:
257 if added is None or removed is None:
258 added, removed = repo.status()[1:3]
258 added, removed = repo.status()[1:3]
259 ctx = repo['.']
259 ctx = repo['.']
260 for a in added:
260 for a in added:
261 aa = repo.wread(a)
261 aa = repo.wread(a)
262 bestname, bestscore = None, threshold
262 bestname, bestscore = None, threshold
263 for r in removed:
263 for r in removed:
264 rr = ctx.filectx(r).data()
264 rr = ctx.filectx(r).data()
265
265
266 # bdiff.blocks() returns blocks of matching lines
266 # bdiff.blocks() returns blocks of matching lines
267 # count the number of bytes in each
267 # count the number of bytes in each
268 equal = 0
268 equal = 0
269 alines = mdiff.splitnewlines(aa)
269 alines = mdiff.splitnewlines(aa)
270 matches = bdiff.blocks(aa, rr)
270 matches = bdiff.blocks(aa, rr)
271 for x1,x2,y1,y2 in matches:
271 for x1,x2,y1,y2 in matches:
272 for line in alines[x1:x2]:
272 for line in alines[x1:x2]:
273 equal += len(line)
273 equal += len(line)
274
274
275 lengths = len(aa) + len(rr)
275 lengths = len(aa) + len(rr)
276 if lengths:
276 if lengths:
277 myscore = equal*2.0 / lengths
277 myscore = equal*2.0 / lengths
278 if myscore >= bestscore:
278 if myscore >= bestscore:
279 bestname, bestscore = r, myscore
279 bestname, bestscore = r, myscore
280 if bestname:
280 if bestname:
281 yield bestname, a, bestscore
281 yield bestname, a, bestscore
282
282
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
284 if dry_run is None:
284 if dry_run is None:
285 dry_run = opts.get('dry_run')
285 dry_run = opts.get('dry_run')
286 if similarity is None:
286 if similarity is None:
287 similarity = float(opts.get('similarity') or 0)
287 similarity = float(opts.get('similarity') or 0)
288 add, remove = [], []
288 add, remove = [], []
289 mapping = {}
289 mapping = {}
290 audit_path = util.path_auditor(repo.root)
290 audit_path = util.path_auditor(repo.root)
291 m = match(repo, pats, opts)
291 m = match(repo, pats, opts)
292 for abs in repo.walk(m):
292 for abs in repo.walk(m):
293 target = repo.wjoin(abs)
293 target = repo.wjoin(abs)
294 good = True
294 good = True
295 try:
295 try:
296 audit_path(abs)
296 audit_path(abs)
297 except:
297 except:
298 good = False
298 good = False
299 rel = m.rel(abs)
299 rel = m.rel(abs)
300 exact = m.exact(abs)
300 exact = m.exact(abs)
301 if good and abs not in repo.dirstate:
301 if good and abs not in repo.dirstate:
302 add.append(abs)
302 add.append(abs)
303 mapping[abs] = rel, m.exact(abs)
303 mapping[abs] = rel, m.exact(abs)
304 if repo.ui.verbose or not exact:
304 if repo.ui.verbose or not exact:
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
307 or (os.path.isdir(target) and not os.path.islink(target))):
307 or (os.path.isdir(target) and not os.path.islink(target))):
308 remove.append(abs)
308 remove.append(abs)
309 mapping[abs] = rel, exact
309 mapping[abs] = rel, exact
310 if repo.ui.verbose or not exact:
310 if repo.ui.verbose or not exact:
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
312 if not dry_run:
312 if not dry_run:
313 repo.remove(remove)
313 repo.remove(remove)
314 repo.add(add)
314 repo.add(add)
315 if similarity > 0:
315 if similarity > 0:
316 for old, new, score in findrenames(repo, add, remove, similarity):
316 for old, new, score in findrenames(repo, add, remove, similarity):
317 oldrel, oldexact = mapping[old]
317 oldrel, oldexact = mapping[old]
318 newrel, newexact = mapping[new]
318 newrel, newexact = mapping[new]
319 if repo.ui.verbose or not oldexact or not newexact:
319 if repo.ui.verbose or not oldexact or not newexact:
320 repo.ui.status(_('recording removal of %s as rename to %s '
320 repo.ui.status(_('recording removal of %s as rename to %s '
321 '(%d%% similar)\n') %
321 '(%d%% similar)\n') %
322 (oldrel, newrel, score * 100))
322 (oldrel, newrel, score * 100))
323 if not dry_run:
323 if not dry_run:
324 repo.copy(old, new)
324 repo.copy(old, new)
325
325
326 def copy(ui, repo, pats, opts, rename=False):
326 def copy(ui, repo, pats, opts, rename=False):
327 # called with the repo lock held
327 # called with the repo lock held
328 #
328 #
329 # hgsep => pathname that uses "/" to separate directories
329 # hgsep => pathname that uses "/" to separate directories
330 # ossep => pathname that uses os.sep to separate directories
330 # ossep => pathname that uses os.sep to separate directories
331 cwd = repo.getcwd()
331 cwd = repo.getcwd()
332 targets = {}
332 targets = {}
333 after = opts.get("after")
333 after = opts.get("after")
334 dryrun = opts.get("dry_run")
334 dryrun = opts.get("dry_run")
335
335
336 def walkpat(pat):
336 def walkpat(pat):
337 srcs = []
337 srcs = []
338 m = match(repo, [pat], opts, globbed=True)
338 m = match(repo, [pat], opts, globbed=True)
339 for abs in repo.walk(m):
339 for abs in repo.walk(m):
340 state = repo.dirstate[abs]
340 state = repo.dirstate[abs]
341 rel = m.rel(abs)
341 rel = m.rel(abs)
342 exact = m.exact(abs)
342 exact = m.exact(abs)
343 if state in '?r':
343 if state in '?r':
344 if exact and state == '?':
344 if exact and state == '?':
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
346 if exact and state == 'r':
346 if exact and state == 'r':
347 ui.warn(_('%s: not copying - file has been marked for'
347 ui.warn(_('%s: not copying - file has been marked for'
348 ' remove\n') % rel)
348 ' remove\n') % rel)
349 continue
349 continue
350 # abs: hgsep
350 # abs: hgsep
351 # rel: ossep
351 # rel: ossep
352 srcs.append((abs, rel, exact))
352 srcs.append((abs, rel, exact))
353 return srcs
353 return srcs
354
354
355 # abssrc: hgsep
355 # abssrc: hgsep
356 # relsrc: ossep
356 # relsrc: ossep
357 # otarget: ossep
357 # otarget: ossep
358 def copyfile(abssrc, relsrc, otarget, exact):
358 def copyfile(abssrc, relsrc, otarget, exact):
359 abstarget = util.canonpath(repo.root, cwd, otarget)
359 abstarget = util.canonpath(repo.root, cwd, otarget)
360 reltarget = repo.pathto(abstarget, cwd)
360 reltarget = repo.pathto(abstarget, cwd)
361 target = repo.wjoin(abstarget)
361 target = repo.wjoin(abstarget)
362 src = repo.wjoin(abssrc)
362 src = repo.wjoin(abssrc)
363 state = repo.dirstate[abstarget]
363 state = repo.dirstate[abstarget]
364
364
365 # check for collisions
365 # check for collisions
366 prevsrc = targets.get(abstarget)
366 prevsrc = targets.get(abstarget)
367 if prevsrc is not None:
367 if prevsrc is not None:
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
369 (reltarget, repo.pathto(abssrc, cwd),
369 (reltarget, repo.pathto(abssrc, cwd),
370 repo.pathto(prevsrc, cwd)))
370 repo.pathto(prevsrc, cwd)))
371 return
371 return
372
372
373 # check for overwrites
373 # check for overwrites
374 exists = os.path.exists(target)
374 exists = os.path.exists(target)
375 if not after and exists or after and state in 'mn':
375 if not after and exists or after and state in 'mn':
376 if not opts['force']:
376 if not opts['force']:
377 ui.warn(_('%s: not overwriting - file exists\n') %
377 ui.warn(_('%s: not overwriting - file exists\n') %
378 reltarget)
378 reltarget)
379 return
379 return
380
380
381 if after:
381 if after:
382 if not exists:
382 if not exists:
383 return
383 return
384 elif not dryrun:
384 elif not dryrun:
385 try:
385 try:
386 if exists:
386 if exists:
387 os.unlink(target)
387 os.unlink(target)
388 targetdir = os.path.dirname(target) or '.'
388 targetdir = os.path.dirname(target) or '.'
389 if not os.path.isdir(targetdir):
389 if not os.path.isdir(targetdir):
390 os.makedirs(targetdir)
390 os.makedirs(targetdir)
391 util.copyfile(src, target)
391 util.copyfile(src, target)
392 except IOError, inst:
392 except IOError, inst:
393 if inst.errno == errno.ENOENT:
393 if inst.errno == errno.ENOENT:
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
395 else:
395 else:
396 ui.warn(_('%s: cannot copy - %s\n') %
396 ui.warn(_('%s: cannot copy - %s\n') %
397 (relsrc, inst.strerror))
397 (relsrc, inst.strerror))
398 return True # report a failure
398 return True # report a failure
399
399
400 if ui.verbose or not exact:
400 if ui.verbose or not exact:
401 if rename:
401 if rename:
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
403 else:
403 else:
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
405
405
406 targets[abstarget] = abssrc
406 targets[abstarget] = abssrc
407
407
408 # fix up dirstate
408 # fix up dirstate
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
410 if abstarget == origsrc: # copying back a copy?
410 if abstarget == origsrc: # copying back a copy?
411 if state not in 'mn' and not dryrun:
411 if state not in 'mn' and not dryrun:
412 repo.dirstate.normallookup(abstarget)
412 repo.dirstate.normallookup(abstarget)
413 else:
413 else:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
415 if not ui.quiet:
415 if not ui.quiet:
416 ui.warn(_("%s has not been committed yet, so no copy "
416 ui.warn(_("%s has not been committed yet, so no copy "
417 "data will be stored for %s.\n")
417 "data will be stored for %s.\n")
418 % (repo.pathto(origsrc, cwd), reltarget))
418 % (repo.pathto(origsrc, cwd), reltarget))
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
420 repo.add([abstarget])
420 repo.add([abstarget])
421 elif not dryrun:
421 elif not dryrun:
422 repo.copy(origsrc, abstarget)
422 repo.copy(origsrc, abstarget)
423
423
424 if rename and not dryrun:
424 if rename and not dryrun:
425 repo.remove([abssrc], not after)
425 repo.remove([abssrc], not after)
426
426
427 # pat: ossep
427 # pat: ossep
428 # dest ossep
428 # dest ossep
429 # srcs: list of (hgsep, hgsep, ossep, bool)
429 # srcs: list of (hgsep, hgsep, ossep, bool)
430 # return: function that takes hgsep and returns ossep
430 # return: function that takes hgsep and returns ossep
431 def targetpathfn(pat, dest, srcs):
431 def targetpathfn(pat, dest, srcs):
432 if os.path.isdir(pat):
432 if os.path.isdir(pat):
433 abspfx = util.canonpath(repo.root, cwd, pat)
433 abspfx = util.canonpath(repo.root, cwd, pat)
434 abspfx = util.localpath(abspfx)
434 abspfx = util.localpath(abspfx)
435 if destdirexists:
435 if destdirexists:
436 striplen = len(os.path.split(abspfx)[0])
436 striplen = len(os.path.split(abspfx)[0])
437 else:
437 else:
438 striplen = len(abspfx)
438 striplen = len(abspfx)
439 if striplen:
439 if striplen:
440 striplen += len(os.sep)
440 striplen += len(os.sep)
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
442 elif destdirexists:
442 elif destdirexists:
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 os.path.basename(util.localpath(p)))
444 os.path.basename(util.localpath(p)))
445 else:
445 else:
446 res = lambda p: dest
446 res = lambda p: dest
447 return res
447 return res
448
448
449 # pat: ossep
449 # pat: ossep
450 # dest ossep
450 # dest ossep
451 # srcs: list of (hgsep, hgsep, ossep, bool)
451 # srcs: list of (hgsep, hgsep, ossep, bool)
452 # return: function that takes hgsep and returns ossep
452 # return: function that takes hgsep and returns ossep
453 def targetpathafterfn(pat, dest, srcs):
453 def targetpathafterfn(pat, dest, srcs):
454 if util.patkind(pat, None)[0]:
454 if util.patkind(pat, None)[0]:
455 # a mercurial pattern
455 # a mercurial pattern
456 res = lambda p: os.path.join(dest,
456 res = lambda p: os.path.join(dest,
457 os.path.basename(util.localpath(p)))
457 os.path.basename(util.localpath(p)))
458 else:
458 else:
459 abspfx = util.canonpath(repo.root, cwd, pat)
459 abspfx = util.canonpath(repo.root, cwd, pat)
460 if len(abspfx) < len(srcs[0][0]):
460 if len(abspfx) < len(srcs[0][0]):
461 # A directory. Either the target path contains the last
461 # A directory. Either the target path contains the last
462 # component of the source path or it does not.
462 # component of the source path or it does not.
463 def evalpath(striplen):
463 def evalpath(striplen):
464 score = 0
464 score = 0
465 for s in srcs:
465 for s in srcs:
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
467 if os.path.exists(t):
467 if os.path.exists(t):
468 score += 1
468 score += 1
469 return score
469 return score
470
470
471 abspfx = util.localpath(abspfx)
471 abspfx = util.localpath(abspfx)
472 striplen = len(abspfx)
472 striplen = len(abspfx)
473 if striplen:
473 if striplen:
474 striplen += len(os.sep)
474 striplen += len(os.sep)
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
476 score = evalpath(striplen)
476 score = evalpath(striplen)
477 striplen1 = len(os.path.split(abspfx)[0])
477 striplen1 = len(os.path.split(abspfx)[0])
478 if striplen1:
478 if striplen1:
479 striplen1 += len(os.sep)
479 striplen1 += len(os.sep)
480 if evalpath(striplen1) > score:
480 if evalpath(striplen1) > score:
481 striplen = striplen1
481 striplen = striplen1
482 res = lambda p: os.path.join(dest,
482 res = lambda p: os.path.join(dest,
483 util.localpath(p)[striplen:])
483 util.localpath(p)[striplen:])
484 else:
484 else:
485 # a file
485 # a file
486 if destdirexists:
486 if destdirexists:
487 res = lambda p: os.path.join(dest,
487 res = lambda p: os.path.join(dest,
488 os.path.basename(util.localpath(p)))
488 os.path.basename(util.localpath(p)))
489 else:
489 else:
490 res = lambda p: dest
490 res = lambda p: dest
491 return res
491 return res
492
492
493
493
494 pats = util.expand_glob(pats)
494 pats = util.expand_glob(pats)
495 if not pats:
495 if not pats:
496 raise util.Abort(_('no source or destination specified'))
496 raise util.Abort(_('no source or destination specified'))
497 if len(pats) == 1:
497 if len(pats) == 1:
498 raise util.Abort(_('no destination specified'))
498 raise util.Abort(_('no destination specified'))
499 dest = pats.pop()
499 dest = pats.pop()
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
501 if not destdirexists:
501 if not destdirexists:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
503 raise util.Abort(_('with multiple sources, destination must be an '
503 raise util.Abort(_('with multiple sources, destination must be an '
504 'existing directory'))
504 'existing directory'))
505 if util.endswithsep(dest):
505 if util.endswithsep(dest):
506 raise util.Abort(_('destination %s is not a directory') % dest)
506 raise util.Abort(_('destination %s is not a directory') % dest)
507
507
508 tfn = targetpathfn
508 tfn = targetpathfn
509 if after:
509 if after:
510 tfn = targetpathafterfn
510 tfn = targetpathafterfn
511 copylist = []
511 copylist = []
512 for pat in pats:
512 for pat in pats:
513 srcs = walkpat(pat)
513 srcs = walkpat(pat)
514 if not srcs:
514 if not srcs:
515 continue
515 continue
516 copylist.append((tfn(pat, dest, srcs), srcs))
516 copylist.append((tfn(pat, dest, srcs), srcs))
517 if not copylist:
517 if not copylist:
518 raise util.Abort(_('no files to copy'))
518 raise util.Abort(_('no files to copy'))
519
519
520 errors = 0
520 errors = 0
521 for targetpath, srcs in copylist:
521 for targetpath, srcs in copylist:
522 for abssrc, relsrc, exact in srcs:
522 for abssrc, relsrc, exact in srcs:
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
524 errors += 1
524 errors += 1
525
525
526 if errors:
526 if errors:
527 ui.warn(_('(consider using --after)\n'))
527 ui.warn(_('(consider using --after)\n'))
528
528
529 return errors
529 return errors
530
530
531 def service(opts, parentfn=None, initfn=None, runfn=None):
531 def service(opts, parentfn=None, initfn=None, runfn=None):
532 '''Run a command as a service.'''
532 '''Run a command as a service.'''
533
533
534 if opts['daemon'] and not opts['daemon_pipefds']:
534 if opts['daemon'] and not opts['daemon_pipefds']:
535 rfd, wfd = os.pipe()
535 rfd, wfd = os.pipe()
536 args = sys.argv[:]
536 args = sys.argv[:]
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
538 # Don't pass --cwd to the child process, because we've already
538 # Don't pass --cwd to the child process, because we've already
539 # changed directory.
539 # changed directory.
540 for i in xrange(1,len(args)):
540 for i in xrange(1,len(args)):
541 if args[i].startswith('--cwd='):
541 if args[i].startswith('--cwd='):
542 del args[i]
542 del args[i]
543 break
543 break
544 elif args[i].startswith('--cwd'):
544 elif args[i].startswith('--cwd'):
545 del args[i:i+2]
545 del args[i:i+2]
546 break
546 break
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
548 args[0], args)
548 args[0], args)
549 os.close(wfd)
549 os.close(wfd)
550 os.read(rfd, 1)
550 os.read(rfd, 1)
551 if parentfn:
551 if parentfn:
552 return parentfn(pid)
552 return parentfn(pid)
553 else:
553 else:
554 os._exit(0)
554 os._exit(0)
555
555
556 if initfn:
556 if initfn:
557 initfn()
557 initfn()
558
558
559 if opts['pid_file']:
559 if opts['pid_file']:
560 fp = open(opts['pid_file'], 'w')
560 fp = open(opts['pid_file'], 'w')
561 fp.write(str(os.getpid()) + '\n')
561 fp.write(str(os.getpid()) + '\n')
562 fp.close()
562 fp.close()
563
563
564 if opts['daemon_pipefds']:
564 if opts['daemon_pipefds']:
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
566 os.close(rfd)
566 os.close(rfd)
567 try:
567 try:
568 os.setsid()
568 os.setsid()
569 except AttributeError:
569 except AttributeError:
570 pass
570 pass
571 os.write(wfd, 'y')
571 os.write(wfd, 'y')
572 os.close(wfd)
572 os.close(wfd)
573 sys.stdout.flush()
573 sys.stdout.flush()
574 sys.stderr.flush()
574 sys.stderr.flush()
575 fd = os.open(util.nulldev, os.O_RDWR)
575 fd = os.open(util.nulldev, os.O_RDWR)
576 if fd != 0: os.dup2(fd, 0)
576 if fd != 0: os.dup2(fd, 0)
577 if fd != 1: os.dup2(fd, 1)
577 if fd != 1: os.dup2(fd, 1)
578 if fd != 2: os.dup2(fd, 2)
578 if fd != 2: os.dup2(fd, 2)
579 if fd not in (0, 1, 2): os.close(fd)
579 if fd not in (0, 1, 2): os.close(fd)
580
580
581 if runfn:
581 if runfn:
582 return runfn()
582 return runfn()
583
583
584 class changeset_printer(object):
584 class changeset_printer(object):
585 '''show changeset information when templating not requested.'''
585 '''show changeset information when templating not requested.'''
586
586
587 def __init__(self, ui, repo, patch, diffopts, buffered):
587 def __init__(self, ui, repo, patch, diffopts, buffered):
588 self.ui = ui
588 self.ui = ui
589 self.repo = repo
589 self.repo = repo
590 self.buffered = buffered
590 self.buffered = buffered
591 self.patch = patch
591 self.patch = patch
592 self.diffopts = diffopts
592 self.diffopts = diffopts
593 self.header = {}
593 self.header = {}
594 self.hunk = {}
594 self.hunk = {}
595 self.lastheader = None
595 self.lastheader = None
596
596
597 def flush(self, rev):
597 def flush(self, rev):
598 if rev in self.header:
598 if rev in self.header:
599 h = self.header[rev]
599 h = self.header[rev]
600 if h != self.lastheader:
600 if h != self.lastheader:
601 self.lastheader = h
601 self.lastheader = h
602 self.ui.write(h)
602 self.ui.write(h)
603 del self.header[rev]
603 del self.header[rev]
604 if rev in self.hunk:
604 if rev in self.hunk:
605 self.ui.write(self.hunk[rev])
605 self.ui.write(self.hunk[rev])
606 del self.hunk[rev]
606 del self.hunk[rev]
607 return 1
607 return 1
608 return 0
608 return 0
609
609
610 def show(self, ctx, copies=(), **props):
610 def show(self, ctx, copies=(), **props):
611 if self.buffered:
611 if self.buffered:
612 self.ui.pushbuffer()
612 self.ui.pushbuffer()
613 self._show(ctx, copies, props)
613 self._show(ctx, copies, props)
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
615 else:
615 else:
616 self._show(ctx, copies, props)
616 self._show(ctx, copies, props)
617
617
618 def _show(self, ctx, copies, props):
618 def _show(self, ctx, copies, props):
619 '''show a single changeset or file revision'''
619 '''show a single changeset or file revision'''
620 changenode = ctx.node()
620 changenode = ctx.node()
621 rev = ctx.rev()
621 rev = ctx.rev()
622
622
623 if self.ui.quiet:
623 if self.ui.quiet:
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
625 return
625 return
626
626
627 log = self.repo.changelog
627 log = self.repo.changelog
628 changes = log.read(changenode)
628 changes = log.read(changenode)
629 date = util.datestr(changes[2])
629 date = util.datestr(changes[2])
630 extra = changes[5]
630 extra = changes[5]
631 branch = extra.get("branch")
631 branch = extra.get("branch")
632
632
633 hexfunc = self.ui.debugflag and hex or short
633 hexfunc = self.ui.debugflag and hex or short
634
634
635 parents = [(p, hexfunc(log.node(p)))
635 parents = [(p, hexfunc(log.node(p)))
636 for p in self._meaningful_parentrevs(log, rev)]
636 for p in self._meaningful_parentrevs(log, rev)]
637
637
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
639
639
640 # don't show the default branch name
640 # don't show the default branch name
641 if branch != 'default':
641 if branch != 'default':
642 branch = encoding.tolocal(branch)
642 branch = encoding.tolocal(branch)
643 self.ui.write(_("branch: %s\n") % branch)
643 self.ui.write(_("branch: %s\n") % branch)
644 for tag in self.repo.nodetags(changenode):
644 for tag in self.repo.nodetags(changenode):
645 self.ui.write(_("tag: %s\n") % tag)
645 self.ui.write(_("tag: %s\n") % tag)
646 for parent in parents:
646 for parent in parents:
647 self.ui.write(_("parent: %d:%s\n") % parent)
647 self.ui.write(_("parent: %d:%s\n") % parent)
648
648
649 if self.ui.debugflag:
649 if self.ui.debugflag:
650 self.ui.write(_("manifest: %d:%s\n") %
650 self.ui.write(_("manifest: %d:%s\n") %
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
652 self.ui.write(_("user: %s\n") % changes[1])
652 self.ui.write(_("user: %s\n") % changes[1])
653 self.ui.write(_("date: %s\n") % date)
653 self.ui.write(_("date: %s\n") % date)
654
654
655 if self.ui.debugflag:
655 if self.ui.debugflag:
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
658 files):
658 files):
659 if value:
659 if value:
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
661 elif changes[3] and self.ui.verbose:
661 elif changes[3] and self.ui.verbose:
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
663 if copies and self.ui.verbose:
663 if copies and self.ui.verbose:
664 copies = ['%s (%s)' % c for c in copies]
664 copies = ['%s (%s)' % c for c in copies]
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
666
666
667 if extra and self.ui.debugflag:
667 if extra and self.ui.debugflag:
668 for key, value in sorted(extra.items()):
668 for key, value in sorted(extra.items()):
669 self.ui.write(_("extra: %s=%s\n")
669 self.ui.write(_("extra: %s=%s\n")
670 % (key, value.encode('string_escape')))
670 % (key, value.encode('string_escape')))
671
671
672 description = changes[4].strip()
672 description = changes[4].strip()
673 if description:
673 if description:
674 if self.ui.verbose:
674 if self.ui.verbose:
675 self.ui.write(_("description:\n"))
675 self.ui.write(_("description:\n"))
676 self.ui.write(description)
676 self.ui.write(description)
677 self.ui.write("\n\n")
677 self.ui.write("\n\n")
678 else:
678 else:
679 self.ui.write(_("summary: %s\n") %
679 self.ui.write(_("summary: %s\n") %
680 description.splitlines()[0])
680 description.splitlines()[0])
681 self.ui.write("\n")
681 self.ui.write("\n")
682
682
683 self.showpatch(changenode)
683 self.showpatch(changenode)
684
684
685 def showpatch(self, node):
685 def showpatch(self, node):
686 if self.patch:
686 if self.patch:
687 prev = self.repo.changelog.parents(node)[0]
687 prev = self.repo.changelog.parents(node)[0]
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
689 opts=patch.diffopts(self.ui, self.diffopts))
689 opts=patch.diffopts(self.ui, self.diffopts))
690 for chunk in chunks:
690 for chunk in chunks:
691 self.ui.write(chunk)
691 self.ui.write(chunk)
692 self.ui.write("\n")
692 self.ui.write("\n")
693
693
694 def _meaningful_parentrevs(self, log, rev):
694 def _meaningful_parentrevs(self, log, rev):
695 """Return list of meaningful (or all if debug) parentrevs for rev.
695 """Return list of meaningful (or all if debug) parentrevs for rev.
696
696
697 For merges (two non-nullrev revisions) both parents are meaningful.
697 For merges (two non-nullrev revisions) both parents are meaningful.
698 Otherwise the first parent revision is considered meaningful if it
698 Otherwise the first parent revision is considered meaningful if it
699 is not the preceding revision.
699 is not the preceding revision.
700 """
700 """
701 parents = log.parentrevs(rev)
701 parents = log.parentrevs(rev)
702 if not self.ui.debugflag and parents[1] == nullrev:
702 if not self.ui.debugflag and parents[1] == nullrev:
703 if parents[0] >= rev - 1:
703 if parents[0] >= rev - 1:
704 parents = []
704 parents = []
705 else:
705 else:
706 parents = [parents[0]]
706 parents = [parents[0]]
707 return parents
707 return parents
708
708
709
709
710 class changeset_templater(changeset_printer):
710 class changeset_templater(changeset_printer):
711 '''format changeset information.'''
711 '''format changeset information.'''
712
712
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
715 filters = templatefilters.filters.copy()
715 filters = templatefilters.filters.copy()
716 filters['formatnode'] = (ui.debugflag and (lambda x: x)
716 filters['formatnode'] = (ui.debugflag and (lambda x: x)
717 or (lambda x: x[:12]))
717 or (lambda x: x[:12]))
718 self.t = templater.templater(mapfile, filters,
718 self.t = templater.templater(mapfile, filters,
719 cache={
719 cache={
720 'parent': '{rev}:{node|formatnode} ',
720 'parent': '{rev}:{node|formatnode} ',
721 'manifest': '{rev}:{node|formatnode}',
721 'manifest': '{rev}:{node|formatnode}',
722 'filecopy': '{name} ({source})'})
722 'filecopy': '{name} ({source})'})
723
723
724 def use_template(self, t):
724 def use_template(self, t):
725 '''set template string to use'''
725 '''set template string to use'''
726 self.t.cache['changeset'] = t
726 self.t.cache['changeset'] = t
727
727
728 def _meaningful_parentrevs(self, ctx):
728 def _meaningful_parentrevs(self, ctx):
729 """Return list of meaningful (or all if debug) parentrevs for rev.
729 """Return list of meaningful (or all if debug) parentrevs for rev.
730 """
730 """
731 parents = ctx.parents()
731 parents = ctx.parents()
732 if len(parents) > 1:
732 if len(parents) > 1:
733 return parents
733 return parents
734 if self.ui.debugflag:
734 if self.ui.debugflag:
735 return [parents[0], self.repo['null']]
735 return [parents[0], self.repo['null']]
736 if parents[0].rev() >= ctx.rev() - 1:
736 if parents[0].rev() >= ctx.rev() - 1:
737 return []
737 return []
738 return parents
738 return parents
739
739
740 def _show(self, ctx, copies, props):
740 def _show(self, ctx, copies, props):
741 '''show a single changeset or file revision'''
741 '''show a single changeset or file revision'''
742
742
743 def showlist(name, values, plural=None, **args):
743 def showlist(name, values, plural=None, **args):
744 '''expand set of values.
744 '''expand set of values.
745 name is name of key in template map.
745 name is name of key in template map.
746 values is list of strings or dicts.
746 values is list of strings or dicts.
747 plural is plural of name, if not simply name + 's'.
747 plural is plural of name, if not simply name + 's'.
748
748
749 expansion works like this, given name 'foo'.
749 expansion works like this, given name 'foo'.
750
750
751 if values is empty, expand 'no_foos'.
751 if values is empty, expand 'no_foos'.
752
752
753 if 'foo' not in template map, return values as a string,
753 if 'foo' not in template map, return values as a string,
754 joined by space.
754 joined by space.
755
755
756 expand 'start_foos'.
756 expand 'start_foos'.
757
757
758 for each value, expand 'foo'. if 'last_foo' in template
758 for each value, expand 'foo'. if 'last_foo' in template
759 map, expand it instead of 'foo' for last key.
759 map, expand it instead of 'foo' for last key.
760
760
761 expand 'end_foos'.
761 expand 'end_foos'.
762 '''
762 '''
763 if plural: names = plural
763 if plural: names = plural
764 else: names = name + 's'
764 else: names = name + 's'
765 if not values:
765 if not values:
766 noname = 'no_' + names
766 noname = 'no_' + names
767 if noname in self.t:
767 if noname in self.t:
768 yield self.t(noname, **args)
768 yield self.t(noname, **args)
769 return
769 return
770 if name not in self.t:
770 if name not in self.t:
771 if isinstance(values[0], str):
771 if isinstance(values[0], str):
772 yield ' '.join(values)
772 yield ' '.join(values)
773 else:
773 else:
774 for v in values:
774 for v in values:
775 yield dict(v, **args)
775 yield dict(v, **args)
776 return
776 return
777 startname = 'start_' + names
777 startname = 'start_' + names
778 if startname in self.t:
778 if startname in self.t:
779 yield self.t(startname, **args)
779 yield self.t(startname, **args)
780 vargs = args.copy()
780 vargs = args.copy()
781 def one(v, tag=name):
781 def one(v, tag=name):
782 try:
782 try:
783 vargs.update(v)
783 vargs.update(v)
784 except (AttributeError, ValueError):
784 except (AttributeError, ValueError):
785 try:
785 try:
786 for a, b in v:
786 for a, b in v:
787 vargs[a] = b
787 vargs[a] = b
788 except ValueError:
788 except ValueError:
789 vargs[name] = v
789 vargs[name] = v
790 return self.t(tag, **vargs)
790 return self.t(tag, **vargs)
791 lastname = 'last_' + name
791 lastname = 'last_' + name
792 if lastname in self.t:
792 if lastname in self.t:
793 last = values.pop()
793 last = values.pop()
794 else:
794 else:
795 last = None
795 last = None
796 for v in values:
796 for v in values:
797 yield one(v)
797 yield one(v)
798 if last is not None:
798 if last is not None:
799 yield one(last, tag=lastname)
799 yield one(last, tag=lastname)
800 endname = 'end_' + names
800 endname = 'end_' + names
801 if endname in self.t:
801 if endname in self.t:
802 yield self.t(endname, **args)
802 yield self.t(endname, **args)
803
803
804 def showbranches(**args):
804 def showbranches(**args):
805 branch = ctx.branch()
805 branch = ctx.branch()
806 if branch != 'default':
806 if branch != 'default':
807 branch = encoding.tolocal(branch)
807 branch = encoding.tolocal(branch)
808 return showlist('branch', [branch], plural='branches', **args)
808 return showlist('branch', [branch], plural='branches', **args)
809
809
810 def showparents(**args):
810 def showparents(**args):
811 parents = [[('rev', p.rev()), ('node', p.hex())]
811 parents = [[('rev', p.rev()), ('node', p.hex())]
812 for p in self._meaningful_parentrevs(ctx)]
812 for p in self._meaningful_parentrevs(ctx)]
813 return showlist('parent', parents, **args)
813 return showlist('parent', parents, **args)
814
814
815 def showtags(**args):
815 def showtags(**args):
816 return showlist('tag', ctx.tags(), **args)
816 return showlist('tag', ctx.tags(), **args)
817
817
818 def showextras(**args):
818 def showextras(**args):
819 for key, value in sorted(ctx.extra().items()):
819 for key, value in sorted(ctx.extra().items()):
820 args = args.copy()
820 args = args.copy()
821 args.update(dict(key=key, value=value))
821 args.update(dict(key=key, value=value))
822 yield self.t('extra', **args)
822 yield self.t('extra', **args)
823
823
824 def showcopies(**args):
824 def showcopies(**args):
825 c = [{'name': x[0], 'source': x[1]} for x in copies]
825 c = [{'name': x[0], 'source': x[1]} for x in copies]
826 return showlist('file_copy', c, plural='file_copies', **args)
826 return showlist('file_copy', c, plural='file_copies', **args)
827
827
828 files = []
828 files = []
829 def getfiles():
829 def getfiles():
830 if not files:
830 if not files:
831 files[:] = self.repo.status(ctx.parents()[0].node(),
831 files[:] = self.repo.status(ctx.parents()[0].node(),
832 ctx.node())[:3]
832 ctx.node())[:3]
833 return files
833 return files
834 def showfiles(**args):
834 def showfiles(**args):
835 return showlist('file', ctx.files(), **args)
835 return showlist('file', ctx.files(), **args)
836 def showmods(**args):
836 def showmods(**args):
837 return showlist('file_mod', getfiles()[0], **args)
837 return showlist('file_mod', getfiles()[0], **args)
838 def showadds(**args):
838 def showadds(**args):
839 return showlist('file_add', getfiles()[1], **args)
839 return showlist('file_add', getfiles()[1], **args)
840 def showdels(**args):
840 def showdels(**args):
841 return showlist('file_del', getfiles()[2], **args)
841 return showlist('file_del', getfiles()[2], **args)
842 def showmanifest(**args):
842 def showmanifest(**args):
843 args = args.copy()
843 args = args.copy()
844 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
844 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
845 node=hex(ctx.changeset()[0])))
845 node=hex(ctx.changeset()[0])))
846 return self.t('manifest', **args)
846 return self.t('manifest', **args)
847
847
848 def showdiffstat(**args):
848 def showdiffstat(**args):
849 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
849 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
850 files, adds, removes = 0, 0, 0
850 files, adds, removes = 0, 0, 0
851 for i in patch.diffstatdata(util.iterlines(diff)):
851 for i in patch.diffstatdata(util.iterlines(diff)):
852 files += 1
852 files += 1
853 adds += i[1]
853 adds += i[1]
854 removes += i[2]
854 removes += i[2]
855 return '%s: +%s/-%s' % (files, adds, removes)
855 return '%s: +%s/-%s' % (files, adds, removes)
856
856
857 defprops = {
857 defprops = {
858 'author': ctx.user(),
858 'author': ctx.user(),
859 'branches': showbranches,
859 'branches': showbranches,
860 'date': ctx.date(),
860 'date': ctx.date(),
861 'desc': ctx.description().strip(),
861 'desc': ctx.description().strip(),
862 'file_adds': showadds,
862 'file_adds': showadds,
863 'file_dels': showdels,
863 'file_dels': showdels,
864 'file_mods': showmods,
864 'file_mods': showmods,
865 'files': showfiles,
865 'files': showfiles,
866 'file_copies': showcopies,
866 'file_copies': showcopies,
867 'manifest': showmanifest,
867 'manifest': showmanifest,
868 'node': ctx.hex(),
868 'node': ctx.hex(),
869 'parents': showparents,
869 'parents': showparents,
870 'rev': ctx.rev(),
870 'rev': ctx.rev(),
871 'tags': showtags,
871 'tags': showtags,
872 'extras': showextras,
872 'extras': showextras,
873 'diffstat': showdiffstat,
873 'diffstat': showdiffstat,
874 }
874 }
875 props = props.copy()
875 props = props.copy()
876 props.update(defprops)
876 props.update(defprops)
877
877
878 # find correct templates for current mode
878 # find correct templates for current mode
879
879
880 tmplmodes = [
880 tmplmodes = [
881 (True, None),
881 (True, None),
882 (self.ui.verbose, 'verbose'),
882 (self.ui.verbose, 'verbose'),
883 (self.ui.quiet, 'quiet'),
883 (self.ui.quiet, 'quiet'),
884 (self.ui.debugflag, 'debug'),
884 (self.ui.debugflag, 'debug'),
885 ]
885 ]
886
886
887 types = {'header': '', 'changeset': 'changeset'}
887 types = {'header': '', 'changeset': 'changeset'}
888 for mode, postfix in tmplmodes:
888 for mode, postfix in tmplmodes:
889 for type in types:
889 for type in types:
890 cur = postfix and ('%s_%s' % (type, postfix)) or type
890 cur = postfix and ('%s_%s' % (type, postfix)) or type
891 if mode and cur in self.t:
891 if mode and cur in self.t:
892 types[type] = cur
892 types[type] = cur
893
893
894 try:
894 try:
895
895
896 # write header
896 # write header
897 if types['header']:
897 if types['header']:
898 h = templater.stringify(self.t(types['header'], **props))
898 h = templater.stringify(self.t(types['header'], **props))
899 if self.buffered:
899 if self.buffered:
900 self.header[ctx.rev()] = h
900 self.header[ctx.rev()] = h
901 else:
901 else:
902 self.ui.write(h)
902 self.ui.write(h)
903
903
904 # write changeset metadata, then patch if requested
904 # write changeset metadata, then patch if requested
905 key = types['changeset']
905 key = types['changeset']
906 self.ui.write(templater.stringify(self.t(key, **props)))
906 self.ui.write(templater.stringify(self.t(key, **props)))
907 self.showpatch(ctx.node())
907 self.showpatch(ctx.node())
908
908
909 except KeyError, inst:
909 except KeyError, inst:
910 msg = _("%s: no key named '%s'")
910 msg = _("%s: no key named '%s'")
911 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
911 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
912 except SyntaxError, inst:
912 except SyntaxError, inst:
913 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
913 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
914
914
915 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
915 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
916 """show one changeset using template or regular display.
916 """show one changeset using template or regular display.
917
917
918 Display format will be the first non-empty hit of:
918 Display format will be the first non-empty hit of:
919 1. option 'template'
919 1. option 'template'
920 2. option 'style'
920 2. option 'style'
921 3. [ui] setting 'logtemplate'
921 3. [ui] setting 'logtemplate'
922 4. [ui] setting 'style'
922 4. [ui] setting 'style'
923 If all of these values are either the unset or the empty string,
923 If all of these values are either the unset or the empty string,
924 regular display via changeset_printer() is done.
924 regular display via changeset_printer() is done.
925 """
925 """
926 # options
926 # options
927 patch = False
927 patch = False
928 if opts.get('patch'):
928 if opts.get('patch'):
929 patch = matchfn or matchall(repo)
929 patch = matchfn or matchall(repo)
930
930
931 tmpl = opts.get('template')
931 tmpl = opts.get('template')
932 style = None
932 style = None
933 if tmpl:
933 if tmpl:
934 tmpl = templater.parsestring(tmpl, quoted=False)
934 tmpl = templater.parsestring(tmpl, quoted=False)
935 else:
935 else:
936 style = opts.get('style')
936 style = opts.get('style')
937
937
938 # ui settings
938 # ui settings
939 if not (tmpl or style):
939 if not (tmpl or style):
940 tmpl = ui.config('ui', 'logtemplate')
940 tmpl = ui.config('ui', 'logtemplate')
941 if tmpl:
941 if tmpl:
942 tmpl = templater.parsestring(tmpl)
942 tmpl = templater.parsestring(tmpl)
943 else:
943 else:
944 style = ui.config('ui', 'style')
944 style = ui.config('ui', 'style')
945
945
946 if not (tmpl or style):
946 if not (tmpl or style):
947 return changeset_printer(ui, repo, patch, opts, buffered)
947 return changeset_printer(ui, repo, patch, opts, buffered)
948
948
949 mapfile = None
949 mapfile = None
950 if style and not tmpl:
950 if style and not tmpl:
951 mapfile = style
951 mapfile = style
952 if not os.path.split(mapfile)[0]:
952 if not os.path.split(mapfile)[0]:
953 mapname = (templater.templatepath('map-cmdline.' + mapfile)
953 mapname = (templater.templatepath('map-cmdline.' + mapfile)
954 or templater.templatepath(mapfile))
954 or templater.templatepath(mapfile))
955 if mapname: mapfile = mapname
955 if mapname: mapfile = mapname
956
956
957 try:
957 try:
958 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
958 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
959 except SyntaxError, inst:
959 except SyntaxError, inst:
960 raise util.Abort(inst.args[0])
960 raise util.Abort(inst.args[0])
961 if tmpl: t.use_template(tmpl)
961 if tmpl: t.use_template(tmpl)
962 return t
962 return t
963
963
964 def finddate(ui, repo, date):
964 def finddate(ui, repo, date):
965 """Find the tipmost changeset that matches the given date spec"""
965 """Find the tipmost changeset that matches the given date spec"""
966 df = util.matchdate(date)
966 df = util.matchdate(date)
967 get = util.cachefunc(lambda r: repo[r].changeset())
967 get = util.cachefunc(lambda r: repo[r].changeset())
968 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
968 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
969 results = {}
969 results = {}
970 for st, rev, fns in changeiter:
970 for st, rev, fns in changeiter:
971 if st == 'add':
971 if st == 'add':
972 d = get(rev)[2]
972 d = get(rev)[2]
973 if df(d[0]):
973 if df(d[0]):
974 results[rev] = d
974 results[rev] = d
975 elif st == 'iter':
975 elif st == 'iter':
976 if rev in results:
976 if rev in results:
977 ui.status(_("Found revision %s from %s\n") %
977 ui.status(_("Found revision %s from %s\n") %
978 (rev, util.datestr(results[rev])))
978 (rev, util.datestr(results[rev])))
979 return str(rev)
979 return str(rev)
980
980
981 raise util.Abort(_("revision matching date not found"))
981 raise util.Abort(_("revision matching date not found"))
982
982
983 def walkchangerevs(ui, repo, pats, change, opts):
983 def walkchangerevs(ui, repo, pats, change, opts):
984 '''Iterate over files and the revs in which they changed.
984 '''Iterate over files and the revs in which they changed.
985
985
986 Callers most commonly need to iterate backwards over the history
986 Callers most commonly need to iterate backwards over the history
987 in which they are interested. Doing so has awful (quadratic-looking)
987 in which they are interested. Doing so has awful (quadratic-looking)
988 performance, so we use iterators in a "windowed" way.
988 performance, so we use iterators in a "windowed" way.
989
989
990 We walk a window of revisions in the desired order. Within the
990 We walk a window of revisions in the desired order. Within the
991 window, we first walk forwards to gather data, then in the desired
991 window, we first walk forwards to gather data, then in the desired
992 order (usually backwards) to display it.
992 order (usually backwards) to display it.
993
993
994 This function returns an (iterator, matchfn) tuple. The iterator
994 This function returns an (iterator, matchfn) tuple. The iterator
995 yields 3-tuples. They will be of one of the following forms:
995 yields 3-tuples. They will be of one of the following forms:
996
996
997 "window", incrementing, lastrev: stepping through a window,
997 "window", incrementing, lastrev: stepping through a window,
998 positive if walking forwards through revs, last rev in the
998 positive if walking forwards through revs, last rev in the
999 sequence iterated over - use to reset state for the current window
999 sequence iterated over - use to reset state for the current window
1000
1000
1001 "add", rev, fns: out-of-order traversal of the given file names
1001 "add", rev, fns: out-of-order traversal of the given file names
1002 fns, which changed during revision rev - use to gather data for
1002 fns, which changed during revision rev - use to gather data for
1003 possible display
1003 possible display
1004
1004
1005 "iter", rev, None: in-order traversal of the revs earlier iterated
1005 "iter", rev, None: in-order traversal of the revs earlier iterated
1006 over with "add" - use to display data'''
1006 over with "add" - use to display data'''
1007
1007
1008 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1008 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1009 if start < end:
1009 if start < end:
1010 while start < end:
1010 while start < end:
1011 yield start, min(windowsize, end-start)
1011 yield start, min(windowsize, end-start)
1012 start += windowsize
1012 start += windowsize
1013 if windowsize < sizelimit:
1013 if windowsize < sizelimit:
1014 windowsize *= 2
1014 windowsize *= 2
1015 else:
1015 else:
1016 while start > end:
1016 while start > end:
1017 yield start, min(windowsize, start-end-1)
1017 yield start, min(windowsize, start-end-1)
1018 start -= windowsize
1018 start -= windowsize
1019 if windowsize < sizelimit:
1019 if windowsize < sizelimit:
1020 windowsize *= 2
1020 windowsize *= 2
1021
1021
1022 m = match(repo, pats, opts)
1022 m = match(repo, pats, opts)
1023 follow = opts.get('follow') or opts.get('follow_first')
1023 follow = opts.get('follow') or opts.get('follow_first')
1024
1024
1025 if not len(repo):
1025 if not len(repo):
1026 return [], m
1026 return [], m
1027
1027
1028 if follow:
1028 if follow:
1029 defrange = '%s:0' % repo['.'].rev()
1029 defrange = '%s:0' % repo['.'].rev()
1030 else:
1030 else:
1031 defrange = '-1:0'
1031 defrange = '-1:0'
1032 revs = revrange(repo, opts['rev'] or [defrange])
1032 revs = revrange(repo, opts['rev'] or [defrange])
1033 wanted = set()
1033 wanted = set()
1034 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1034 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1035 fncache = {}
1035 fncache = {}
1036
1036
1037 if not slowpath and not m.files():
1037 if not slowpath and not m.files():
1038 # No files, no patterns. Display all revs.
1038 # No files, no patterns. Display all revs.
1039 wanted = set(revs)
1039 wanted = set(revs)
1040 copies = []
1040 copies = []
1041 if not slowpath:
1041 if not slowpath:
1042 # Only files, no patterns. Check the history of each file.
1042 # Only files, no patterns. Check the history of each file.
1043 def filerevgen(filelog, node):
1043 def filerevgen(filelog, node):
1044 cl_count = len(repo)
1044 cl_count = len(repo)
1045 if node is None:
1045 if node is None:
1046 last = len(filelog) - 1
1046 last = len(filelog) - 1
1047 else:
1047 else:
1048 last = filelog.rev(node)
1048 last = filelog.rev(node)
1049 for i, window in increasing_windows(last, nullrev):
1049 for i, window in increasing_windows(last, nullrev):
1050 revs = []
1050 revs = []
1051 for j in xrange(i - window, i + 1):
1051 for j in xrange(i - window, i + 1):
1052 n = filelog.node(j)
1052 n = filelog.node(j)
1053 revs.append((filelog.linkrev(j),
1053 revs.append((filelog.linkrev(j),
1054 follow and filelog.renamed(n)))
1054 follow and filelog.renamed(n)))
1055 revs.reverse()
1055 for rev in reversed(revs):
1056 for rev in revs:
1057 # only yield rev for which we have the changelog, it can
1056 # only yield rev for which we have the changelog, it can
1058 # happen while doing "hg log" during a pull or commit
1057 # happen while doing "hg log" during a pull or commit
1059 if rev[0] < cl_count:
1058 if rev[0] < cl_count:
1060 yield rev
1059 yield rev
1061 def iterfiles():
1060 def iterfiles():
1062 for filename in m.files():
1061 for filename in m.files():
1063 yield filename, None
1062 yield filename, None
1064 for filename_node in copies:
1063 for filename_node in copies:
1065 yield filename_node
1064 yield filename_node
1066 minrev, maxrev = min(revs), max(revs)
1065 minrev, maxrev = min(revs), max(revs)
1067 for file_, node in iterfiles():
1066 for file_, node in iterfiles():
1068 filelog = repo.file(file_)
1067 filelog = repo.file(file_)
1069 if not len(filelog):
1068 if not len(filelog):
1070 if node is None:
1069 if node is None:
1071 # A zero count may be a directory or deleted file, so
1070 # A zero count may be a directory or deleted file, so
1072 # try to find matching entries on the slow path.
1071 # try to find matching entries on the slow path.
1073 if follow:
1072 if follow:
1074 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1073 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1075 slowpath = True
1074 slowpath = True
1076 break
1075 break
1077 else:
1076 else:
1078 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1077 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1079 % (file_, short(node)))
1078 % (file_, short(node)))
1080 continue
1079 continue
1081 for rev, copied in filerevgen(filelog, node):
1080 for rev, copied in filerevgen(filelog, node):
1082 if rev <= maxrev:
1081 if rev <= maxrev:
1083 if rev < minrev:
1082 if rev < minrev:
1084 break
1083 break
1085 fncache.setdefault(rev, [])
1084 fncache.setdefault(rev, [])
1086 fncache[rev].append(file_)
1085 fncache[rev].append(file_)
1087 wanted.add(rev)
1086 wanted.add(rev)
1088 if follow and copied:
1087 if follow and copied:
1089 copies.append(copied)
1088 copies.append(copied)
1090 if slowpath:
1089 if slowpath:
1091 if follow:
1090 if follow:
1092 raise util.Abort(_('can only follow copies/renames for explicit '
1091 raise util.Abort(_('can only follow copies/renames for explicit '
1093 'file names'))
1092 'file names'))
1094
1093
1095 # The slow path checks files modified in every changeset.
1094 # The slow path checks files modified in every changeset.
1096 def changerevgen():
1095 def changerevgen():
1097 for i, window in increasing_windows(len(repo) - 1, nullrev):
1096 for i, window in increasing_windows(len(repo) - 1, nullrev):
1098 for j in xrange(i - window, i + 1):
1097 for j in xrange(i - window, i + 1):
1099 yield j, change(j)[3]
1098 yield j, change(j)[3]
1100
1099
1101 for rev, changefiles in changerevgen():
1100 for rev, changefiles in changerevgen():
1102 matches = filter(m, changefiles)
1101 matches = filter(m, changefiles)
1103 if matches:
1102 if matches:
1104 fncache[rev] = matches
1103 fncache[rev] = matches
1105 wanted.add(rev)
1104 wanted.add(rev)
1106
1105
1107 class followfilter:
1106 class followfilter:
1108 def __init__(self, onlyfirst=False):
1107 def __init__(self, onlyfirst=False):
1109 self.startrev = nullrev
1108 self.startrev = nullrev
1110 self.roots = []
1109 self.roots = []
1111 self.onlyfirst = onlyfirst
1110 self.onlyfirst = onlyfirst
1112
1111
1113 def match(self, rev):
1112 def match(self, rev):
1114 def realparents(rev):
1113 def realparents(rev):
1115 if self.onlyfirst:
1114 if self.onlyfirst:
1116 return repo.changelog.parentrevs(rev)[0:1]
1115 return repo.changelog.parentrevs(rev)[0:1]
1117 else:
1116 else:
1118 return filter(lambda x: x != nullrev,
1117 return filter(lambda x: x != nullrev,
1119 repo.changelog.parentrevs(rev))
1118 repo.changelog.parentrevs(rev))
1120
1119
1121 if self.startrev == nullrev:
1120 if self.startrev == nullrev:
1122 self.startrev = rev
1121 self.startrev = rev
1123 return True
1122 return True
1124
1123
1125 if rev > self.startrev:
1124 if rev > self.startrev:
1126 # forward: all descendants
1125 # forward: all descendants
1127 if not self.roots:
1126 if not self.roots:
1128 self.roots.append(self.startrev)
1127 self.roots.append(self.startrev)
1129 for parent in realparents(rev):
1128 for parent in realparents(rev):
1130 if parent in self.roots:
1129 if parent in self.roots:
1131 self.roots.append(rev)
1130 self.roots.append(rev)
1132 return True
1131 return True
1133 else:
1132 else:
1134 # backwards: all parents
1133 # backwards: all parents
1135 if not self.roots:
1134 if not self.roots:
1136 self.roots.extend(realparents(self.startrev))
1135 self.roots.extend(realparents(self.startrev))
1137 if rev in self.roots:
1136 if rev in self.roots:
1138 self.roots.remove(rev)
1137 self.roots.remove(rev)
1139 self.roots.extend(realparents(rev))
1138 self.roots.extend(realparents(rev))
1140 return True
1139 return True
1141
1140
1142 return False
1141 return False
1143
1142
1144 # it might be worthwhile to do this in the iterator if the rev range
1143 # it might be worthwhile to do this in the iterator if the rev range
1145 # is descending and the prune args are all within that range
1144 # is descending and the prune args are all within that range
1146 for rev in opts.get('prune', ()):
1145 for rev in opts.get('prune', ()):
1147 rev = repo.changelog.rev(repo.lookup(rev))
1146 rev = repo.changelog.rev(repo.lookup(rev))
1148 ff = followfilter()
1147 ff = followfilter()
1149 stop = min(revs[0], revs[-1])
1148 stop = min(revs[0], revs[-1])
1150 for x in xrange(rev, stop-1, -1):
1149 for x in xrange(rev, stop-1, -1):
1151 if ff.match(x):
1150 if ff.match(x):
1152 wanted.discard(x)
1151 wanted.discard(x)
1153
1152
1154 def iterate():
1153 def iterate():
1155 if follow and not m.files():
1154 if follow and not m.files():
1156 ff = followfilter(onlyfirst=opts.get('follow_first'))
1155 ff = followfilter(onlyfirst=opts.get('follow_first'))
1157 def want(rev):
1156 def want(rev):
1158 return ff.match(rev) and rev in wanted
1157 return ff.match(rev) and rev in wanted
1159 else:
1158 else:
1160 def want(rev):
1159 def want(rev):
1161 return rev in wanted
1160 return rev in wanted
1162
1161
1163 for i, window in increasing_windows(0, len(revs)):
1162 for i, window in increasing_windows(0, len(revs)):
1164 yield 'window', revs[0] < revs[-1], revs[-1]
1163 yield 'window', revs[0] < revs[-1], revs[-1]
1165 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1164 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1166 for rev in sorted(nrevs):
1165 for rev in sorted(nrevs):
1167 fns = fncache.get(rev)
1166 fns = fncache.get(rev)
1168 if not fns:
1167 if not fns:
1169 def fns_generator():
1168 def fns_generator():
1170 for f in change(rev)[3]:
1169 for f in change(rev)[3]:
1171 if m(f):
1170 if m(f):
1172 yield f
1171 yield f
1173 fns = fns_generator()
1172 fns = fns_generator()
1174 yield 'add', rev, fns
1173 yield 'add', rev, fns
1175 for rev in nrevs:
1174 for rev in nrevs:
1176 yield 'iter', rev, None
1175 yield 'iter', rev, None
1177 return iterate(), m
1176 return iterate(), m
1178
1177
1179 def commit(ui, repo, commitfunc, pats, opts):
1178 def commit(ui, repo, commitfunc, pats, opts):
1180 '''commit the specified files or all outstanding changes'''
1179 '''commit the specified files or all outstanding changes'''
1181 date = opts.get('date')
1180 date = opts.get('date')
1182 if date:
1181 if date:
1183 opts['date'] = util.parsedate(date)
1182 opts['date'] = util.parsedate(date)
1184 message = logmessage(opts)
1183 message = logmessage(opts)
1185
1184
1186 # extract addremove carefully -- this function can be called from a command
1185 # extract addremove carefully -- this function can be called from a command
1187 # that doesn't support addremove
1186 # that doesn't support addremove
1188 if opts.get('addremove'):
1187 if opts.get('addremove'):
1189 addremove(repo, pats, opts)
1188 addremove(repo, pats, opts)
1190
1189
1191 m = match(repo, pats, opts)
1190 m = match(repo, pats, opts)
1192 if pats:
1191 if pats:
1193 modified, added, removed = repo.status(match=m)[:3]
1192 modified, added, removed = repo.status(match=m)[:3]
1194 files = sorted(modified + added + removed)
1193 files = sorted(modified + added + removed)
1195
1194
1196 def is_dir(f):
1195 def is_dir(f):
1197 name = f + '/'
1196 name = f + '/'
1198 i = bisect.bisect(files, name)
1197 i = bisect.bisect(files, name)
1199 return i < len(files) and files[i].startswith(name)
1198 return i < len(files) and files[i].startswith(name)
1200
1199
1201 for f in m.files():
1200 for f in m.files():
1202 if f == '.':
1201 if f == '.':
1203 continue
1202 continue
1204 if f not in files:
1203 if f not in files:
1205 rf = repo.wjoin(f)
1204 rf = repo.wjoin(f)
1206 rel = repo.pathto(f)
1205 rel = repo.pathto(f)
1207 try:
1206 try:
1208 mode = os.lstat(rf)[stat.ST_MODE]
1207 mode = os.lstat(rf)[stat.ST_MODE]
1209 except OSError:
1208 except OSError:
1210 if is_dir(f): # deleted directory ?
1209 if is_dir(f): # deleted directory ?
1211 continue
1210 continue
1212 raise util.Abort(_("file %s not found!") % rel)
1211 raise util.Abort(_("file %s not found!") % rel)
1213 if stat.S_ISDIR(mode):
1212 if stat.S_ISDIR(mode):
1214 if not is_dir(f):
1213 if not is_dir(f):
1215 raise util.Abort(_("no match under directory %s!")
1214 raise util.Abort(_("no match under directory %s!")
1216 % rel)
1215 % rel)
1217 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1216 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1218 raise util.Abort(_("can't commit %s: "
1217 raise util.Abort(_("can't commit %s: "
1219 "unsupported file type!") % rel)
1218 "unsupported file type!") % rel)
1220 elif f not in repo.dirstate:
1219 elif f not in repo.dirstate:
1221 raise util.Abort(_("file %s not tracked!") % rel)
1220 raise util.Abort(_("file %s not tracked!") % rel)
1222 m = matchfiles(repo, files)
1221 m = matchfiles(repo, files)
1223 try:
1222 try:
1224 return commitfunc(ui, repo, message, m, opts)
1223 return commitfunc(ui, repo, message, m, opts)
1225 except ValueError, inst:
1224 except ValueError, inst:
1226 raise util.Abort(str(inst))
1225 raise util.Abort(str(inst))
@@ -1,3465 +1,3463 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, textwrap
11 import os, re, sys, textwrap
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 import difflib, patch, time, help, mdiff, tempfile, url, encoding
13 import difflib, patch, time, help, mdiff, tempfile, url, encoding
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 import merge as merge_
15 import merge as merge_
16
16
17 # Commands start here, listed alphabetically
17 # Commands start here, listed alphabetically
18
18
19 def add(ui, repo, *pats, **opts):
19 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
20 """add the specified files on the next commit
21
21
22 Schedule files to be version controlled and added to the
22 Schedule files to be version controlled and added to the
23 repository.
23 repository.
24
24
25 The files will be added to the repository at the next commit. To
25 The files will be added to the repository at the next commit. To
26 undo an add before that, see hg revert.
26 undo an add before that, see hg revert.
27
27
28 If no names are given, add all files to the repository.
28 If no names are given, add all files to the repository.
29 """
29 """
30
30
31 rejected = None
31 rejected = None
32 exacts = {}
32 exacts = {}
33 names = []
33 names = []
34 m = cmdutil.match(repo, pats, opts)
34 m = cmdutil.match(repo, pats, opts)
35 m.bad = lambda x,y: True
35 m.bad = lambda x,y: True
36 for abs in repo.walk(m):
36 for abs in repo.walk(m):
37 if m.exact(abs):
37 if m.exact(abs):
38 if ui.verbose:
38 if ui.verbose:
39 ui.status(_('adding %s\n') % m.rel(abs))
39 ui.status(_('adding %s\n') % m.rel(abs))
40 names.append(abs)
40 names.append(abs)
41 exacts[abs] = 1
41 exacts[abs] = 1
42 elif abs not in repo.dirstate:
42 elif abs not in repo.dirstate:
43 ui.status(_('adding %s\n') % m.rel(abs))
43 ui.status(_('adding %s\n') % m.rel(abs))
44 names.append(abs)
44 names.append(abs)
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 rejected = repo.add(names)
46 rejected = repo.add(names)
47 rejected = [p for p in rejected if p in exacts]
47 rejected = [p for p in rejected if p in exacts]
48 return rejected and 1 or 0
48 return rejected and 1 or 0
49
49
50 def addremove(ui, repo, *pats, **opts):
50 def addremove(ui, repo, *pats, **opts):
51 """add all new files, delete all missing files
51 """add all new files, delete all missing files
52
52
53 Add all new files and remove all missing files from the
53 Add all new files and remove all missing files from the
54 repository.
54 repository.
55
55
56 New files are ignored if they match any of the patterns in
56 New files are ignored if they match any of the patterns in
57 .hgignore. As with add, these changes take effect at the next
57 .hgignore. As with add, these changes take effect at the next
58 commit.
58 commit.
59
59
60 Use the -s/--similarity option to detect renamed files. With a
60 Use the -s/--similarity option to detect renamed files. With a
61 parameter > 0, this compares every removed file with every added
61 parameter > 0, this compares every removed file with every added
62 file and records those similar enough as renames. This option
62 file and records those similar enough as renames. This option
63 takes a percentage between 0 (disabled) and 100 (files must be
63 takes a percentage between 0 (disabled) and 100 (files must be
64 identical) as its parameter. Detecting renamed files this way can
64 identical) as its parameter. Detecting renamed files this way can
65 be expensive.
65 be expensive.
66 """
66 """
67 try:
67 try:
68 sim = float(opts.get('similarity') or 0)
68 sim = float(opts.get('similarity') or 0)
69 except ValueError:
69 except ValueError:
70 raise util.Abort(_('similarity must be a number'))
70 raise util.Abort(_('similarity must be a number'))
71 if sim < 0 or sim > 100:
71 if sim < 0 or sim > 100:
72 raise util.Abort(_('similarity must be between 0 and 100'))
72 raise util.Abort(_('similarity must be between 0 and 100'))
73 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
73 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74
74
75 def annotate(ui, repo, *pats, **opts):
75 def annotate(ui, repo, *pats, **opts):
76 """show changeset information per file line
76 """show changeset information per file line
77
77
78 List changes in files, showing the revision id responsible for
78 List changes in files, showing the revision id responsible for
79 each line
79 each line
80
80
81 This command is useful to discover who did a change or when a
81 This command is useful to discover who did a change or when a
82 change took place.
82 change took place.
83
83
84 Without the -a/--text option, annotate will avoid processing files
84 Without the -a/--text option, annotate will avoid processing files
85 it detects as binary. With -a, annotate will generate an
85 it detects as binary. With -a, annotate will generate an
86 annotation anyway, probably with undesirable results.
86 annotation anyway, probably with undesirable results.
87 """
87 """
88 datefunc = ui.quiet and util.shortdate or util.datestr
88 datefunc = ui.quiet and util.shortdate or util.datestr
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90
90
91 if not pats:
91 if not pats:
92 raise util.Abort(_('at least one file name or pattern required'))
92 raise util.Abort(_('at least one file name or pattern required'))
93
93
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 ('number', lambda x: str(x[0].rev())),
95 ('number', lambda x: str(x[0].rev())),
96 ('changeset', lambda x: short(x[0].node())),
96 ('changeset', lambda x: short(x[0].node())),
97 ('date', getdate),
97 ('date', getdate),
98 ('follow', lambda x: x[0].path()),
98 ('follow', lambda x: x[0].path()),
99 ]
99 ]
100
100
101 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
101 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 and not opts.get('follow')):
102 and not opts.get('follow')):
103 opts['number'] = 1
103 opts['number'] = 1
104
104
105 linenumber = opts.get('line_number') is not None
105 linenumber = opts.get('line_number') is not None
106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
108
108
109 funcmap = [func for op, func in opmap if opts.get(op)]
109 funcmap = [func for op, func in opmap if opts.get(op)]
110 if linenumber:
110 if linenumber:
111 lastfunc = funcmap[-1]
111 lastfunc = funcmap[-1]
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113
113
114 ctx = repo[opts.get('rev')]
114 ctx = repo[opts.get('rev')]
115
115
116 m = cmdutil.match(repo, pats, opts)
116 m = cmdutil.match(repo, pats, opts)
117 for abs in ctx.walk(m):
117 for abs in ctx.walk(m):
118 fctx = ctx[abs]
118 fctx = ctx[abs]
119 if not opts.get('text') and util.binary(fctx.data()):
119 if not opts.get('text') and util.binary(fctx.data()):
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 continue
121 continue
122
122
123 lines = fctx.annotate(follow=opts.get('follow'),
123 lines = fctx.annotate(follow=opts.get('follow'),
124 linenumber=linenumber)
124 linenumber=linenumber)
125 pieces = []
125 pieces = []
126
126
127 for f in funcmap:
127 for f in funcmap:
128 l = [f(n) for n, dummy in lines]
128 l = [f(n) for n, dummy in lines]
129 if l:
129 if l:
130 ml = max(map(len, l))
130 ml = max(map(len, l))
131 pieces.append(["%*s" % (ml, x) for x in l])
131 pieces.append(["%*s" % (ml, x) for x in l])
132
132
133 if pieces:
133 if pieces:
134 for p, l in zip(zip(*pieces), lines):
134 for p, l in zip(zip(*pieces), lines):
135 ui.write("%s: %s" % (" ".join(p), l[1]))
135 ui.write("%s: %s" % (" ".join(p), l[1]))
136
136
137 def archive(ui, repo, dest, **opts):
137 def archive(ui, repo, dest, **opts):
138 '''create unversioned archive of a repository revision
138 '''create unversioned archive of a repository revision
139
139
140 By default, the revision used is the parent of the working
140 By default, the revision used is the parent of the working
141 directory; use -r/--rev to specify a different revision.
141 directory; use -r/--rev to specify a different revision.
142
142
143 To specify the type of archive to create, use -t/--type. Valid
143 To specify the type of archive to create, use -t/--type. Valid
144 types are:
144 types are:
145
145
146 "files" (default): a directory full of files
146 "files" (default): a directory full of files
147 "tar": tar archive, uncompressed
147 "tar": tar archive, uncompressed
148 "tbz2": tar archive, compressed using bzip2
148 "tbz2": tar archive, compressed using bzip2
149 "tgz": tar archive, compressed using gzip
149 "tgz": tar archive, compressed using gzip
150 "uzip": zip archive, uncompressed
150 "uzip": zip archive, uncompressed
151 "zip": zip archive, compressed using deflate
151 "zip": zip archive, compressed using deflate
152
152
153 The exact name of the destination archive or directory is given
153 The exact name of the destination archive or directory is given
154 using a format string; see 'hg help export' for details.
154 using a format string; see 'hg help export' for details.
155
155
156 Each member added to an archive file has a directory prefix
156 Each member added to an archive file has a directory prefix
157 prepended. Use -p/--prefix to specify a format string for the
157 prepended. Use -p/--prefix to specify a format string for the
158 prefix. The default is the basename of the archive, with suffixes
158 prefix. The default is the basename of the archive, with suffixes
159 removed.
159 removed.
160 '''
160 '''
161
161
162 ctx = repo[opts.get('rev')]
162 ctx = repo[opts.get('rev')]
163 if not ctx:
163 if not ctx:
164 raise util.Abort(_('no working directory: please specify a revision'))
164 raise util.Abort(_('no working directory: please specify a revision'))
165 node = ctx.node()
165 node = ctx.node()
166 dest = cmdutil.make_filename(repo, dest, node)
166 dest = cmdutil.make_filename(repo, dest, node)
167 if os.path.realpath(dest) == repo.root:
167 if os.path.realpath(dest) == repo.root:
168 raise util.Abort(_('repository root cannot be destination'))
168 raise util.Abort(_('repository root cannot be destination'))
169 matchfn = cmdutil.match(repo, [], opts)
169 matchfn = cmdutil.match(repo, [], opts)
170 kind = opts.get('type') or 'files'
170 kind = opts.get('type') or 'files'
171 prefix = opts.get('prefix')
171 prefix = opts.get('prefix')
172 if dest == '-':
172 if dest == '-':
173 if kind == 'files':
173 if kind == 'files':
174 raise util.Abort(_('cannot archive plain files to stdout'))
174 raise util.Abort(_('cannot archive plain files to stdout'))
175 dest = sys.stdout
175 dest = sys.stdout
176 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
176 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 prefix = cmdutil.make_filename(repo, prefix, node)
177 prefix = cmdutil.make_filename(repo, prefix, node)
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 matchfn, prefix)
179 matchfn, prefix)
180
180
181 def backout(ui, repo, node=None, rev=None, **opts):
181 def backout(ui, repo, node=None, rev=None, **opts):
182 '''reverse effect of earlier changeset
182 '''reverse effect of earlier changeset
183
183
184 Commit the backed out changes as a new changeset. The new
184 Commit the backed out changes as a new changeset. The new
185 changeset is a child of the backed out changeset.
185 changeset is a child of the backed out changeset.
186
186
187 If you back out a changeset other than the tip, a new head is
187 If you back out a changeset other than the tip, a new head is
188 created. This head will be the new tip and you should merge this
188 created. This head will be the new tip and you should merge this
189 backout changeset with another head (current one by default).
189 backout changeset with another head (current one by default).
190
190
191 The --merge option remembers the parent of the working directory
191 The --merge option remembers the parent of the working directory
192 before starting the backout, then merges the new head with that
192 before starting the backout, then merges the new head with that
193 changeset afterwards. This saves you from doing the merge by hand.
193 changeset afterwards. This saves you from doing the merge by hand.
194 The result of this merge is not committed, as with a normal merge.
194 The result of this merge is not committed, as with a normal merge.
195
195
196 See \'hg help dates\' for a list of formats valid for -d/--date.
196 See \'hg help dates\' for a list of formats valid for -d/--date.
197 '''
197 '''
198 if rev and node:
198 if rev and node:
199 raise util.Abort(_("please specify just one revision"))
199 raise util.Abort(_("please specify just one revision"))
200
200
201 if not rev:
201 if not rev:
202 rev = node
202 rev = node
203
203
204 if not rev:
204 if not rev:
205 raise util.Abort(_("please specify a revision to backout"))
205 raise util.Abort(_("please specify a revision to backout"))
206
206
207 date = opts.get('date')
207 date = opts.get('date')
208 if date:
208 if date:
209 opts['date'] = util.parsedate(date)
209 opts['date'] = util.parsedate(date)
210
210
211 cmdutil.bail_if_changed(repo)
211 cmdutil.bail_if_changed(repo)
212 node = repo.lookup(rev)
212 node = repo.lookup(rev)
213
213
214 op1, op2 = repo.dirstate.parents()
214 op1, op2 = repo.dirstate.parents()
215 a = repo.changelog.ancestor(op1, node)
215 a = repo.changelog.ancestor(op1, node)
216 if a != node:
216 if a != node:
217 raise util.Abort(_('cannot back out change on a different branch'))
217 raise util.Abort(_('cannot back out change on a different branch'))
218
218
219 p1, p2 = repo.changelog.parents(node)
219 p1, p2 = repo.changelog.parents(node)
220 if p1 == nullid:
220 if p1 == nullid:
221 raise util.Abort(_('cannot back out a change with no parents'))
221 raise util.Abort(_('cannot back out a change with no parents'))
222 if p2 != nullid:
222 if p2 != nullid:
223 if not opts.get('parent'):
223 if not opts.get('parent'):
224 raise util.Abort(_('cannot back out a merge changeset without '
224 raise util.Abort(_('cannot back out a merge changeset without '
225 '--parent'))
225 '--parent'))
226 p = repo.lookup(opts['parent'])
226 p = repo.lookup(opts['parent'])
227 if p not in (p1, p2):
227 if p not in (p1, p2):
228 raise util.Abort(_('%s is not a parent of %s') %
228 raise util.Abort(_('%s is not a parent of %s') %
229 (short(p), short(node)))
229 (short(p), short(node)))
230 parent = p
230 parent = p
231 else:
231 else:
232 if opts.get('parent'):
232 if opts.get('parent'):
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 parent = p1
234 parent = p1
235
235
236 # the backout should appear on the same branch
236 # the backout should appear on the same branch
237 branch = repo.dirstate.branch()
237 branch = repo.dirstate.branch()
238 hg.clean(repo, node, show_stats=False)
238 hg.clean(repo, node, show_stats=False)
239 repo.dirstate.setbranch(branch)
239 repo.dirstate.setbranch(branch)
240 revert_opts = opts.copy()
240 revert_opts = opts.copy()
241 revert_opts['date'] = None
241 revert_opts['date'] = None
242 revert_opts['all'] = True
242 revert_opts['all'] = True
243 revert_opts['rev'] = hex(parent)
243 revert_opts['rev'] = hex(parent)
244 revert_opts['no_backup'] = None
244 revert_opts['no_backup'] = None
245 revert(ui, repo, **revert_opts)
245 revert(ui, repo, **revert_opts)
246 commit_opts = opts.copy()
246 commit_opts = opts.copy()
247 commit_opts['addremove'] = False
247 commit_opts['addremove'] = False
248 if not commit_opts['message'] and not commit_opts['logfile']:
248 if not commit_opts['message'] and not commit_opts['logfile']:
249 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
249 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 commit_opts['force_editor'] = True
250 commit_opts['force_editor'] = True
251 commit(ui, repo, **commit_opts)
251 commit(ui, repo, **commit_opts)
252 def nice(node):
252 def nice(node):
253 return '%d:%s' % (repo.changelog.rev(node), short(node))
253 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 ui.status(_('changeset %s backs out changeset %s\n') %
254 ui.status(_('changeset %s backs out changeset %s\n') %
255 (nice(repo.changelog.tip()), nice(node)))
255 (nice(repo.changelog.tip()), nice(node)))
256 if op1 != node:
256 if op1 != node:
257 hg.clean(repo, op1, show_stats=False)
257 hg.clean(repo, op1, show_stats=False)
258 if opts.get('merge'):
258 if opts.get('merge'):
259 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
259 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 hg.merge(repo, hex(repo.changelog.tip()))
260 hg.merge(repo, hex(repo.changelog.tip()))
261 else:
261 else:
262 ui.status(_('the backout changeset is a new head - '
262 ui.status(_('the backout changeset is a new head - '
263 'do not forget to merge\n'))
263 'do not forget to merge\n'))
264 ui.status(_('(use "backout --merge" '
264 ui.status(_('(use "backout --merge" '
265 'if you want to auto-merge)\n'))
265 'if you want to auto-merge)\n'))
266
266
267 def bisect(ui, repo, rev=None, extra=None, command=None,
267 def bisect(ui, repo, rev=None, extra=None, command=None,
268 reset=None, good=None, bad=None, skip=None, noupdate=None):
268 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 """subdivision search of changesets
269 """subdivision search of changesets
270
270
271 This command helps to find changesets which introduce problems. To
271 This command helps to find changesets which introduce problems. To
272 use, mark the earliest changeset you know exhibits the problem as
272 use, mark the earliest changeset you know exhibits the problem as
273 bad, then mark the latest changeset which is free from the problem
273 bad, then mark the latest changeset which is free from the problem
274 as good. Bisect will update your working directory to a revision
274 as good. Bisect will update your working directory to a revision
275 for testing (unless the -U/--noupdate option is specified). Once
275 for testing (unless the -U/--noupdate option is specified). Once
276 you have performed tests, mark the working directory as bad or
276 you have performed tests, mark the working directory as bad or
277 good and bisect will either update to another candidate changeset
277 good and bisect will either update to another candidate changeset
278 or announce that it has found the bad revision.
278 or announce that it has found the bad revision.
279
279
280 As a shortcut, you can also use the revision argument to mark a
280 As a shortcut, you can also use the revision argument to mark a
281 revision as good or bad without checking it out first.
281 revision as good or bad without checking it out first.
282
282
283 If you supply a command it will be used for automatic bisection.
283 If you supply a command it will be used for automatic bisection.
284 Its exit status will be used as flag to mark revision as bad or
284 Its exit status will be used as flag to mark revision as bad or
285 good. In case exit status is 0 the revision is marked as good, 125
285 good. In case exit status is 0 the revision is marked as good, 125
286 - skipped, 127 (command not found) - bisection will be aborted;
286 - skipped, 127 (command not found) - bisection will be aborted;
287 any other status bigger than 0 will mark revision as bad.
287 any other status bigger than 0 will mark revision as bad.
288 """
288 """
289 def print_result(nodes, good):
289 def print_result(nodes, good):
290 displayer = cmdutil.show_changeset(ui, repo, {})
290 displayer = cmdutil.show_changeset(ui, repo, {})
291 if len(nodes) == 1:
291 if len(nodes) == 1:
292 # narrowed it down to a single revision
292 # narrowed it down to a single revision
293 if good:
293 if good:
294 ui.write(_("The first good revision is:\n"))
294 ui.write(_("The first good revision is:\n"))
295 else:
295 else:
296 ui.write(_("The first bad revision is:\n"))
296 ui.write(_("The first bad revision is:\n"))
297 displayer.show(repo[nodes[0]])
297 displayer.show(repo[nodes[0]])
298 else:
298 else:
299 # multiple possible revisions
299 # multiple possible revisions
300 if good:
300 if good:
301 ui.write(_("Due to skipped revisions, the first "
301 ui.write(_("Due to skipped revisions, the first "
302 "good revision could be any of:\n"))
302 "good revision could be any of:\n"))
303 else:
303 else:
304 ui.write(_("Due to skipped revisions, the first "
304 ui.write(_("Due to skipped revisions, the first "
305 "bad revision could be any of:\n"))
305 "bad revision could be any of:\n"))
306 for n in nodes:
306 for n in nodes:
307 displayer.show(repo[n])
307 displayer.show(repo[n])
308
308
309 def check_state(state, interactive=True):
309 def check_state(state, interactive=True):
310 if not state['good'] or not state['bad']:
310 if not state['good'] or not state['bad']:
311 if (good or bad or skip or reset) and interactive:
311 if (good or bad or skip or reset) and interactive:
312 return
312 return
313 if not state['good']:
313 if not state['good']:
314 raise util.Abort(_('cannot bisect (no known good revisions)'))
314 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 else:
315 else:
316 raise util.Abort(_('cannot bisect (no known bad revisions)'))
316 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 return True
317 return True
318
318
319 # backward compatibility
319 # backward compatibility
320 if rev in "good bad reset init".split():
320 if rev in "good bad reset init".split():
321 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
321 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 cmd, rev, extra = rev, extra, None
322 cmd, rev, extra = rev, extra, None
323 if cmd == "good":
323 if cmd == "good":
324 good = True
324 good = True
325 elif cmd == "bad":
325 elif cmd == "bad":
326 bad = True
326 bad = True
327 else:
327 else:
328 reset = True
328 reset = True
329 elif extra or good + bad + skip + reset + bool(command) > 1:
329 elif extra or good + bad + skip + reset + bool(command) > 1:
330 raise util.Abort(_('incompatible arguments'))
330 raise util.Abort(_('incompatible arguments'))
331
331
332 if reset:
332 if reset:
333 p = repo.join("bisect.state")
333 p = repo.join("bisect.state")
334 if os.path.exists(p):
334 if os.path.exists(p):
335 os.unlink(p)
335 os.unlink(p)
336 return
336 return
337
337
338 state = hbisect.load_state(repo)
338 state = hbisect.load_state(repo)
339
339
340 if command:
340 if command:
341 commandpath = util.find_exe(command)
341 commandpath = util.find_exe(command)
342 changesets = 1
342 changesets = 1
343 try:
343 try:
344 while changesets:
344 while changesets:
345 # update state
345 # update state
346 status = os.spawnl(os.P_WAIT, commandpath, commandpath)
346 status = os.spawnl(os.P_WAIT, commandpath, commandpath)
347 if status == 125:
347 if status == 125:
348 transition = "skip"
348 transition = "skip"
349 elif status == 0:
349 elif status == 0:
350 transition = "good"
350 transition = "good"
351 # status < 0 means process was killed
351 # status < 0 means process was killed
352 elif status == 127:
352 elif status == 127:
353 raise util.Abort(_("failed to execute %s") % command)
353 raise util.Abort(_("failed to execute %s") % command)
354 elif status < 0:
354 elif status < 0:
355 raise util.Abort(_("%s killed") % command)
355 raise util.Abort(_("%s killed") % command)
356 else:
356 else:
357 transition = "bad"
357 transition = "bad"
358 node = repo.lookup(rev or '.')
358 node = repo.lookup(rev or '.')
359 state[transition].append(node)
359 state[transition].append(node)
360 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
360 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
361 check_state(state, interactive=False)
361 check_state(state, interactive=False)
362 # bisect
362 # bisect
363 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
363 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
364 # update to next check
364 # update to next check
365 cmdutil.bail_if_changed(repo)
365 cmdutil.bail_if_changed(repo)
366 hg.clean(repo, nodes[0], show_stats=False)
366 hg.clean(repo, nodes[0], show_stats=False)
367 finally:
367 finally:
368 hbisect.save_state(repo, state)
368 hbisect.save_state(repo, state)
369 return print_result(nodes, not status)
369 return print_result(nodes, not status)
370
370
371 # update state
371 # update state
372 node = repo.lookup(rev or '.')
372 node = repo.lookup(rev or '.')
373 if good:
373 if good:
374 state['good'].append(node)
374 state['good'].append(node)
375 elif bad:
375 elif bad:
376 state['bad'].append(node)
376 state['bad'].append(node)
377 elif skip:
377 elif skip:
378 state['skip'].append(node)
378 state['skip'].append(node)
379
379
380 hbisect.save_state(repo, state)
380 hbisect.save_state(repo, state)
381
381
382 if not check_state(state):
382 if not check_state(state):
383 return
383 return
384
384
385 # actually bisect
385 # actually bisect
386 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
386 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
387 if changesets == 0:
387 if changesets == 0:
388 print_result(nodes, good)
388 print_result(nodes, good)
389 else:
389 else:
390 assert len(nodes) == 1 # only a single node can be tested next
390 assert len(nodes) == 1 # only a single node can be tested next
391 node = nodes[0]
391 node = nodes[0]
392 # compute the approximate number of remaining tests
392 # compute the approximate number of remaining tests
393 tests, size = 0, 2
393 tests, size = 0, 2
394 while size <= changesets:
394 while size <= changesets:
395 tests, size = tests + 1, size * 2
395 tests, size = tests + 1, size * 2
396 rev = repo.changelog.rev(node)
396 rev = repo.changelog.rev(node)
397 ui.write(_("Testing changeset %s:%s "
397 ui.write(_("Testing changeset %s:%s "
398 "(%s changesets remaining, ~%s tests)\n")
398 "(%s changesets remaining, ~%s tests)\n")
399 % (rev, short(node), changesets, tests))
399 % (rev, short(node), changesets, tests))
400 if not noupdate:
400 if not noupdate:
401 cmdutil.bail_if_changed(repo)
401 cmdutil.bail_if_changed(repo)
402 return hg.clean(repo, node)
402 return hg.clean(repo, node)
403
403
404 def branch(ui, repo, label=None, **opts):
404 def branch(ui, repo, label=None, **opts):
405 """set or show the current branch name
405 """set or show the current branch name
406
406
407 With no argument, show the current branch name. With one argument,
407 With no argument, show the current branch name. With one argument,
408 set the working directory branch name (the branch does not exist
408 set the working directory branch name (the branch does not exist
409 in the repository until the next commit). It is recommended to use
409 in the repository until the next commit). It is recommended to use
410 the 'default' branch as your primary development branch.
410 the 'default' branch as your primary development branch.
411
411
412 Unless -f/--force is specified, branch will not let you set a
412 Unless -f/--force is specified, branch will not let you set a
413 branch name that shadows an existing branch.
413 branch name that shadows an existing branch.
414
414
415 Use -C/--clean to reset the working directory branch to that of
415 Use -C/--clean to reset the working directory branch to that of
416 the parent of the working directory, negating a previous branch
416 the parent of the working directory, negating a previous branch
417 change.
417 change.
418
418
419 Use the command 'hg update' to switch to an existing branch.
419 Use the command 'hg update' to switch to an existing branch.
420 """
420 """
421
421
422 if opts.get('clean'):
422 if opts.get('clean'):
423 label = repo[None].parents()[0].branch()
423 label = repo[None].parents()[0].branch()
424 repo.dirstate.setbranch(label)
424 repo.dirstate.setbranch(label)
425 ui.status(_('reset working directory to branch %s\n') % label)
425 ui.status(_('reset working directory to branch %s\n') % label)
426 elif label:
426 elif label:
427 if not opts.get('force') and label in repo.branchtags():
427 if not opts.get('force') and label in repo.branchtags():
428 if label not in [p.branch() for p in repo.parents()]:
428 if label not in [p.branch() for p in repo.parents()]:
429 raise util.Abort(_('a branch of the same name already exists'
429 raise util.Abort(_('a branch of the same name already exists'
430 ' (use --force to override)'))
430 ' (use --force to override)'))
431 repo.dirstate.setbranch(encoding.fromlocal(label))
431 repo.dirstate.setbranch(encoding.fromlocal(label))
432 ui.status(_('marked working directory as branch %s\n') % label)
432 ui.status(_('marked working directory as branch %s\n') % label)
433 else:
433 else:
434 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
434 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
435
435
436 def branches(ui, repo, active=False):
436 def branches(ui, repo, active=False):
437 """list repository named branches
437 """list repository named branches
438
438
439 List the repository's named branches, indicating which ones are
439 List the repository's named branches, indicating which ones are
440 inactive. If active is specified, only show active branches.
440 inactive. If active is specified, only show active branches.
441
441
442 A branch is considered active if it contains repository heads.
442 A branch is considered active if it contains repository heads.
443
443
444 Use the command 'hg update' to switch to an existing branch.
444 Use the command 'hg update' to switch to an existing branch.
445 """
445 """
446 hexfunc = ui.debugflag and hex or short
446 hexfunc = ui.debugflag and hex or short
447 activebranches = [encoding.tolocal(repo[n].branch())
447 activebranches = [encoding.tolocal(repo[n].branch())
448 for n in repo.heads(closed=False)]
448 for n in repo.heads(closed=False)]
449 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
449 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
450 for tag, node in repo.branchtags().items()])
450 for tag, node in repo.branchtags().items()],
451 branches.reverse()
451 reverse=True)
452
452
453 for isactive, node, tag in branches:
453 for isactive, node, tag in branches:
454 if (not active) or isactive:
454 if (not active) or isactive:
455 if ui.quiet:
455 if ui.quiet:
456 ui.write("%s\n" % tag)
456 ui.write("%s\n" % tag)
457 else:
457 else:
458 hn = repo.lookup(node)
458 hn = repo.lookup(node)
459 if isactive:
459 if isactive:
460 notice = ''
460 notice = ''
461 elif hn not in repo.branchheads(tag, closed=False):
461 elif hn not in repo.branchheads(tag, closed=False):
462 notice = ' (closed)'
462 notice = ' (closed)'
463 else:
463 else:
464 notice = ' (inactive)'
464 notice = ' (inactive)'
465 rev = str(node).rjust(31 - encoding.colwidth(tag))
465 rev = str(node).rjust(31 - encoding.colwidth(tag))
466 data = tag, rev, hexfunc(hn), notice
466 data = tag, rev, hexfunc(hn), notice
467 ui.write("%s %s:%s%s\n" % data)
467 ui.write("%s %s:%s%s\n" % data)
468
468
469 def bundle(ui, repo, fname, dest=None, **opts):
469 def bundle(ui, repo, fname, dest=None, **opts):
470 """create a changegroup file
470 """create a changegroup file
471
471
472 Generate a compressed changegroup file collecting changesets not
472 Generate a compressed changegroup file collecting changesets not
473 known to be in another repository.
473 known to be in another repository.
474
474
475 If no destination repository is specified the destination is
475 If no destination repository is specified the destination is
476 assumed to have all the nodes specified by one or more --base
476 assumed to have all the nodes specified by one or more --base
477 parameters. To create a bundle containing all changesets, use
477 parameters. To create a bundle containing all changesets, use
478 -a/--all (or --base null). To change the compression method
478 -a/--all (or --base null). To change the compression method
479 applied, use the -t/--type option (by default, bundles are
479 applied, use the -t/--type option (by default, bundles are
480 compressed using bz2).
480 compressed using bz2).
481
481
482 The bundle file can then be transferred using conventional means
482 The bundle file can then be transferred using conventional means
483 and applied to another repository with the unbundle or pull
483 and applied to another repository with the unbundle or pull
484 command. This is useful when direct push and pull are not
484 command. This is useful when direct push and pull are not
485 available or when exporting an entire repository is undesirable.
485 available or when exporting an entire repository is undesirable.
486
486
487 Applying bundles preserves all changeset contents including
487 Applying bundles preserves all changeset contents including
488 permissions, copy/rename information, and revision history.
488 permissions, copy/rename information, and revision history.
489 """
489 """
490 revs = opts.get('rev') or None
490 revs = opts.get('rev') or None
491 if revs:
491 if revs:
492 revs = [repo.lookup(rev) for rev in revs]
492 revs = [repo.lookup(rev) for rev in revs]
493 if opts.get('all'):
493 if opts.get('all'):
494 base = ['null']
494 base = ['null']
495 else:
495 else:
496 base = opts.get('base')
496 base = opts.get('base')
497 if base:
497 if base:
498 if dest:
498 if dest:
499 raise util.Abort(_("--base is incompatible with specifiying "
499 raise util.Abort(_("--base is incompatible with specifiying "
500 "a destination"))
500 "a destination"))
501 base = [repo.lookup(rev) for rev in base]
501 base = [repo.lookup(rev) for rev in base]
502 # create the right base
502 # create the right base
503 # XXX: nodesbetween / changegroup* should be "fixed" instead
503 # XXX: nodesbetween / changegroup* should be "fixed" instead
504 o = []
504 o = []
505 has = {nullid: None}
505 has = {nullid: None}
506 for n in base:
506 for n in base:
507 has.update(repo.changelog.reachable(n))
507 has.update(repo.changelog.reachable(n))
508 if revs:
508 if revs:
509 visit = list(revs)
509 visit = list(revs)
510 else:
510 else:
511 visit = repo.changelog.heads()
511 visit = repo.changelog.heads()
512 seen = {}
512 seen = {}
513 while visit:
513 while visit:
514 n = visit.pop(0)
514 n = visit.pop(0)
515 parents = [p for p in repo.changelog.parents(n) if p not in has]
515 parents = [p for p in repo.changelog.parents(n) if p not in has]
516 if len(parents) == 0:
516 if len(parents) == 0:
517 o.insert(0, n)
517 o.insert(0, n)
518 else:
518 else:
519 for p in parents:
519 for p in parents:
520 if p not in seen:
520 if p not in seen:
521 seen[p] = 1
521 seen[p] = 1
522 visit.append(p)
522 visit.append(p)
523 else:
523 else:
524 dest, revs, checkout = hg.parseurl(
524 dest, revs, checkout = hg.parseurl(
525 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
525 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
526 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
526 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
527 o = repo.findoutgoing(other, force=opts.get('force'))
527 o = repo.findoutgoing(other, force=opts.get('force'))
528
528
529 if revs:
529 if revs:
530 cg = repo.changegroupsubset(o, revs, 'bundle')
530 cg = repo.changegroupsubset(o, revs, 'bundle')
531 else:
531 else:
532 cg = repo.changegroup(o, 'bundle')
532 cg = repo.changegroup(o, 'bundle')
533
533
534 bundletype = opts.get('type', 'bzip2').lower()
534 bundletype = opts.get('type', 'bzip2').lower()
535 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
535 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
536 bundletype = btypes.get(bundletype)
536 bundletype = btypes.get(bundletype)
537 if bundletype not in changegroup.bundletypes:
537 if bundletype not in changegroup.bundletypes:
538 raise util.Abort(_('unknown bundle type specified with --type'))
538 raise util.Abort(_('unknown bundle type specified with --type'))
539
539
540 changegroup.writebundle(cg, fname, bundletype)
540 changegroup.writebundle(cg, fname, bundletype)
541
541
542 def cat(ui, repo, file1, *pats, **opts):
542 def cat(ui, repo, file1, *pats, **opts):
543 """output the current or given revision of files
543 """output the current or given revision of files
544
544
545 Print the specified files as they were at the given revision. If
545 Print the specified files as they were at the given revision. If
546 no revision is given, the parent of the working directory is used,
546 no revision is given, the parent of the working directory is used,
547 or tip if no revision is checked out.
547 or tip if no revision is checked out.
548
548
549 Output may be to a file, in which case the name of the file is
549 Output may be to a file, in which case the name of the file is
550 given using a format string. The formatting rules are the same as
550 given using a format string. The formatting rules are the same as
551 for the export command, with the following additions:
551 for the export command, with the following additions:
552
552
553 %s basename of file being printed
553 %s basename of file being printed
554 %d dirname of file being printed, or '.' if in repository root
554 %d dirname of file being printed, or '.' if in repository root
555 %p root-relative path name of file being printed
555 %p root-relative path name of file being printed
556 """
556 """
557 ctx = repo[opts.get('rev')]
557 ctx = repo[opts.get('rev')]
558 err = 1
558 err = 1
559 m = cmdutil.match(repo, (file1,) + pats, opts)
559 m = cmdutil.match(repo, (file1,) + pats, opts)
560 for abs in ctx.walk(m):
560 for abs in ctx.walk(m):
561 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
561 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
562 data = ctx[abs].data()
562 data = ctx[abs].data()
563 if opts.get('decode'):
563 if opts.get('decode'):
564 data = repo.wwritedata(abs, data)
564 data = repo.wwritedata(abs, data)
565 fp.write(data)
565 fp.write(data)
566 err = 0
566 err = 0
567 return err
567 return err
568
568
569 def clone(ui, source, dest=None, **opts):
569 def clone(ui, source, dest=None, **opts):
570 """make a copy of an existing repository
570 """make a copy of an existing repository
571
571
572 Create a copy of an existing repository in a new directory.
572 Create a copy of an existing repository in a new directory.
573
573
574 If no destination directory name is specified, it defaults to the
574 If no destination directory name is specified, it defaults to the
575 basename of the source.
575 basename of the source.
576
576
577 The location of the source is added to the new repository's
577 The location of the source is added to the new repository's
578 .hg/hgrc file, as the default to be used for future pulls.
578 .hg/hgrc file, as the default to be used for future pulls.
579
579
580 If you use the -r/--rev option to clone up to a specific revision,
580 If you use the -r/--rev option to clone up to a specific revision,
581 no subsequent revisions (including subsequent tags) will be
581 no subsequent revisions (including subsequent tags) will be
582 present in the cloned repository. This option implies --pull, even
582 present in the cloned repository. This option implies --pull, even
583 on local repositories.
583 on local repositories.
584
584
585 By default, clone will check out the head of the 'default' branch.
585 By default, clone will check out the head of the 'default' branch.
586 If the -U/--noupdate option is used, the new clone will contain
586 If the -U/--noupdate option is used, the new clone will contain
587 only a repository (.hg) and no working copy (the working copy
587 only a repository (.hg) and no working copy (the working copy
588 parent is the null revision).
588 parent is the null revision).
589
589
590 See 'hg help urls' for valid source format details.
590 See 'hg help urls' for valid source format details.
591
591
592 It is possible to specify an ssh:// URL as the destination, but no
592 It is possible to specify an ssh:// URL as the destination, but no
593 .hg/hgrc and working directory will be created on the remote side.
593 .hg/hgrc and working directory will be created on the remote side.
594 Look at the help text for URLs for important details about ssh://
594 Look at the help text for URLs for important details about ssh://
595 URLs.
595 URLs.
596
596
597 For efficiency, hardlinks are used for cloning whenever the source
597 For efficiency, hardlinks are used for cloning whenever the source
598 and destination are on the same filesystem (note this applies only
598 and destination are on the same filesystem (note this applies only
599 to the repository data, not to the checked out files). Some
599 to the repository data, not to the checked out files). Some
600 filesystems, such as AFS, implement hardlinking incorrectly, but
600 filesystems, such as AFS, implement hardlinking incorrectly, but
601 do not report errors. In these cases, use the --pull option to
601 do not report errors. In these cases, use the --pull option to
602 avoid hardlinking.
602 avoid hardlinking.
603
603
604 In some cases, you can clone repositories and checked out files
604 In some cases, you can clone repositories and checked out files
605 using full hardlinks with
605 using full hardlinks with
606
606
607 $ cp -al REPO REPOCLONE
607 $ cp -al REPO REPOCLONE
608
608
609 This is the fastest way to clone, but it is not always safe. The
609 This is the fastest way to clone, but it is not always safe. The
610 operation is not atomic (making sure REPO is not modified during
610 operation is not atomic (making sure REPO is not modified during
611 the operation is up to you) and you have to make sure your editor
611 the operation is up to you) and you have to make sure your editor
612 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
612 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
613 this is not compatible with certain extensions that place their
613 this is not compatible with certain extensions that place their
614 metadata under the .hg directory, such as mq.
614 metadata under the .hg directory, such as mq.
615
615
616 """
616 """
617 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
617 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
618 pull=opts.get('pull'),
618 pull=opts.get('pull'),
619 stream=opts.get('uncompressed'),
619 stream=opts.get('uncompressed'),
620 rev=opts.get('rev'),
620 rev=opts.get('rev'),
621 update=not opts.get('noupdate'))
621 update=not opts.get('noupdate'))
622
622
623 def commit(ui, repo, *pats, **opts):
623 def commit(ui, repo, *pats, **opts):
624 """commit the specified files or all outstanding changes
624 """commit the specified files or all outstanding changes
625
625
626 Commit changes to the given files into the repository. Unlike a
626 Commit changes to the given files into the repository. Unlike a
627 centralized RCS, this operation is a local operation. See hg push
627 centralized RCS, this operation is a local operation. See hg push
628 for means to actively distribute your changes.
628 for means to actively distribute your changes.
629
629
630 If a list of files is omitted, all changes reported by "hg status"
630 If a list of files is omitted, all changes reported by "hg status"
631 will be committed.
631 will be committed.
632
632
633 If you are committing the result of a merge, do not provide any
633 If you are committing the result of a merge, do not provide any
634 file names or -I/-X filters.
634 file names or -I/-X filters.
635
635
636 If no commit message is specified, the configured editor is
636 If no commit message is specified, the configured editor is
637 started to prompt you for a message.
637 started to prompt you for a message.
638
638
639 See 'hg help dates' for a list of formats valid for -d/--date.
639 See 'hg help dates' for a list of formats valid for -d/--date.
640 """
640 """
641 extra = {}
641 extra = {}
642 if opts.get('close_branch'):
642 if opts.get('close_branch'):
643 extra['close'] = 1
643 extra['close'] = 1
644 def commitfunc(ui, repo, message, match, opts):
644 def commitfunc(ui, repo, message, match, opts):
645 return repo.commit(match.files(), message, opts.get('user'),
645 return repo.commit(match.files(), message, opts.get('user'),
646 opts.get('date'), match, force_editor=opts.get('force_editor'),
646 opts.get('date'), match, force_editor=opts.get('force_editor'),
647 extra=extra)
647 extra=extra)
648
648
649 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
649 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
650 if not node:
650 if not node:
651 return
651 return
652 cl = repo.changelog
652 cl = repo.changelog
653 rev = cl.rev(node)
653 rev = cl.rev(node)
654 parents = cl.parentrevs(rev)
654 parents = cl.parentrevs(rev)
655 if rev - 1 in parents:
655 if rev - 1 in parents:
656 # one of the parents was the old tip
656 # one of the parents was the old tip
657 pass
657 pass
658 elif (parents == (nullrev, nullrev) or
658 elif (parents == (nullrev, nullrev) or
659 len(cl.heads(cl.node(parents[0]))) > 1 and
659 len(cl.heads(cl.node(parents[0]))) > 1 and
660 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
660 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
661 ui.status(_('created new head\n'))
661 ui.status(_('created new head\n'))
662
662
663 if ui.debugflag:
663 if ui.debugflag:
664 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
664 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
665 elif ui.verbose:
665 elif ui.verbose:
666 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
666 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
667
667
668 def copy(ui, repo, *pats, **opts):
668 def copy(ui, repo, *pats, **opts):
669 """mark files as copied for the next commit
669 """mark files as copied for the next commit
670
670
671 Mark dest as having copies of source files. If dest is a
671 Mark dest as having copies of source files. If dest is a
672 directory, copies are put in that directory. If dest is a file,
672 directory, copies are put in that directory. If dest is a file,
673 the source must be a single file.
673 the source must be a single file.
674
674
675 By default, this command copies the contents of files as they
675 By default, this command copies the contents of files as they
676 stand in the working directory. If invoked with -A/--after, the
676 stand in the working directory. If invoked with -A/--after, the
677 operation is recorded, but no copying is performed.
677 operation is recorded, but no copying is performed.
678
678
679 This command takes effect with the next commit. To undo a copy
679 This command takes effect with the next commit. To undo a copy
680 before that, see hg revert.
680 before that, see hg revert.
681 """
681 """
682 wlock = repo.wlock(False)
682 wlock = repo.wlock(False)
683 try:
683 try:
684 return cmdutil.copy(ui, repo, pats, opts)
684 return cmdutil.copy(ui, repo, pats, opts)
685 finally:
685 finally:
686 wlock.release()
686 wlock.release()
687
687
688 def debugancestor(ui, repo, *args):
688 def debugancestor(ui, repo, *args):
689 """find the ancestor revision of two revisions in a given index"""
689 """find the ancestor revision of two revisions in a given index"""
690 if len(args) == 3:
690 if len(args) == 3:
691 index, rev1, rev2 = args
691 index, rev1, rev2 = args
692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
692 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
693 lookup = r.lookup
693 lookup = r.lookup
694 elif len(args) == 2:
694 elif len(args) == 2:
695 if not repo:
695 if not repo:
696 raise util.Abort(_("There is no Mercurial repository here "
696 raise util.Abort(_("There is no Mercurial repository here "
697 "(.hg not found)"))
697 "(.hg not found)"))
698 rev1, rev2 = args
698 rev1, rev2 = args
699 r = repo.changelog
699 r = repo.changelog
700 lookup = repo.lookup
700 lookup = repo.lookup
701 else:
701 else:
702 raise util.Abort(_('either two or three arguments required'))
702 raise util.Abort(_('either two or three arguments required'))
703 a = r.ancestor(lookup(rev1), lookup(rev2))
703 a = r.ancestor(lookup(rev1), lookup(rev2))
704 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
704 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
705
705
706 def debugcommands(ui, cmd='', *args):
706 def debugcommands(ui, cmd='', *args):
707 for cmd, vals in sorted(table.iteritems()):
707 for cmd, vals in sorted(table.iteritems()):
708 cmd = cmd.split('|')[0].strip('^')
708 cmd = cmd.split('|')[0].strip('^')
709 opts = ', '.join([i[1] for i in vals[1]])
709 opts = ', '.join([i[1] for i in vals[1]])
710 ui.write('%s: %s\n' % (cmd, opts))
710 ui.write('%s: %s\n' % (cmd, opts))
711
711
712 def debugcomplete(ui, cmd='', **opts):
712 def debugcomplete(ui, cmd='', **opts):
713 """returns the completion list associated with the given command"""
713 """returns the completion list associated with the given command"""
714
714
715 if opts.get('options'):
715 if opts.get('options'):
716 options = []
716 options = []
717 otables = [globalopts]
717 otables = [globalopts]
718 if cmd:
718 if cmd:
719 aliases, entry = cmdutil.findcmd(cmd, table, False)
719 aliases, entry = cmdutil.findcmd(cmd, table, False)
720 otables.append(entry[1])
720 otables.append(entry[1])
721 for t in otables:
721 for t in otables:
722 for o in t:
722 for o in t:
723 if o[0]:
723 if o[0]:
724 options.append('-%s' % o[0])
724 options.append('-%s' % o[0])
725 options.append('--%s' % o[1])
725 options.append('--%s' % o[1])
726 ui.write("%s\n" % "\n".join(options))
726 ui.write("%s\n" % "\n".join(options))
727 return
727 return
728
728
729 cmdlist = cmdutil.findpossible(cmd, table)
729 cmdlist = cmdutil.findpossible(cmd, table)
730 if ui.verbose:
730 if ui.verbose:
731 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
731 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
732 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
732 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
733
733
734 def debugfsinfo(ui, path = "."):
734 def debugfsinfo(ui, path = "."):
735 file('.debugfsinfo', 'w').write('')
735 file('.debugfsinfo', 'w').write('')
736 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
736 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
737 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
737 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
738 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
738 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
739 and 'yes' or 'no'))
739 and 'yes' or 'no'))
740 os.unlink('.debugfsinfo')
740 os.unlink('.debugfsinfo')
741
741
742 def debugrebuildstate(ui, repo, rev="tip"):
742 def debugrebuildstate(ui, repo, rev="tip"):
743 """rebuild the dirstate as it would look like for the given revision"""
743 """rebuild the dirstate as it would look like for the given revision"""
744 ctx = repo[rev]
744 ctx = repo[rev]
745 wlock = repo.wlock()
745 wlock = repo.wlock()
746 try:
746 try:
747 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
747 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
748 finally:
748 finally:
749 wlock.release()
749 wlock.release()
750
750
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of both
    working-directory parents and aborts if any inconsistency is found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal and 'r'emoved entries must be tracked by the first parent
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded files must not already exist in the first parent
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged entries must come from at least one of the two parents
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse check: every file in the first parent's manifest must be
    # known to the dirstate as normal, removed or merged
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
777
777
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified "section.name" argument is allowed;
        # plain section names may be repeated freely
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # section match: print name=value pairs for the section
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    # exact item match: print the bare value only
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
        else:
            # no filter: dump everything as name=value
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
809
809
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # a missing second parent means the null revision
    rev2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        p1 = repo.lookup(rev1)
        p2 = repo.lookup(rev2)
        repo.dirstate.setparents(p1, p2)
    finally:
        wlock.release()
825
825
def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    for file_, ent in sorted(repo.dirstate._map.iteritems()):
        # ent is (state, mode, size, mtime); mtime == -1 means "unset"
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        # 020000 is the symlink bit; otherwise show the permission bits
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # list recorded copy sources after the state table
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
846
846
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # open the revlog through its ".i" index, derived from the ".d" name
    opener = util.opener(os.getcwd(), audit=False)
    r = revlog.revlog(opener, file_[:-2] + ".i")
    try:
        text = r.revision(r.lookup(rev))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
    ui.write(text)
854
854
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the extra, more permissive date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
866
866
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev"
             " nodeid p1 p2\n")
    for rev in r:
        node = r.node(rev)
        try:
            parents = r.parents(node)
        except:
            # damaged entry: fall back to null parents so the dump continues
            parents = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            rev, r.start(rev), r.length(rev), r.base(rev), r.linkrev(rev),
            short(node), short(parents[0]), short(parents[1])))
881
881
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in r:
        parents = r.parents(r.node(rev))
        # first parent always gets an edge; second only if non-null
        ui.write("\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write("}\n")
893
893
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # write contents to a fresh temporary file and return its path;
        # the caller is responsible for unlinking it
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding: the configured local encoding must round-trip text
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules: the C extension modules must be importable
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates: the default command-line template map must load
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch: generate a diff between two small texts, apply it with the
    # configured patcher, and verify the patched file matches the target
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        # exactly one file should have been touched: the temp file itself
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        # NOTE(review): file(fa) relies on refcounting to close the handle
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
        problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor: the configured commit editor must be findable in PATH
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username: HGUSER, then [ui] username, then EMAIL
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        # ui.username() prints its own diagnostic when nothing is configured
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
1005
1005
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        rel = matcher.rel(path)
        renamed = fctx.filelog().renamed(fctx.filenode())
        if renamed:
            ui.write(_("%s renamed from %s:%s\n") %
                     (rel, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
1019
1019
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = cmdutil.match(repo, pats, opts)
    files = list(repo.walk(matcher))
    if not files:
        return
    # column widths sized to the longest absolute and relative names
    abswidth = max([len(f) for f in files])
    relwidth = max([len(matcher.rel(f)) for f in files])
    fmt = 'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for f in files:
        flag = matcher.exact(f) and 'exact' or ''
        line = fmt % (f, matcher.rel(f), flag)
        ui.write("%s\n" % line.rstrip())
1032
1032
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read 'hg help diffs'.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways to pick the endpoints
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        # --change: diff a single changeset against its first parent
        node2 = repo.lookup(change)
        node1 = repo[node2].parents()[0].node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    matcher = cmdutil.match(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    for chunk in patch.diff(repo, node1, node2, match=matcher, opts=diffopts):
        repo.ui.write(chunk)
1074
1074
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. Read the diffs help topic for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # singular/plural status message depending on how many revisions matched
    msg = len(revs) > 1 and _('exporting patches:\n') or _('exporting patch:\n')
    ui.note(msg)
    patch.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffopts(ui, opts))
1119
1119
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    # --print0 separates fields and records with NUL instead of ':' / '\n'
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # cache filelog objects so each file is opened only once
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp match,
        # scanning at most one match per line
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # one matched line; equality is on line text only so that
        # difflinestates can diff match sets across revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}  # rev -> filename -> [linestate, ...]
    copies = {}   # rev -> filename -> copy source
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) pairs for match-status changes
        # between two revisions' match sets
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}  # filename -> last revision already displayed for it
    def display(fn, rev, states, prevstates):
        # print one output line per match (or per status change with --all);
        # returns True if anything was printed
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts.get('all'):
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts.get('files_with_matches'):
                # -l: report each (file, rev) pair at most once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs emits a windowed state machine:
    # 'window' resets per-window match state, 'add' collects matches for a
    # revision, 'iter' revisits revisions in display order
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            ctx = repo[rev]
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except error.LookupError:
                    # file not present in this revision
                    pass
        elif st == 'iter':
            for fn, m in sorted(matches[rev].items()):
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    # without --all, stop after the first matching revision
                    if r and not opts.get('all'):
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush files whose last recorded state was never displayed
    for fn, state in sorted(fstate.items()):
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1285
1285
1286 def heads(ui, repo, *branchrevs, **opts):
1286 def heads(ui, repo, *branchrevs, **opts):
1287 """show current repository heads or show branch heads
1287 """show current repository heads or show branch heads
1288
1288
1289 With no arguments, show all repository head changesets.
1289 With no arguments, show all repository head changesets.
1290
1290
1291 If branch or revisions names are given this will show the heads of
1291 If branch or revisions names are given this will show the heads of
1292 the specified branches or the branches those revisions are tagged
1292 the specified branches or the branches those revisions are tagged
1293 with.
1293 with.
1294
1294
1295 Repository "heads" are changesets that don't have child
1295 Repository "heads" are changesets that don't have child
1296 changesets. They are where development generally takes place and
1296 changesets. They are where development generally takes place and
1297 are the usual targets for update and merge operations.
1297 are the usual targets for update and merge operations.
1298
1298
1299 Branch heads are changesets that have a given branch tag, but have
1299 Branch heads are changesets that have a given branch tag, but have
1300 no child changesets with that tag. They are usually where
1300 no child changesets with that tag. They are usually where
1301 development on the given branch takes place.
1301 development on the given branch takes place.
1302 """
1302 """
1303 if opts.get('rev'):
1303 if opts.get('rev'):
1304 start = repo.lookup(opts['rev'])
1304 start = repo.lookup(opts['rev'])
1305 else:
1305 else:
1306 start = None
1306 start = None
1307 closed = not opts.get('active')
1307 closed = not opts.get('active')
1308 if not branchrevs:
1308 if not branchrevs:
1309 # Assume we're looking repo-wide heads if no revs were specified.
1309 # Assume we're looking repo-wide heads if no revs were specified.
1310 heads = repo.heads(start, closed=closed)
1310 heads = repo.heads(start, closed=closed)
1311 else:
1311 else:
1312 heads = []
1312 heads = []
1313 visitedset = set()
1313 visitedset = set()
1314 for branchrev in branchrevs:
1314 for branchrev in branchrevs:
1315 branch = repo[branchrev].branch()
1315 branch = repo[branchrev].branch()
1316 if branch in visitedset:
1316 if branch in visitedset:
1317 continue
1317 continue
1318 visitedset.add(branch)
1318 visitedset.add(branch)
1319 bheads = repo.branchheads(branch, start, closed=closed)
1319 bheads = repo.branchheads(branch, start, closed=closed)
1320 if not bheads:
1320 if not bheads:
1321 if branch != branchrev:
1321 if branch != branchrev:
1322 ui.warn(_("no changes on branch %s containing %s are "
1322 ui.warn(_("no changes on branch %s containing %s are "
1323 "reachable from %s\n")
1323 "reachable from %s\n")
1324 % (branch, branchrev, opts.get('rev')))
1324 % (branch, branchrev, opts.get('rev')))
1325 else:
1325 else:
1326 ui.warn(_("no changes on branch %s are reachable from %s\n")
1326 ui.warn(_("no changes on branch %s are reachable from %s\n")
1327 % (branch, opts.get('rev')))
1327 % (branch, opts.get('rev')))
1328 heads.extend(bheads)
1328 heads.extend(bheads)
1329 if not heads:
1329 if not heads:
1330 return 1
1330 return 1
1331 displayer = cmdutil.show_changeset(ui, repo, opts)
1331 displayer = cmdutil.show_changeset(ui, repo, opts)
1332 for n in heads:
1332 for n in heads:
1333 displayer.show(repo[n])
1333 displayer.show(repo[n])
1334
1334
1335 def help_(ui, name=None, with_version=False):
1335 def help_(ui, name=None, with_version=False):
1336 """show help for a given topic or a help overview
1336 """show help for a given topic or a help overview
1337
1337
1338 With no arguments, print a list of commands and short help.
1338 With no arguments, print a list of commands and short help.
1339
1339
1340 Given a topic, extension, or command name, print help for that
1340 Given a topic, extension, or command name, print help for that
1341 topic."""
1341 topic."""
1342 option_lists = []
1342 option_lists = []
1343
1343
1344 def addglobalopts(aliases):
1344 def addglobalopts(aliases):
1345 if ui.verbose:
1345 if ui.verbose:
1346 option_lists.append((_("global options:"), globalopts))
1346 option_lists.append((_("global options:"), globalopts))
1347 if name == 'shortlist':
1347 if name == 'shortlist':
1348 option_lists.append((_('use "hg help" for the full list '
1348 option_lists.append((_('use "hg help" for the full list '
1349 'of commands'), ()))
1349 'of commands'), ()))
1350 else:
1350 else:
1351 if name == 'shortlist':
1351 if name == 'shortlist':
1352 msg = _('use "hg help" for the full list of commands '
1352 msg = _('use "hg help" for the full list of commands '
1353 'or "hg -v" for details')
1353 'or "hg -v" for details')
1354 elif aliases:
1354 elif aliases:
1355 msg = _('use "hg -v help%s" to show aliases and '
1355 msg = _('use "hg -v help%s" to show aliases and '
1356 'global options') % (name and " " + name or "")
1356 'global options') % (name and " " + name or "")
1357 else:
1357 else:
1358 msg = _('use "hg -v help %s" to show global options') % name
1358 msg = _('use "hg -v help %s" to show global options') % name
1359 option_lists.append((msg, ()))
1359 option_lists.append((msg, ()))
1360
1360
1361 def helpcmd(name):
1361 def helpcmd(name):
1362 if with_version:
1362 if with_version:
1363 version_(ui)
1363 version_(ui)
1364 ui.write('\n')
1364 ui.write('\n')
1365
1365
1366 try:
1366 try:
1367 aliases, i = cmdutil.findcmd(name, table, False)
1367 aliases, i = cmdutil.findcmd(name, table, False)
1368 except error.AmbiguousCommand, inst:
1368 except error.AmbiguousCommand, inst:
1369 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1369 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1370 helplist(_('list of commands:\n\n'), select)
1370 helplist(_('list of commands:\n\n'), select)
1371 return
1371 return
1372
1372
1373 # synopsis
1373 # synopsis
1374 if len(i) > 2:
1374 if len(i) > 2:
1375 if i[2].startswith('hg'):
1375 if i[2].startswith('hg'):
1376 ui.write("%s\n" % i[2])
1376 ui.write("%s\n" % i[2])
1377 else:
1377 else:
1378 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1378 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1379 else:
1379 else:
1380 ui.write('hg %s\n' % aliases[0])
1380 ui.write('hg %s\n' % aliases[0])
1381
1381
1382 # aliases
1382 # aliases
1383 if not ui.quiet and len(aliases) > 1:
1383 if not ui.quiet and len(aliases) > 1:
1384 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1384 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1385
1385
1386 # description
1386 # description
1387 doc = gettext(i[0].__doc__)
1387 doc = gettext(i[0].__doc__)
1388 if not doc:
1388 if not doc:
1389 doc = _("(no help text available)")
1389 doc = _("(no help text available)")
1390 if ui.quiet:
1390 if ui.quiet:
1391 doc = doc.splitlines(0)[0]
1391 doc = doc.splitlines(0)[0]
1392 ui.write("\n%s\n" % doc.rstrip())
1392 ui.write("\n%s\n" % doc.rstrip())
1393
1393
1394 if not ui.quiet:
1394 if not ui.quiet:
1395 # options
1395 # options
1396 if i[1]:
1396 if i[1]:
1397 option_lists.append((_("options:\n"), i[1]))
1397 option_lists.append((_("options:\n"), i[1]))
1398
1398
1399 addglobalopts(False)
1399 addglobalopts(False)
1400
1400
1401 def helplist(header, select=None):
1401 def helplist(header, select=None):
1402 h = {}
1402 h = {}
1403 cmds = {}
1403 cmds = {}
1404 for c, e in table.iteritems():
1404 for c, e in table.iteritems():
1405 f = c.split("|", 1)[0]
1405 f = c.split("|", 1)[0]
1406 if select and not select(f):
1406 if select and not select(f):
1407 continue
1407 continue
1408 if (not select and name != 'shortlist' and
1408 if (not select and name != 'shortlist' and
1409 e[0].__module__ != __name__):
1409 e[0].__module__ != __name__):
1410 continue
1410 continue
1411 if name == "shortlist" and not f.startswith("^"):
1411 if name == "shortlist" and not f.startswith("^"):
1412 continue
1412 continue
1413 f = f.lstrip("^")
1413 f = f.lstrip("^")
1414 if not ui.debugflag and f.startswith("debug"):
1414 if not ui.debugflag and f.startswith("debug"):
1415 continue
1415 continue
1416 doc = gettext(e[0].__doc__)
1416 doc = gettext(e[0].__doc__)
1417 if not doc:
1417 if not doc:
1418 doc = _("(no help text available)")
1418 doc = _("(no help text available)")
1419 h[f] = doc.splitlines(0)[0].rstrip()
1419 h[f] = doc.splitlines(0)[0].rstrip()
1420 cmds[f] = c.lstrip("^")
1420 cmds[f] = c.lstrip("^")
1421
1421
1422 if not h:
1422 if not h:
1423 ui.status(_('no commands defined\n'))
1423 ui.status(_('no commands defined\n'))
1424 return
1424 return
1425
1425
1426 ui.status(header)
1426 ui.status(header)
1427 fns = sorted(h)
1427 fns = sorted(h)
1428 m = max(map(len, fns))
1428 m = max(map(len, fns))
1429 for f in fns:
1429 for f in fns:
1430 if ui.verbose:
1430 if ui.verbose:
1431 commands = cmds[f].replace("|",", ")
1431 commands = cmds[f].replace("|",", ")
1432 ui.write(" %s:\n %s\n"%(commands, h[f]))
1432 ui.write(" %s:\n %s\n"%(commands, h[f]))
1433 else:
1433 else:
1434 ui.write(' %-*s %s\n' % (m, f, h[f]))
1434 ui.write(' %-*s %s\n' % (m, f, h[f]))
1435
1435
1436 exts = list(extensions.extensions())
1436 exts = list(extensions.extensions())
1437 if exts and name != 'shortlist':
1437 if exts and name != 'shortlist':
1438 ui.write(_('\nenabled extensions:\n\n'))
1438 ui.write(_('\nenabled extensions:\n\n'))
1439 maxlength = 0
1439 maxlength = 0
1440 exthelps = []
1440 exthelps = []
1441 for ename, ext in exts:
1441 for ename, ext in exts:
1442 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1442 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1443 ename = ename.split('.')[-1]
1443 ename = ename.split('.')[-1]
1444 maxlength = max(len(ename), maxlength)
1444 maxlength = max(len(ename), maxlength)
1445 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1445 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1446 for ename, text in exthelps:
1446 for ename, text in exthelps:
1447 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1447 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1448
1448
1449 if not ui.quiet:
1449 if not ui.quiet:
1450 addglobalopts(True)
1450 addglobalopts(True)
1451
1451
1452 def helptopic(name):
1452 def helptopic(name):
1453 for names, header, doc in help.helptable:
1453 for names, header, doc in help.helptable:
1454 if name in names:
1454 if name in names:
1455 break
1455 break
1456 else:
1456 else:
1457 raise error.UnknownCommand(name)
1457 raise error.UnknownCommand(name)
1458
1458
1459 # description
1459 # description
1460 if not doc:
1460 if not doc:
1461 doc = _("(no help text available)")
1461 doc = _("(no help text available)")
1462 if callable(doc):
1462 if callable(doc):
1463 doc = doc()
1463 doc = doc()
1464
1464
1465 ui.write("%s\n" % header)
1465 ui.write("%s\n" % header)
1466 ui.write("%s\n" % doc.rstrip())
1466 ui.write("%s\n" % doc.rstrip())
1467
1467
1468 def helpext(name):
1468 def helpext(name):
1469 try:
1469 try:
1470 mod = extensions.find(name)
1470 mod = extensions.find(name)
1471 except KeyError:
1471 except KeyError:
1472 raise error.UnknownCommand(name)
1472 raise error.UnknownCommand(name)
1473
1473
1474 doc = gettext(mod.__doc__) or _('no help text available')
1474 doc = gettext(mod.__doc__) or _('no help text available')
1475 doc = doc.splitlines(0)
1475 doc = doc.splitlines(0)
1476 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1476 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1477 for d in doc[1:]:
1477 for d in doc[1:]:
1478 ui.write(d, '\n')
1478 ui.write(d, '\n')
1479
1479
1480 ui.status('\n')
1480 ui.status('\n')
1481
1481
1482 try:
1482 try:
1483 ct = mod.cmdtable
1483 ct = mod.cmdtable
1484 except AttributeError:
1484 except AttributeError:
1485 ct = {}
1485 ct = {}
1486
1486
1487 modcmds = set([c.split('|', 1)[0] for c in ct])
1487 modcmds = set([c.split('|', 1)[0] for c in ct])
1488 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1488 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1489
1489
1490 if name and name != 'shortlist':
1490 if name and name != 'shortlist':
1491 i = None
1491 i = None
1492 for f in (helptopic, helpcmd, helpext):
1492 for f in (helptopic, helpcmd, helpext):
1493 try:
1493 try:
1494 f(name)
1494 f(name)
1495 i = None
1495 i = None
1496 break
1496 break
1497 except error.UnknownCommand, inst:
1497 except error.UnknownCommand, inst:
1498 i = inst
1498 i = inst
1499 if i:
1499 if i:
1500 raise i
1500 raise i
1501
1501
1502 else:
1502 else:
1503 # program name
1503 # program name
1504 if ui.verbose or with_version:
1504 if ui.verbose or with_version:
1505 version_(ui)
1505 version_(ui)
1506 else:
1506 else:
1507 ui.status(_("Mercurial Distributed SCM\n"))
1507 ui.status(_("Mercurial Distributed SCM\n"))
1508 ui.status('\n')
1508 ui.status('\n')
1509
1509
1510 # list of commands
1510 # list of commands
1511 if name == "shortlist":
1511 if name == "shortlist":
1512 header = _('basic commands:\n\n')
1512 header = _('basic commands:\n\n')
1513 else:
1513 else:
1514 header = _('list of commands:\n\n')
1514 header = _('list of commands:\n\n')
1515
1515
1516 helplist(header)
1516 helplist(header)
1517
1517
1518 # list all option lists
1518 # list all option lists
1519 opt_output = []
1519 opt_output = []
1520 for title, options in option_lists:
1520 for title, options in option_lists:
1521 opt_output.append(("\n%s" % title, None))
1521 opt_output.append(("\n%s" % title, None))
1522 for shortopt, longopt, default, desc in options:
1522 for shortopt, longopt, default, desc in options:
1523 if "DEPRECATED" in desc and not ui.verbose: continue
1523 if "DEPRECATED" in desc and not ui.verbose: continue
1524 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1524 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1525 longopt and " --%s" % longopt),
1525 longopt and " --%s" % longopt),
1526 "%s%s" % (desc,
1526 "%s%s" % (desc,
1527 default
1527 default
1528 and _(" (default: %s)") % default
1528 and _(" (default: %s)") % default
1529 or "")))
1529 or "")))
1530
1530
1531 if not name:
1531 if not name:
1532 ui.write(_("\nadditional help topics:\n\n"))
1532 ui.write(_("\nadditional help topics:\n\n"))
1533 topics = []
1533 topics = []
1534 for names, header, doc in help.helptable:
1534 for names, header, doc in help.helptable:
1535 names = [(-len(name), name) for name in names]
1535 names = [(-len(name), name) for name in names]
1536 names.sort()
1536 names.sort()
1537 topics.append((names[0][1], header))
1537 topics.append((names[0][1], header))
1538 topics_len = max([len(s[0]) for s in topics])
1538 topics_len = max([len(s[0]) for s in topics])
1539 for t, desc in topics:
1539 for t, desc in topics:
1540 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1540 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1541
1541
1542 if opt_output:
1542 if opt_output:
1543 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1543 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1544 for first, second in opt_output:
1544 for first, second in opt_output:
1545 if second:
1545 if second:
1546 # wrap descriptions at 70 characters, just like the
1546 # wrap descriptions at 70 characters, just like the
1547 # main help texts
1547 # main help texts
1548 second = textwrap.wrap(second, width=70 - opts_len - 3)
1548 second = textwrap.wrap(second, width=70 - opts_len - 3)
1549 pad = '\n' + ' ' * (opts_len + 3)
1549 pad = '\n' + ' ' * (opts_len + 3)
1550 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1550 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1551 else:
1551 else:
1552 ui.write("%s\n" % first)
1552 ui.write("%s\n" % first)
1553
1553
1554 def identify(ui, repo, source=None,
1554 def identify(ui, repo, source=None,
1555 rev=None, num=None, id=None, branch=None, tags=None):
1555 rev=None, num=None, id=None, branch=None, tags=None):
1556 """identify the working copy or specified revision
1556 """identify the working copy or specified revision
1557
1557
1558 With no revision, print a summary of the current state of the
1558 With no revision, print a summary of the current state of the
1559 repository.
1559 repository.
1560
1560
1561 With a path, do a lookup in another repository.
1561 With a path, do a lookup in another repository.
1562
1562
1563 This summary identifies the repository state using one or two
1563 This summary identifies the repository state using one or two
1564 parent hash identifiers, followed by a "+" if there are
1564 parent hash identifiers, followed by a "+" if there are
1565 uncommitted changes in the working directory, a list of tags for
1565 uncommitted changes in the working directory, a list of tags for
1566 this revision and a branch name for non-default branches.
1566 this revision and a branch name for non-default branches.
1567 """
1567 """
1568
1568
1569 if not repo and not source:
1569 if not repo and not source:
1570 raise util.Abort(_("There is no Mercurial repository here "
1570 raise util.Abort(_("There is no Mercurial repository here "
1571 "(.hg not found)"))
1571 "(.hg not found)"))
1572
1572
1573 hexfunc = ui.debugflag and hex or short
1573 hexfunc = ui.debugflag and hex or short
1574 default = not (num or id or branch or tags)
1574 default = not (num or id or branch or tags)
1575 output = []
1575 output = []
1576
1576
1577 revs = []
1577 revs = []
1578 if source:
1578 if source:
1579 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1579 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1580 repo = hg.repository(ui, source)
1580 repo = hg.repository(ui, source)
1581
1581
1582 if not repo.local():
1582 if not repo.local():
1583 if not rev and revs:
1583 if not rev and revs:
1584 rev = revs[0]
1584 rev = revs[0]
1585 if not rev:
1585 if not rev:
1586 rev = "tip"
1586 rev = "tip"
1587 if num or branch or tags:
1587 if num or branch or tags:
1588 raise util.Abort(
1588 raise util.Abort(
1589 "can't query remote revision number, branch, or tags")
1589 "can't query remote revision number, branch, or tags")
1590 output = [hexfunc(repo.lookup(rev))]
1590 output = [hexfunc(repo.lookup(rev))]
1591 elif not rev:
1591 elif not rev:
1592 ctx = repo[None]
1592 ctx = repo[None]
1593 parents = ctx.parents()
1593 parents = ctx.parents()
1594 changed = False
1594 changed = False
1595 if default or id or num:
1595 if default or id or num:
1596 changed = ctx.files() + ctx.deleted()
1596 changed = ctx.files() + ctx.deleted()
1597 if default or id:
1597 if default or id:
1598 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1598 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1599 (changed) and "+" or "")]
1599 (changed) and "+" or "")]
1600 if num:
1600 if num:
1601 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1601 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1602 (changed) and "+" or ""))
1602 (changed) and "+" or ""))
1603 else:
1603 else:
1604 ctx = repo[rev]
1604 ctx = repo[rev]
1605 if default or id:
1605 if default or id:
1606 output = [hexfunc(ctx.node())]
1606 output = [hexfunc(ctx.node())]
1607 if num:
1607 if num:
1608 output.append(str(ctx.rev()))
1608 output.append(str(ctx.rev()))
1609
1609
1610 if repo.local() and default and not ui.quiet:
1610 if repo.local() and default and not ui.quiet:
1611 b = encoding.tolocal(ctx.branch())
1611 b = encoding.tolocal(ctx.branch())
1612 if b != 'default':
1612 if b != 'default':
1613 output.append("(%s)" % b)
1613 output.append("(%s)" % b)
1614
1614
1615 # multiple tags for a single parent separated by '/'
1615 # multiple tags for a single parent separated by '/'
1616 t = "/".join(ctx.tags())
1616 t = "/".join(ctx.tags())
1617 if t:
1617 if t:
1618 output.append(t)
1618 output.append(t)
1619
1619
1620 if branch:
1620 if branch:
1621 output.append(encoding.tolocal(ctx.branch()))
1621 output.append(encoding.tolocal(ctx.branch()))
1622
1622
1623 if tags:
1623 if tags:
1624 output.extend(ctx.tags())
1624 output.extend(ctx.tags())
1625
1625
1626 ui.write("%s\n" % ' '.join(output))
1626 ui.write("%s\n" % ' '.join(output))
1627
1627
1628 def import_(ui, repo, patch1, *patches, **opts):
1628 def import_(ui, repo, patch1, *patches, **opts):
1629 """import an ordered set of patches
1629 """import an ordered set of patches
1630
1630
1631 Import a list of patches and commit them individually.
1631 Import a list of patches and commit them individually.
1632
1632
1633 If there are outstanding changes in the working directory, import
1633 If there are outstanding changes in the working directory, import
1634 will abort unless given the -f/--force flag.
1634 will abort unless given the -f/--force flag.
1635
1635
1636 You can import a patch straight from a mail message. Even patches
1636 You can import a patch straight from a mail message. Even patches
1637 as attachments work (body part must be type text/plain or
1637 as attachments work (body part must be type text/plain or
1638 text/x-patch to be used). From and Subject headers of email
1638 text/x-patch to be used). From and Subject headers of email
1639 message are used as default committer and commit message. All
1639 message are used as default committer and commit message. All
1640 text/plain body parts before first diff are added to commit
1640 text/plain body parts before first diff are added to commit
1641 message.
1641 message.
1642
1642
1643 If the imported patch was generated by hg export, user and
1643 If the imported patch was generated by hg export, user and
1644 description from patch override values from message headers and
1644 description from patch override values from message headers and
1645 body. Values given on command line with -m/--message and -u/--user
1645 body. Values given on command line with -m/--message and -u/--user
1646 override these.
1646 override these.
1647
1647
1648 If --exact is specified, import will set the working directory to
1648 If --exact is specified, import will set the working directory to
1649 the parent of each patch before applying it, and will abort if the
1649 the parent of each patch before applying it, and will abort if the
1650 resulting changeset has a different ID than the one recorded in
1650 resulting changeset has a different ID than the one recorded in
1651 the patch. This may happen due to character set problems or other
1651 the patch. This may happen due to character set problems or other
1652 deficiencies in the text patch format.
1652 deficiencies in the text patch format.
1653
1653
1654 With -s/--similarity, hg will attempt to discover renames and
1654 With -s/--similarity, hg will attempt to discover renames and
1655 copies in the patch in the same way as 'addremove'.
1655 copies in the patch in the same way as 'addremove'.
1656
1656
1657 To read a patch from standard input, use patch name "-". See 'hg
1657 To read a patch from standard input, use patch name "-". See 'hg
1658 help dates' for a list of formats valid for -d/--date.
1658 help dates' for a list of formats valid for -d/--date.
1659 """
1659 """
1660 patches = (patch1,) + patches
1660 patches = (patch1,) + patches
1661
1661
1662 date = opts.get('date')
1662 date = opts.get('date')
1663 if date:
1663 if date:
1664 opts['date'] = util.parsedate(date)
1664 opts['date'] = util.parsedate(date)
1665
1665
1666 try:
1666 try:
1667 sim = float(opts.get('similarity') or 0)
1667 sim = float(opts.get('similarity') or 0)
1668 except ValueError:
1668 except ValueError:
1669 raise util.Abort(_('similarity must be a number'))
1669 raise util.Abort(_('similarity must be a number'))
1670 if sim < 0 or sim > 100:
1670 if sim < 0 or sim > 100:
1671 raise util.Abort(_('similarity must be between 0 and 100'))
1671 raise util.Abort(_('similarity must be between 0 and 100'))
1672
1672
1673 if opts.get('exact') or not opts.get('force'):
1673 if opts.get('exact') or not opts.get('force'):
1674 cmdutil.bail_if_changed(repo)
1674 cmdutil.bail_if_changed(repo)
1675
1675
1676 d = opts["base"]
1676 d = opts["base"]
1677 strip = opts["strip"]
1677 strip = opts["strip"]
1678 wlock = lock = None
1678 wlock = lock = None
1679 try:
1679 try:
1680 wlock = repo.wlock()
1680 wlock = repo.wlock()
1681 lock = repo.lock()
1681 lock = repo.lock()
1682 for p in patches:
1682 for p in patches:
1683 pf = os.path.join(d, p)
1683 pf = os.path.join(d, p)
1684
1684
1685 if pf == '-':
1685 if pf == '-':
1686 ui.status(_("applying patch from stdin\n"))
1686 ui.status(_("applying patch from stdin\n"))
1687 pf = sys.stdin
1687 pf = sys.stdin
1688 else:
1688 else:
1689 ui.status(_("applying %s\n") % p)
1689 ui.status(_("applying %s\n") % p)
1690 pf = url.open(ui, pf)
1690 pf = url.open(ui, pf)
1691 data = patch.extract(ui, pf)
1691 data = patch.extract(ui, pf)
1692 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1692 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1693
1693
1694 if tmpname is None:
1694 if tmpname is None:
1695 raise util.Abort(_('no diffs found'))
1695 raise util.Abort(_('no diffs found'))
1696
1696
1697 try:
1697 try:
1698 cmdline_message = cmdutil.logmessage(opts)
1698 cmdline_message = cmdutil.logmessage(opts)
1699 if cmdline_message:
1699 if cmdline_message:
1700 # pickup the cmdline msg
1700 # pickup the cmdline msg
1701 message = cmdline_message
1701 message = cmdline_message
1702 elif message:
1702 elif message:
1703 # pickup the patch msg
1703 # pickup the patch msg
1704 message = message.strip()
1704 message = message.strip()
1705 else:
1705 else:
1706 # launch the editor
1706 # launch the editor
1707 message = None
1707 message = None
1708 ui.debug(_('message:\n%s\n') % message)
1708 ui.debug(_('message:\n%s\n') % message)
1709
1709
1710 wp = repo.parents()
1710 wp = repo.parents()
1711 if opts.get('exact'):
1711 if opts.get('exact'):
1712 if not nodeid or not p1:
1712 if not nodeid or not p1:
1713 raise util.Abort(_('not a mercurial patch'))
1713 raise util.Abort(_('not a mercurial patch'))
1714 p1 = repo.lookup(p1)
1714 p1 = repo.lookup(p1)
1715 p2 = repo.lookup(p2 or hex(nullid))
1715 p2 = repo.lookup(p2 or hex(nullid))
1716
1716
1717 if p1 != wp[0].node():
1717 if p1 != wp[0].node():
1718 hg.clean(repo, p1)
1718 hg.clean(repo, p1)
1719 repo.dirstate.setparents(p1, p2)
1719 repo.dirstate.setparents(p1, p2)
1720 elif p2:
1720 elif p2:
1721 try:
1721 try:
1722 p1 = repo.lookup(p1)
1722 p1 = repo.lookup(p1)
1723 p2 = repo.lookup(p2)
1723 p2 = repo.lookup(p2)
1724 if p1 == wp[0].node():
1724 if p1 == wp[0].node():
1725 repo.dirstate.setparents(p1, p2)
1725 repo.dirstate.setparents(p1, p2)
1726 except error.RepoError:
1726 except error.RepoError:
1727 pass
1727 pass
1728 if opts.get('exact') or opts.get('import_branch'):
1728 if opts.get('exact') or opts.get('import_branch'):
1729 repo.dirstate.setbranch(branch or 'default')
1729 repo.dirstate.setbranch(branch or 'default')
1730
1730
1731 files = {}
1731 files = {}
1732 try:
1732 try:
1733 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1733 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1734 files=files)
1734 files=files)
1735 finally:
1735 finally:
1736 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1736 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1737 if not opts.get('no_commit'):
1737 if not opts.get('no_commit'):
1738 n = repo.commit(files, message, opts.get('user') or user,
1738 n = repo.commit(files, message, opts.get('user') or user,
1739 opts.get('date') or date)
1739 opts.get('date') or date)
1740 if opts.get('exact'):
1740 if opts.get('exact'):
1741 if hex(n) != nodeid:
1741 if hex(n) != nodeid:
1742 repo.rollback()
1742 repo.rollback()
1743 raise util.Abort(_('patch is damaged'
1743 raise util.Abort(_('patch is damaged'
1744 ' or loses information'))
1744 ' or loses information'))
1745 # Force a dirstate write so that the next transaction
1745 # Force a dirstate write so that the next transaction
1746 # backups an up-do-date file.
1746 # backups an up-do-date file.
1747 repo.dirstate.write()
1747 repo.dirstate.write()
1748 finally:
1748 finally:
1749 os.unlink(tmpname)
1749 os.unlink(tmpname)
1750 finally:
1750 finally:
1751 release(lock, wlock)
1751 release(lock, wlock)
1752
1752
1753 def incoming(ui, repo, source="default", **opts):
1753 def incoming(ui, repo, source="default", **opts):
1754 """show new changesets found in source
1754 """show new changesets found in source
1755
1755
1756 Show new changesets found in the specified path/URL or the default
1756 Show new changesets found in the specified path/URL or the default
1757 pull location. These are the changesets that would be pulled if a
1757 pull location. These are the changesets that would be pulled if a
1758 pull was requested.
1758 pull was requested.
1759
1759
1760 For remote repository, using --bundle avoids downloading the
1760 For remote repository, using --bundle avoids downloading the
1761 changesets twice if the incoming is followed by a pull.
1761 changesets twice if the incoming is followed by a pull.
1762
1762
1763 See pull for valid source format details.
1763 See pull for valid source format details.
1764 """
1764 """
1765 limit = cmdutil.loglimit(opts)
1765 limit = cmdutil.loglimit(opts)
1766 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1766 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1767 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1767 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1768 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1768 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1769 if revs:
1769 if revs:
1770 revs = [other.lookup(rev) for rev in revs]
1770 revs = [other.lookup(rev) for rev in revs]
1771 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1771 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1772 force=opts["force"])
1772 force=opts["force"])
1773 if not incoming:
1773 if not incoming:
1774 try:
1774 try:
1775 os.unlink(opts["bundle"])
1775 os.unlink(opts["bundle"])
1776 except:
1776 except:
1777 pass
1777 pass
1778 ui.status(_("no changes found\n"))
1778 ui.status(_("no changes found\n"))
1779 return 1
1779 return 1
1780
1780
1781 cleanup = None
1781 cleanup = None
1782 try:
1782 try:
1783 fname = opts["bundle"]
1783 fname = opts["bundle"]
1784 if fname or not other.local():
1784 if fname or not other.local():
1785 # create a bundle (uncompressed if other repo is not local)
1785 # create a bundle (uncompressed if other repo is not local)
1786
1786
1787 if revs is None and other.capable('changegroupsubset'):
1787 if revs is None and other.capable('changegroupsubset'):
1788 revs = rheads
1788 revs = rheads
1789
1789
1790 if revs is None:
1790 if revs is None:
1791 cg = other.changegroup(incoming, "incoming")
1791 cg = other.changegroup(incoming, "incoming")
1792 else:
1792 else:
1793 cg = other.changegroupsubset(incoming, revs, 'incoming')
1793 cg = other.changegroupsubset(incoming, revs, 'incoming')
1794 bundletype = other.local() and "HG10BZ" or "HG10UN"
1794 bundletype = other.local() and "HG10BZ" or "HG10UN"
1795 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1795 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1796 # keep written bundle?
1796 # keep written bundle?
1797 if opts["bundle"]:
1797 if opts["bundle"]:
1798 cleanup = None
1798 cleanup = None
1799 if not other.local():
1799 if not other.local():
1800 # use the created uncompressed bundlerepo
1800 # use the created uncompressed bundlerepo
1801 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1801 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1802
1802
1803 o = other.changelog.nodesbetween(incoming, revs)[0]
1803 o = other.changelog.nodesbetween(incoming, revs)[0]
1804 if opts.get('newest_first'):
1804 if opts.get('newest_first'):
1805 o.reverse()
1805 o.reverse()
1806 displayer = cmdutil.show_changeset(ui, other, opts)
1806 displayer = cmdutil.show_changeset(ui, other, opts)
1807 count = 0
1807 count = 0
1808 for n in o:
1808 for n in o:
1809 if count >= limit:
1809 if count >= limit:
1810 break
1810 break
1811 parents = [p for p in other.changelog.parents(n) if p != nullid]
1811 parents = [p for p in other.changelog.parents(n) if p != nullid]
1812 if opts.get('no_merges') and len(parents) == 2:
1812 if opts.get('no_merges') and len(parents) == 2:
1813 continue
1813 continue
1814 count += 1
1814 count += 1
1815 displayer.show(other[n])
1815 displayer.show(other[n])
1816 finally:
1816 finally:
1817 if hasattr(other, 'close'):
1817 if hasattr(other, 'close'):
1818 other.close()
1818 other.close()
1819 if cleanup:
1819 if cleanup:
1820 os.unlink(cleanup)
1820 os.unlink(cleanup)
1821
1821
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    See 'hg help urls' for more information.
    """
    # create=1 asks hg.repository to initialize a new repo at dest
    # instead of opening an existing one
    hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1834
1834
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain white space as multiple filenames.
    """
    # NUL-terminate entries for --print0 (xargs -0), newline otherwise
    end = opts.get('print0') and '\0' or '\n'
    rev = opts.get('rev') or None

    # exit status: flips to 0 once at least one file is printed
    ret = 1
    m = cmdutil.match(repo, pats, opts, default='relglob')
    # override the matcher's "bad file" callback so patterns that match
    # nothing are silently ignored rather than warned about
    m.bad = lambda x,y: False
    for abs in repo[rev].walk(m):
        # without an explicit rev, only report tracked files
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret
1869
1869
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p/--patch may generate unexpected diff output for merge
    changesets, as it will only compare the merge changeset against
    its first parent. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoize changeset data: several filters below may look at the
    # same revision
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # rename lookups (for --copies) only need to scan up to the newest
    # requested revision
    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
    else:
        endrev = len(repo)
    rcache = {}  # fn -> {linkrev: rename info}
    ncache = {}  # fn -> {filenode: rename info}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first request for fn: scan its filelog once and cache
            # rename info for every revision up to endrev
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(i)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        # df becomes a predicate matching the requested date range
        df = util.matchdate(opts["date"])

    only_branches = opts.get('only_branch')

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            # apply the user's filters; any failed filter skips rev
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            if opts.get('only_merges') and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts.get('keyword'):
                # case-insensitive match against user, description and
                # changed-file names
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['user']:
                changes = get(rev)
                if not [k for k in opts['user'] if k in changes[1]]:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(context.changectx(repo, rev), copies=copies)
        elif st == 'iter':
            # 'iter' marks a revision actually emitted; enforce --limit
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1996
1996
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if none is checked out.

    With -v flag, print file permissions, symlink and executable bits.
    With --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # allow the revision to come in through either argument
    node = node or rev

    # verbose-mode prefix keyed on the file's flag character
    perms = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
    ctx = repo[node]
    for path in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[path]))
        if ui.verbose:
            ui.write(perms[ctx.flags(path)])
        ui.write("%s\n" % path)
2022
2022
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    The contents of the current working directory is updated with all
    changes made in the requested revision since the last common
    predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates are allowed. The next commit has two parents.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # No explicit revision given: try to pick the other head of
        # the current branch, but only when that choice is unambiguous.
        # (use repo[None] consistently, as below, instead of the older
        # repo.changectx(None) spelling)
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # merge with whichever branch head is not the working dir parent
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
2069
2069
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    # 'default-push' wins over 'default' when both paths are configured
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        # nodesbetween yields oldest-first; flip for --newest-first
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        # honor --limit
        if count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        # skip merge changesets when --no-merges is given
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
2104
2104
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, revision in which the file was last
    changed (before the working directory revision or the argument to
    --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo[rev]
    else:
        # no --rev: use the working directory context
        ctx = repo[None]

    if file_:
        # a single explicit file name is required; globs/patterns that
        # expand to anything else are rejected
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # file not present in this parent; skip it
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        # map each file node back to the changeset that introduced it
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # suppress the null parent of root changesets
        if n != nullid:
            displayer.show(repo[n])
2144
2144
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.

    See 'hg help urls' for more information.
    """
    pathitems = ui.configitems("paths")

    if not search:
        # no name given: list every configured alias
        for alias, loc in pathitems:
            ui.write("%s = %s\n" % (alias, url.hidepassword(loc)))
        return

    # look up the single requested alias
    for alias, loc in pathitems:
        if alias == search:
            ui.write("%s\n" % url.hidepassword(loc))
            return
    ui.warn(_("not found!\n"))
    return 1
2166
2166
def postincoming(ui, repo, modheads, optupdate, checkout):
    # Post-pull/unbundle helper: optionally update the working dir and
    # print a hint about what to run next. modheads is the number of
    # heads added/changed by the incoming operation.
    if not modheads:
        return
    if optupdate:
        if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2179
2179
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to the local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Use hg incoming if you want to see what will be added by the next
    pull without actually adding the changes to the repository.

    If SOURCE is omitted, the 'default' path will be used.
    See 'hg help urls' for more information.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            # old/limited remotes cannot resolve names to nodes
            err = _("Other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    # postincoming handles the optional working-dir update and hints
    return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2208
2208
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It moves changes from
    the current repository to a different one. If the destination is
    local this is identical to a pull in that directory from the
    current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to pull and merge before pushing.

    If -r/--rev is used, the named revision and all its ancestors will
    be pushed to the remote repository.

    Look at the help text for URLs for important details about ssh://
    URLs. If DESTINATION is omitted, a default path will be used.
    See 'hg help urls' for more information.
    """
    # 'default-push' wins over 'default' when both paths are configured
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts.get('force'), revs=revs)
    # r == 0 presumably means nothing was pushed; True here becomes a
    # nonzero command exit status -- TODO confirm repo.push contract
    return r == 0
2238
2238
2239 def rawcommit(ui, repo, *pats, **opts):
2239 def rawcommit(ui, repo, *pats, **opts):
2240 """raw commit interface (DEPRECATED)
2240 """raw commit interface (DEPRECATED)
2241
2241
2242 (DEPRECATED)
2242 (DEPRECATED)
2243 Lowlevel commit, for use in helper scripts.
2243 Lowlevel commit, for use in helper scripts.
2244
2244
2245 This command is not intended to be used by normal users, as it is
2245 This command is not intended to be used by normal users, as it is
2246 primarily useful for importing from other SCMs.
2246 primarily useful for importing from other SCMs.
2247
2247
2248 This command is now deprecated and will be removed in a future
2248 This command is now deprecated and will be removed in a future
2249 release, please use debugsetparents and commit instead.
2249 release, please use debugsetparents and commit instead.
2250 """
2250 """
2251
2251
2252 ui.warn(_("(the rawcommit command is deprecated)\n"))
2252 ui.warn(_("(the rawcommit command is deprecated)\n"))
2253
2253
2254 message = cmdutil.logmessage(opts)
2254 message = cmdutil.logmessage(opts)
2255
2255
2256 files = cmdutil.match(repo, pats, opts).files()
2256 files = cmdutil.match(repo, pats, opts).files()
2257 if opts.get('files'):
2257 if opts.get('files'):
2258 files += open(opts['files']).read().splitlines()
2258 files += open(opts['files']).read().splitlines()
2259
2259
2260 parents = [repo.lookup(p) for p in opts['parent']]
2260 parents = [repo.lookup(p) for p in opts['parent']]
2261
2261
2262 try:
2262 try:
2263 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2263 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2264 except ValueError, inst:
2264 except ValueError, inst:
2265 raise util.Abort(str(inst))
2265 raise util.Abort(str(inst))
2266
2266
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.
    """
    # nothing to recover: report failure to the dispatcher
    if not repo.recover():
        return 1
    # a journal was rolled back; verify the repository afterwards and
    # propagate the verification result as the exit status
    return hg.verify(repo)
2279
2279
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. -A/--after can be used to remove only
    files that have already been deleted, -f/--force can be used to
    force deletion, and -Af can be used to remove files from the next
    revision without deleting them.

    The following table details the behavior of remove for different
    file states (columns) and option combinations (rows). The file
    states are Added, Clean, Modified and Missing (as reported by hg
    status). The actions are Warn, Remove (from branch) and Delete
    (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    # status tuple layout: 0=modified, 1=added, 3=deleted(missing),
    # 6=clean; removed (2), unknown (4) and ignored (5) are not needed
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    # helper: explain why a matched file was NOT scheduled for removal
    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    # partition matched files into "remove" (drop from branch, maybe
    # delete on disk) and "forget" (undo a pending add) per the table
    # in the docstring
    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    # report each scheduled file unless it was named exactly (quiet mode)
    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    # with --after the files are already gone from disk, so skip unlink
    repo.remove(remove, unlink=not after)
2336
2336
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see hg revert.
    """
    # hold the working-directory lock while cmdutil.copy records the
    # rename; release it whether or not the copy succeeds
    wlock = repo.wlock(False)
    try:
        result = cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
    return result
2356
2356
def resolve(ui, repo, *pats, **opts):
    """retry file merges from a merge or update

    This command will cleanly retry unresolved file merges using file
    revisions preserved from the last update or merge. To attempt to
    resolve all unresolved files, use the -a/--all switch.

    If a conflict is resolved manually, please note that the changes
    will be overwritten if the merge is retried with resolve. The
    -m/--mark switch should be used to mark the file as resolved.

    This command will also allow listing resolved files and manually
    marking and unmarking files as resolved. All files must be marked
    as resolved before the new commits are permitted.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]

    # --list, --mark and --unmark are mutually exclusive
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    # iterate every file recorded in the merge state; act only on the
    # ones selected by the patterns (or all of them with --all)
    for f in ms:
        if m(f):
            if show:
                # merge-state code is 'u'/'r'; display as U/R
                ui.write("%s %s\n" % (ms[f].upper(), f))
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]
                # other side of the merge: the last working-dir parent
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                # resolve file
                ms.resolve(f, wctx, mctx)

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")
2411
2411
def revert(ui, repo, *pats, **opts):
    """restore individual files or directories to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working directory parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r/--rev option, revert the given files or directories
    to their contents as of a specific revision. This can be helpful
    to "roll back" some or all of an earlier change. See 'hg help
    dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable mode
    of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    # --date is translated into the equivalent --rev; both together is an error
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts.get('all'):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts.get('rev')]
    node = ctx.node()
    mf = ctx.manifest()
    # pmf is the first-parent manifest; only materialized lazily below
    # when the target revision differs from the parent
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names maps abs path -> (relative path for display, exact-match flag)
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.

        m = cmdutil.match(repo, pats, opts)
        # suppress "file not found" complaints during the dirstate walk;
        # the manifest walk below will report genuinely missing files
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # only warn about a path if neither it nor anything under it
            # was already picked up by the dirstate walk
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        # only the first four status lists matter: modified, added,
        # removed, deleted
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(set, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # message chooser: a file in state 'a' is forgotten, not removed
        def removeforget(abs):
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # action buckets: ([files], status message or message-factory)
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in sorted(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            # queue abs into an action bucket, optionally backing up the
            # working copy first, and report what is being done
            def handle(xlist, dobackup):
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for-else: abs matched no status set, i.e. the file is
                # unchanged in the dirstate
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            # write file f's content and flags from the target revision
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    # best effort: the file may already be gone
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            # undeleted files: same clean-vs-lookup choice as above
            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        wlock.release()
2628
2628
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work is done by the repository object
    repo.rollback()
2656
2656
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.
    """
    # emit the repository root followed by a newline
    ui.write("%s\n" % repo.root)
2663
2663
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A and -E options to log to files.
    """

    # --stdio: speak the ssh wire protocol on stdin/stdout instead of HTTP
    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # prefer the repo's pristine ui (pre per-repo config) when available
    baseui = repo and repo.baseui or ui
    # command-line web options override the [web] config section
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            baseui.setconfig("web", o, str(opts[o]))
            # keep the repo-level ui in sync when it is a distinct object
            if (repo is not None) and (repo.ui != baseui):
                repo.ui.setconfig("web", o, str(opts[o]))

    # without a repository we can only serve a multi-repo webdir
    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    # adapter consumed by cmdutil.service (handles daemonizing etc.)
    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(baseui, repo)

            # the rest of init only produces the verbose startup banner
            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            # omit the port suffix for plain HTTP on port 80
            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                # bracket IPv6 literals for URL display
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2727
2727
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c/--clean,
    -i/--ignored, -C/--copies or -A/--all is given. Unless options
    described with "show only ..." are given, the options -mardu are
    used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = missing (deleted by non-hg command, but still tracked)
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    # -0/--print0 terminates entries with NUL for xargs-style consumers
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    # states explicitly requested via --modified, --added, etc.
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # default selection: -mard when quiet, -mardu otherwise
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    # pair each state name with its one-letter code and its file list
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        # record copy relationships whose destination (or source) is among
        # the added files, so the origin can be printed below the A entry
        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                # -n/--no-status: drop the leading status-code column
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    # print the copy/rename source on the following line
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
2799
2799
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    if len(names) != len(set(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        # these names have special meaning to revision lookup
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        # --local removes only local tags; otherwise only global ones
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                if expectedtype == 'global':
                    raise util.Abort(_('tag \'%s\' is not a global tag') % n)
                else:
                    raise util.Abort(_('tag \'%s\' is not a local tag') % n)
        # tagging the null revision is how a tag is removed
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    # refuse to pick a parent implicitly during an uncommitted merge
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2865
2865
def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    """

    # full hashes with --debug, short ones otherwise
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    # tagslist() is ordered oldest-first; display newest-first
    for t, n in reversed(repo.tagslist()):
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except error.LookupError:
            # tag points at an unknown changeset; no local rev number
            # NOTE(review): r is assigned here but nothing is written for
            # this tag (the write below is in the else branch) -- confirm
            # this is the intended behavior
            r = "    ?:%s" % hn
        else:
            # pad the tag name to a 30-column field (display width aware)
            spaces = " " * (30 - encoding.colwidth(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2896
2894
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # the tip is always the highest-numbered revision: len(repo) - 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(repo[len(repo) - 1])
2910
2908
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    # fold the mandatory first filename in with any extras
    fnames = (fname1,) + fnames

    lock = repo.lock()
    try:
        for fname in fnames:
            # open the bundle (possibly over http) and feed it to the repo
            fobj = url.open(ui, fname)
            bundle = changegroup.readbundle(fobj, fname)
            # keep only the head-count change from the last bundle applied
            modheads = repo.addchangegroup(bundle, 'unbundle',
                                           'bundle:' + fname)
    finally:
        lock.release()

    return postincoming(ui, repo, modheads, opts.get('update'), None)
2929
2927
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified
    revision, or the tip of the current branch if none is specified.
    Use null as the revision to remove the working copy (like 'hg
    clone -U').

    When the working directory contains no uncommitted changes, it
    will be replaced by the state of the requested revision from the
    repository. When the requested revision is on a different branch,
    the working directory will additionally be switched to that
    branch.

    When there are uncommitted changes, use option -C to discard them,
    forcibly replacing the state of the working directory with the
    requested revision.

    When there are uncommitted changes and option -C is not used, and
    the parent revision and requested revision are on the same branch,
    and one of them is an ancestor of the other, then the new working
    directory will contain the requested revision merged with the
    uncommitted changes. Otherwise, the update will fail with a
    suggestion to use 'merge' or 'update -C' instead.

    If you want to update just one file to an older revision, use
    revert.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # the target may be given positionally (node) or via -r (rev),
    # but not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # resolve -d/--date to the latest matching revision
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        # -C/--clean: discard uncommitted changes
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2975
2973
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all the real work is done (and reported) by hg.verify();
    # its result becomes the command's exit status
    return hg.verify(repo)
2987
2985
def version_(ui):
    """output version and copyright information"""
    # the version line is always printed; the copyright notice is
    # status output and therefore suppressed by --quiet
    banner = _("Mercurial Distributed SCM (version %s)\n") % util.version()
    ui.write(banner)
    notice = _(
        "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
2998
2996
2999 # Command options and aliases are listed here, alphabetically
2997 # Command options and aliases are listed here, alphabetically
3000
2998
# Shared option tables. Each entry is a 4-tuple:
# (short flag, long flag, default value, help text).

# options accepted by every hg command
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# -n/--dry-run for commands that can preview their effects
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# options for commands that talk to a remote repository
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

# -I/-X pattern filters for commands that walk the working directory
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

# commit-message options
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

# commit-metadata options (date and user)
commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

# changeset-display templating options
templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

# options shared by log-like commands
logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

# basic diff-generation options
diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

# extended diff-generation options (whitespace handling, context)
diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

# rename-detection option for addremove-like commands
similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'))
]
3078
3076
3079 table = {
3077 table = {
3080 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3078 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3081 "addremove":
3079 "addremove":
3082 (addremove, similarityopts + walkopts + dryrunopts,
3080 (addremove, similarityopts + walkopts + dryrunopts,
3083 _('[OPTION]... [FILE]...')),
3081 _('[OPTION]... [FILE]...')),
3084 "^annotate|blame":
3082 "^annotate|blame":
3085 (annotate,
3083 (annotate,
3086 [('r', 'rev', '', _('annotate the specified revision')),
3084 [('r', 'rev', '', _('annotate the specified revision')),
3087 ('f', 'follow', None, _('follow file copies and renames')),
3085 ('f', 'follow', None, _('follow file copies and renames')),
3088 ('a', 'text', None, _('treat all files as text')),
3086 ('a', 'text', None, _('treat all files as text')),
3089 ('u', 'user', None, _('list the author (long with -v)')),
3087 ('u', 'user', None, _('list the author (long with -v)')),
3090 ('d', 'date', None, _('list the date (short with -q)')),
3088 ('d', 'date', None, _('list the date (short with -q)')),
3091 ('n', 'number', None, _('list the revision number (default)')),
3089 ('n', 'number', None, _('list the revision number (default)')),
3092 ('c', 'changeset', None, _('list the changeset')),
3090 ('c', 'changeset', None, _('list the changeset')),
3093 ('l', 'line-number', None,
3091 ('l', 'line-number', None,
3094 _('show line number at the first appearance'))
3092 _('show line number at the first appearance'))
3095 ] + walkopts,
3093 ] + walkopts,
3096 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3094 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3097 "archive":
3095 "archive":
3098 (archive,
3096 (archive,
3099 [('', 'no-decode', None, _('do not pass files through decoders')),
3097 [('', 'no-decode', None, _('do not pass files through decoders')),
3100 ('p', 'prefix', '', _('directory prefix for files in archive')),
3098 ('p', 'prefix', '', _('directory prefix for files in archive')),
3101 ('r', 'rev', '', _('revision to distribute')),
3099 ('r', 'rev', '', _('revision to distribute')),
3102 ('t', 'type', '', _('type of distribution to create')),
3100 ('t', 'type', '', _('type of distribution to create')),
3103 ] + walkopts,
3101 ] + walkopts,
3104 _('[OPTION]... DEST')),
3102 _('[OPTION]... DEST')),
3105 "backout":
3103 "backout":
3106 (backout,
3104 (backout,
3107 [('', 'merge', None,
3105 [('', 'merge', None,
3108 _('merge with old dirstate parent after backout')),
3106 _('merge with old dirstate parent after backout')),
3109 ('', 'parent', '', _('parent to choose when backing out merge')),
3107 ('', 'parent', '', _('parent to choose when backing out merge')),
3110 ('r', 'rev', '', _('revision to backout')),
3108 ('r', 'rev', '', _('revision to backout')),
3111 ] + walkopts + commitopts + commitopts2,
3109 ] + walkopts + commitopts + commitopts2,
3112 _('[OPTION]... [-r] REV')),
3110 _('[OPTION]... [-r] REV')),
3113 "bisect":
3111 "bisect":
3114 (bisect,
3112 (bisect,
3115 [('r', 'reset', False, _('reset bisect state')),
3113 [('r', 'reset', False, _('reset bisect state')),
3116 ('g', 'good', False, _('mark changeset good')),
3114 ('g', 'good', False, _('mark changeset good')),
3117 ('b', 'bad', False, _('mark changeset bad')),
3115 ('b', 'bad', False, _('mark changeset bad')),
3118 ('s', 'skip', False, _('skip testing changeset')),
3116 ('s', 'skip', False, _('skip testing changeset')),
3119 ('c', 'command', '', _('use command to check changeset state')),
3117 ('c', 'command', '', _('use command to check changeset state')),
3120 ('U', 'noupdate', False, _('do not update to target'))],
3118 ('U', 'noupdate', False, _('do not update to target'))],
3121 _("[-gbsr] [-c CMD] [REV]")),
3119 _("[-gbsr] [-c CMD] [REV]")),
3122 "branch":
3120 "branch":
3123 (branch,
3121 (branch,
3124 [('f', 'force', None,
3122 [('f', 'force', None,
3125 _('set branch name even if it shadows an existing branch')),
3123 _('set branch name even if it shadows an existing branch')),
3126 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3124 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3127 _('[-fC] [NAME]')),
3125 _('[-fC] [NAME]')),
3128 "branches":
3126 "branches":
3129 (branches,
3127 (branches,
3130 [('a', 'active', False,
3128 [('a', 'active', False,
3131 _('show only branches that have unmerged heads'))],
3129 _('show only branches that have unmerged heads'))],
3132 _('[-a]')),
3130 _('[-a]')),
3133 "bundle":
3131 "bundle":
3134 (bundle,
3132 (bundle,
3135 [('f', 'force', None,
3133 [('f', 'force', None,
3136 _('run even when remote repository is unrelated')),
3134 _('run even when remote repository is unrelated')),
3137 ('r', 'rev', [],
3135 ('r', 'rev', [],
3138 _('a changeset up to which you would like to bundle')),
3136 _('a changeset up to which you would like to bundle')),
3139 ('', 'base', [],
3137 ('', 'base', [],
3140 _('a base changeset to specify instead of a destination')),
3138 _('a base changeset to specify instead of a destination')),
3141 ('a', 'all', None, _('bundle all changesets in the repository')),
3139 ('a', 'all', None, _('bundle all changesets in the repository')),
3142 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3140 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3143 ] + remoteopts,
3141 ] + remoteopts,
3144 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3142 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3145 "cat":
3143 "cat":
3146 (cat,
3144 (cat,
3147 [('o', 'output', '', _('print output to file with formatted name')),
3145 [('o', 'output', '', _('print output to file with formatted name')),
3148 ('r', 'rev', '', _('print the given revision')),
3146 ('r', 'rev', '', _('print the given revision')),
3149 ('', 'decode', None, _('apply any matching decode filter')),
3147 ('', 'decode', None, _('apply any matching decode filter')),
3150 ] + walkopts,
3148 ] + walkopts,
3151 _('[OPTION]... FILE...')),
3149 _('[OPTION]... FILE...')),
3152 "^clone":
3150 "^clone":
3153 (clone,
3151 (clone,
3154 [('U', 'noupdate', None,
3152 [('U', 'noupdate', None,
3155 _('the clone will only contain a repository (no working copy)')),
3153 _('the clone will only contain a repository (no working copy)')),
3156 ('r', 'rev', [],
3154 ('r', 'rev', [],
3157 _('a changeset you would like to have after cloning')),
3155 _('a changeset you would like to have after cloning')),
3158 ('', 'pull', None, _('use pull protocol to copy metadata')),
3156 ('', 'pull', None, _('use pull protocol to copy metadata')),
3159 ('', 'uncompressed', None,
3157 ('', 'uncompressed', None,
3160 _('use uncompressed transfer (fast over LAN)')),
3158 _('use uncompressed transfer (fast over LAN)')),
3161 ] + remoteopts,
3159 ] + remoteopts,
3162 _('[OPTION]... SOURCE [DEST]')),
3160 _('[OPTION]... SOURCE [DEST]')),
3163 "^commit|ci":
3161 "^commit|ci":
3164 (commit,
3162 (commit,
3165 [('A', 'addremove', None,
3163 [('A', 'addremove', None,
3166 _('mark new/missing files as added/removed before committing')),
3164 _('mark new/missing files as added/removed before committing')),
3167 ('', 'close-branch', None,
3165 ('', 'close-branch', None,
3168 _('mark a branch as closed, hiding it from the branch list')),
3166 _('mark a branch as closed, hiding it from the branch list')),
3169 ] + walkopts + commitopts + commitopts2,
3167 ] + walkopts + commitopts + commitopts2,
3170 _('[OPTION]... [FILE]...')),
3168 _('[OPTION]... [FILE]...')),
3171 "copy|cp":
3169 "copy|cp":
3172 (copy,
3170 (copy,
3173 [('A', 'after', None, _('record a copy that has already occurred')),
3171 [('A', 'after', None, _('record a copy that has already occurred')),
3174 ('f', 'force', None,
3172 ('f', 'force', None,
3175 _('forcibly copy over an existing managed file')),
3173 _('forcibly copy over an existing managed file')),
3176 ] + walkopts + dryrunopts,
3174 ] + walkopts + dryrunopts,
3177 _('[OPTION]... [SOURCE]... DEST')),
3175 _('[OPTION]... [SOURCE]... DEST')),
3178 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3176 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3179 "debugcheckstate": (debugcheckstate, []),
3177 "debugcheckstate": (debugcheckstate, []),
3180 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3178 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3181 "debugcomplete":
3179 "debugcomplete":
3182 (debugcomplete,
3180 (debugcomplete,
3183 [('o', 'options', None, _('show the command options'))],
3181 [('o', 'options', None, _('show the command options'))],
3184 _('[-o] CMD')),
3182 _('[-o] CMD')),
3185 "debugdate":
3183 "debugdate":
3186 (debugdate,
3184 (debugdate,
3187 [('e', 'extended', None, _('try extended date formats'))],
3185 [('e', 'extended', None, _('try extended date formats'))],
3188 _('[-e] DATE [RANGE]')),
3186 _('[-e] DATE [RANGE]')),
3189 "debugdata": (debugdata, [], _('FILE REV')),
3187 "debugdata": (debugdata, [], _('FILE REV')),
3190 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3188 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3191 "debugindex": (debugindex, [], _('FILE')),
3189 "debugindex": (debugindex, [], _('FILE')),
3192 "debugindexdot": (debugindexdot, [], _('FILE')),
3190 "debugindexdot": (debugindexdot, [], _('FILE')),
3193 "debuginstall": (debuginstall, []),
3191 "debuginstall": (debuginstall, []),
3194 "debugrawcommit|rawcommit":
3192 "debugrawcommit|rawcommit":
3195 (rawcommit,
3193 (rawcommit,
3196 [('p', 'parent', [], _('parent')),
3194 [('p', 'parent', [], _('parent')),
3197 ('F', 'files', '', _('file list'))
3195 ('F', 'files', '', _('file list'))
3198 ] + commitopts + commitopts2,
3196 ] + commitopts + commitopts2,
3199 _('[OPTION]... [FILE]...')),
3197 _('[OPTION]... [FILE]...')),
3200 "debugrebuildstate":
3198 "debugrebuildstate":
3201 (debugrebuildstate,
3199 (debugrebuildstate,
3202 [('r', 'rev', '', _('revision to rebuild to'))],
3200 [('r', 'rev', '', _('revision to rebuild to'))],
3203 _('[-r REV] [REV]')),
3201 _('[-r REV] [REV]')),
3204 "debugrename":
3202 "debugrename":
3205 (debugrename,
3203 (debugrename,
3206 [('r', 'rev', '', _('revision to debug'))],
3204 [('r', 'rev', '', _('revision to debug'))],
3207 _('[-r REV] FILE')),
3205 _('[-r REV] FILE')),
3208 "debugsetparents":
3206 "debugsetparents":
3209 (debugsetparents, [], _('REV1 [REV2]')),
3207 (debugsetparents, [], _('REV1 [REV2]')),
3210 "debugstate":
3208 "debugstate":
3211 (debugstate,
3209 (debugstate,
3212 [('', 'nodates', None, _('do not display the saved mtime'))],
3210 [('', 'nodates', None, _('do not display the saved mtime'))],
3213 _('[OPTION]...')),
3211 _('[OPTION]...')),
3214 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3212 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3215 "^diff":
3213 "^diff":
3216 (diff,
3214 (diff,
3217 [('r', 'rev', [], _('revision')),
3215 [('r', 'rev', [], _('revision')),
3218 ('c', 'change', '', _('change made by revision'))
3216 ('c', 'change', '', _('change made by revision'))
3219 ] + diffopts + diffopts2 + walkopts,
3217 ] + diffopts + diffopts2 + walkopts,
3220 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3218 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3221 "^export":
3219 "^export":
3222 (export,
3220 (export,
3223 [('o', 'output', '', _('print output to file with formatted name')),
3221 [('o', 'output', '', _('print output to file with formatted name')),
3224 ('', 'switch-parent', None, _('diff against the second parent'))
3222 ('', 'switch-parent', None, _('diff against the second parent'))
3225 ] + diffopts,
3223 ] + diffopts,
3226 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3224 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3227 "grep":
3225 "grep":
3228 (grep,
3226 (grep,
3229 [('0', 'print0', None, _('end fields with NUL')),
3227 [('0', 'print0', None, _('end fields with NUL')),
3230 ('', 'all', None, _('print all revisions that match')),
3228 ('', 'all', None, _('print all revisions that match')),
3231 ('f', 'follow', None,
3229 ('f', 'follow', None,
3232 _('follow changeset history, or file history across copies and renames')),
3230 _('follow changeset history, or file history across copies and renames')),
3233 ('i', 'ignore-case', None, _('ignore case when matching')),
3231 ('i', 'ignore-case', None, _('ignore case when matching')),
3234 ('l', 'files-with-matches', None,
3232 ('l', 'files-with-matches', None,
3235 _('print only filenames and revisions that match')),
3233 _('print only filenames and revisions that match')),
3236 ('n', 'line-number', None, _('print matching line numbers')),
3234 ('n', 'line-number', None, _('print matching line numbers')),
3237 ('r', 'rev', [], _('search in given revision range')),
3235 ('r', 'rev', [], _('search in given revision range')),
3238 ('u', 'user', None, _('list the author (long with -v)')),
3236 ('u', 'user', None, _('list the author (long with -v)')),
3239 ('d', 'date', None, _('list the date (short with -q)')),
3237 ('d', 'date', None, _('list the date (short with -q)')),
3240 ] + walkopts,
3238 ] + walkopts,
3241 _('[OPTION]... PATTERN [FILE]...')),
3239 _('[OPTION]... PATTERN [FILE]...')),
3242 "heads":
3240 "heads":
3243 (heads,
3241 (heads,
3244 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3242 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3245 ('a', 'active', False,
3243 ('a', 'active', False,
3246 _('show only the active heads from open branches')),
3244 _('show only the active heads from open branches')),
3247 ] + templateopts,
3245 ] + templateopts,
3248 _('[-r REV] [REV]...')),
3246 _('[-r REV] [REV]...')),
3249 "help": (help_, [], _('[TOPIC]')),
3247 "help": (help_, [], _('[TOPIC]')),
3250 "identify|id":
3248 "identify|id":
3251 (identify,
3249 (identify,
3252 [('r', 'rev', '', _('identify the specified revision')),
3250 [('r', 'rev', '', _('identify the specified revision')),
3253 ('n', 'num', None, _('show local revision number')),
3251 ('n', 'num', None, _('show local revision number')),
3254 ('i', 'id', None, _('show global revision id')),
3252 ('i', 'id', None, _('show global revision id')),
3255 ('b', 'branch', None, _('show branch')),
3253 ('b', 'branch', None, _('show branch')),
3256 ('t', 'tags', None, _('show tags'))],
3254 ('t', 'tags', None, _('show tags'))],
3257 _('[-nibt] [-r REV] [SOURCE]')),
3255 _('[-nibt] [-r REV] [SOURCE]')),
3258 "import|patch":
3256 "import|patch":
3259 (import_,
3257 (import_,
3260 [('p', 'strip', 1,
3258 [('p', 'strip', 1,
3261 _('directory strip option for patch. This has the same '
3259 _('directory strip option for patch. This has the same '
3262 'meaning as the corresponding patch option')),
3260 'meaning as the corresponding patch option')),
3263 ('b', 'base', '', _('base path')),
3261 ('b', 'base', '', _('base path')),
3264 ('f', 'force', None,
3262 ('f', 'force', None,
3265 _('skip check for outstanding uncommitted changes')),
3263 _('skip check for outstanding uncommitted changes')),
3266 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3264 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3267 ('', 'exact', None,
3265 ('', 'exact', None,
3268 _('apply patch to the nodes from which it was generated')),
3266 _('apply patch to the nodes from which it was generated')),
3269 ('', 'import-branch', None,
3267 ('', 'import-branch', None,
3270 _('use any branch information in patch (implied by --exact)'))] +
3268 _('use any branch information in patch (implied by --exact)'))] +
3271 commitopts + commitopts2 + similarityopts,
3269 commitopts + commitopts2 + similarityopts,
3272 _('[OPTION]... PATCH...')),
3270 _('[OPTION]... PATCH...')),
3273 "incoming|in":
3271 "incoming|in":
3274 (incoming,
3272 (incoming,
3275 [('f', 'force', None,
3273 [('f', 'force', None,
3276 _('run even when remote repository is unrelated')),
3274 _('run even when remote repository is unrelated')),
3277 ('n', 'newest-first', None, _('show newest record first')),
3275 ('n', 'newest-first', None, _('show newest record first')),
3278 ('', 'bundle', '', _('file to store the bundles into')),
3276 ('', 'bundle', '', _('file to store the bundles into')),
3279 ('r', 'rev', [],
3277 ('r', 'rev', [],
3280 _('a specific revision up to which you would like to pull')),
3278 _('a specific revision up to which you would like to pull')),
3281 ] + logopts + remoteopts,
3279 ] + logopts + remoteopts,
3282 _('[-p] [-n] [-M] [-f] [-r REV]...'
3280 _('[-p] [-n] [-M] [-f] [-r REV]...'
3283 ' [--bundle FILENAME] [SOURCE]')),
3281 ' [--bundle FILENAME] [SOURCE]')),
3284 "^init":
3282 "^init":
3285 (init,
3283 (init,
3286 remoteopts,
3284 remoteopts,
3287 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3285 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3288 "locate":
3286 "locate":
3289 (locate,
3287 (locate,
3290 [('r', 'rev', '', _('search the repository as it stood at REV')),
3288 [('r', 'rev', '', _('search the repository as it stood at REV')),
3291 ('0', 'print0', None,
3289 ('0', 'print0', None,
3292 _('end filenames with NUL, for use with xargs')),
3290 _('end filenames with NUL, for use with xargs')),
3293 ('f', 'fullpath', None,
3291 ('f', 'fullpath', None,
3294 _('print complete paths from the filesystem root')),
3292 _('print complete paths from the filesystem root')),
3295 ] + walkopts,
3293 ] + walkopts,
3296 _('[OPTION]... [PATTERN]...')),
3294 _('[OPTION]... [PATTERN]...')),
3297 "^log|history":
3295 "^log|history":
3298 (log,
3296 (log,
3299 [('f', 'follow', None,
3297 [('f', 'follow', None,
3300 _('follow changeset history, or file history across copies and renames')),
3298 _('follow changeset history, or file history across copies and renames')),
3301 ('', 'follow-first', None,
3299 ('', 'follow-first', None,
3302 _('only follow the first parent of merge changesets')),
3300 _('only follow the first parent of merge changesets')),
3303 ('d', 'date', '', _('show revisions matching date spec')),
3301 ('d', 'date', '', _('show revisions matching date spec')),
3304 ('C', 'copies', None, _('show copied files')),
3302 ('C', 'copies', None, _('show copied files')),
3305 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3303 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3306 ('r', 'rev', [], _('show the specified revision or range')),
3304 ('r', 'rev', [], _('show the specified revision or range')),
3307 ('', 'removed', None, _('include revisions where files were removed')),
3305 ('', 'removed', None, _('include revisions where files were removed')),
3308 ('m', 'only-merges', None, _('show only merges')),
3306 ('m', 'only-merges', None, _('show only merges')),
3309 ('u', 'user', [], _('revisions committed by user')),
3307 ('u', 'user', [], _('revisions committed by user')),
3310 ('b', 'only-branch', [],
3308 ('b', 'only-branch', [],
3311 _('show only changesets within the given named branch')),
3309 _('show only changesets within the given named branch')),
3312 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3310 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3313 ] + logopts + walkopts,
3311 ] + logopts + walkopts,
3314 _('[OPTION]... [FILE]')),
3312 _('[OPTION]... [FILE]')),
3315 "manifest":
3313 "manifest":
3316 (manifest,
3314 (manifest,
3317 [('r', 'rev', '', _('revision to display'))],
3315 [('r', 'rev', '', _('revision to display'))],
3318 _('[-r REV]')),
3316 _('[-r REV]')),
3319 "^merge":
3317 "^merge":
3320 (merge,
3318 (merge,
3321 [('f', 'force', None, _('force a merge with outstanding changes')),
3319 [('f', 'force', None, _('force a merge with outstanding changes')),
3322 ('r', 'rev', '', _('revision to merge')),
3320 ('r', 'rev', '', _('revision to merge')),
3323 ],
3321 ],
3324 _('[-f] [[-r] REV]')),
3322 _('[-f] [[-r] REV]')),
3325 "outgoing|out":
3323 "outgoing|out":
3326 (outgoing,
3324 (outgoing,
3327 [('f', 'force', None,
3325 [('f', 'force', None,
3328 _('run even when remote repository is unrelated')),
3326 _('run even when remote repository is unrelated')),
3329 ('r', 'rev', [],
3327 ('r', 'rev', [],
3330 _('a specific revision up to which you would like to push')),
3328 _('a specific revision up to which you would like to push')),
3331 ('n', 'newest-first', None, _('show newest record first')),
3329 ('n', 'newest-first', None, _('show newest record first')),
3332 ] + logopts + remoteopts,
3330 ] + logopts + remoteopts,
3333 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3331 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3334 "^parents":
3332 "^parents":
3335 (parents,
3333 (parents,
3336 [('r', 'rev', '', _('show parents from the specified revision')),
3334 [('r', 'rev', '', _('show parents from the specified revision')),
3337 ] + templateopts,
3335 ] + templateopts,
3338 _('hg parents [-r REV] [FILE]')),
3336 _('hg parents [-r REV] [FILE]')),
3339 "paths": (paths, [], _('[NAME]')),
3337 "paths": (paths, [], _('[NAME]')),
3340 "^pull":
3338 "^pull":
3341 (pull,
3339 (pull,
3342 [('u', 'update', None,
3340 [('u', 'update', None,
3343 _('update to new tip if changesets were pulled')),
3341 _('update to new tip if changesets were pulled')),
3344 ('f', 'force', None,
3342 ('f', 'force', None,
3345 _('run even when remote repository is unrelated')),
3343 _('run even when remote repository is unrelated')),
3346 ('r', 'rev', [],
3344 ('r', 'rev', [],
3347 _('a specific revision up to which you would like to pull')),
3345 _('a specific revision up to which you would like to pull')),
3348 ] + remoteopts,
3346 ] + remoteopts,
3349 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3347 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3350 "^push":
3348 "^push":
3351 (push,
3349 (push,
3352 [('f', 'force', None, _('force push')),
3350 [('f', 'force', None, _('force push')),
3353 ('r', 'rev', [],
3351 ('r', 'rev', [],
3354 _('a specific revision up to which you would like to push')),
3352 _('a specific revision up to which you would like to push')),
3355 ] + remoteopts,
3353 ] + remoteopts,
3356 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3354 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3357 "recover": (recover, []),
3355 "recover": (recover, []),
3358 "^remove|rm":
3356 "^remove|rm":
3359 (remove,
3357 (remove,
3360 [('A', 'after', None, _('record delete for missing files')),
3358 [('A', 'after', None, _('record delete for missing files')),
3361 ('f', 'force', None,
3359 ('f', 'force', None,
3362 _('remove (and delete) file even if added or modified')),
3360 _('remove (and delete) file even if added or modified')),
3363 ] + walkopts,
3361 ] + walkopts,
3364 _('[OPTION]... FILE...')),
3362 _('[OPTION]... FILE...')),
3365 "rename|mv":
3363 "rename|mv":
3366 (rename,
3364 (rename,
3367 [('A', 'after', None, _('record a rename that has already occurred')),
3365 [('A', 'after', None, _('record a rename that has already occurred')),
3368 ('f', 'force', None,
3366 ('f', 'force', None,
3369 _('forcibly copy over an existing managed file')),
3367 _('forcibly copy over an existing managed file')),
3370 ] + walkopts + dryrunopts,
3368 ] + walkopts + dryrunopts,
3371 _('[OPTION]... SOURCE... DEST')),
3369 _('[OPTION]... SOURCE... DEST')),
3372 "resolve":
3370 "resolve":
3373 (resolve,
3371 (resolve,
3374 [('a', 'all', None, _('remerge all unresolved files')),
3372 [('a', 'all', None, _('remerge all unresolved files')),
3375 ('l', 'list', None, _('list state of files needing merge')),
3373 ('l', 'list', None, _('list state of files needing merge')),
3376 ('m', 'mark', None, _('mark files as resolved')),
3374 ('m', 'mark', None, _('mark files as resolved')),
3377 ('u', 'unmark', None, _('unmark files as resolved'))]
3375 ('u', 'unmark', None, _('unmark files as resolved'))]
3378 + walkopts,
3376 + walkopts,
3379 _('[OPTION]... [FILE]...')),
3377 _('[OPTION]... [FILE]...')),
3380 "revert":
3378 "revert":
3381 (revert,
3379 (revert,
3382 [('a', 'all', None, _('revert all changes when no arguments given')),
3380 [('a', 'all', None, _('revert all changes when no arguments given')),
3383 ('d', 'date', '', _('tipmost revision matching date')),
3381 ('d', 'date', '', _('tipmost revision matching date')),
3384 ('r', 'rev', '', _('revision to revert to')),
3382 ('r', 'rev', '', _('revision to revert to')),
3385 ('', 'no-backup', None, _('do not save backup copies of files')),
3383 ('', 'no-backup', None, _('do not save backup copies of files')),
3386 ] + walkopts + dryrunopts,
3384 ] + walkopts + dryrunopts,
3387 _('[OPTION]... [-r REV] [NAME]...')),
3385 _('[OPTION]... [-r REV] [NAME]...')),
3388 "rollback": (rollback, []),
3386 "rollback": (rollback, []),
3389 "root": (root, []),
3387 "root": (root, []),
3390 "^serve":
3388 "^serve":
3391 (serve,
3389 (serve,
3392 [('A', 'accesslog', '', _('name of access log file to write to')),
3390 [('A', 'accesslog', '', _('name of access log file to write to')),
3393 ('d', 'daemon', None, _('run server in background')),
3391 ('d', 'daemon', None, _('run server in background')),
3394 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3392 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3395 ('E', 'errorlog', '', _('name of error log file to write to')),
3393 ('E', 'errorlog', '', _('name of error log file to write to')),
3396 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3394 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3397 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3395 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3398 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3396 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3399 ('n', 'name', '',
3397 ('n', 'name', '',
3400 _('name to show in web pages (default: working directory)')),
3398 _('name to show in web pages (default: working directory)')),
3401 ('', 'webdir-conf', '', _('name of the webdir config file'
3399 ('', 'webdir-conf', '', _('name of the webdir config file'
3402 ' (serve more than one repository)')),
3400 ' (serve more than one repository)')),
3403 ('', 'pid-file', '', _('name of file to write process ID to')),
3401 ('', 'pid-file', '', _('name of file to write process ID to')),
3404 ('', 'stdio', None, _('for remote clients')),
3402 ('', 'stdio', None, _('for remote clients')),
3405 ('t', 'templates', '', _('web templates to use')),
3403 ('t', 'templates', '', _('web templates to use')),
3406 ('', 'style', '', _('template style to use')),
3404 ('', 'style', '', _('template style to use')),
3407 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3405 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3408 ('', 'certificate', '', _('SSL certificate file'))],
3406 ('', 'certificate', '', _('SSL certificate file'))],
3409 _('[OPTION]...')),
3407 _('[OPTION]...')),
3410 "showconfig|debugconfig":
3408 "showconfig|debugconfig":
3411 (showconfig,
3409 (showconfig,
3412 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3410 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3413 _('[-u] [NAME]...')),
3411 _('[-u] [NAME]...')),
3414 "^status|st":
3412 "^status|st":
3415 (status,
3413 (status,
3416 [('A', 'all', None, _('show status of all files')),
3414 [('A', 'all', None, _('show status of all files')),
3417 ('m', 'modified', None, _('show only modified files')),
3415 ('m', 'modified', None, _('show only modified files')),
3418 ('a', 'added', None, _('show only added files')),
3416 ('a', 'added', None, _('show only added files')),
3419 ('r', 'removed', None, _('show only removed files')),
3417 ('r', 'removed', None, _('show only removed files')),
3420 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3418 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3421 ('c', 'clean', None, _('show only files without changes')),
3419 ('c', 'clean', None, _('show only files without changes')),
3422 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3420 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3423 ('i', 'ignored', None, _('show only ignored files')),
3421 ('i', 'ignored', None, _('show only ignored files')),
3424 ('n', 'no-status', None, _('hide status prefix')),
3422 ('n', 'no-status', None, _('hide status prefix')),
3425 ('C', 'copies', None, _('show source of copied files')),
3423 ('C', 'copies', None, _('show source of copied files')),
3426 ('0', 'print0', None,
3424 ('0', 'print0', None,
3427 _('end filenames with NUL, for use with xargs')),
3425 _('end filenames with NUL, for use with xargs')),
3428 ('', 'rev', [], _('show difference from revision')),
3426 ('', 'rev', [], _('show difference from revision')),
3429 ] + walkopts,
3427 ] + walkopts,
3430 _('[OPTION]... [FILE]...')),
3428 _('[OPTION]... [FILE]...')),
3431 "tag":
3429 "tag":
3432 (tag,
3430 (tag,
3433 [('f', 'force', None, _('replace existing tag')),
3431 [('f', 'force', None, _('replace existing tag')),
3434 ('l', 'local', None, _('make the tag local')),
3432 ('l', 'local', None, _('make the tag local')),
3435 ('r', 'rev', '', _('revision to tag')),
3433 ('r', 'rev', '', _('revision to tag')),
3436 ('', 'remove', None, _('remove a tag')),
3434 ('', 'remove', None, _('remove a tag')),
3437 # -l/--local is already there, commitopts cannot be used
3435 # -l/--local is already there, commitopts cannot be used
3438 ('m', 'message', '', _('use <text> as commit message')),
3436 ('m', 'message', '', _('use <text> as commit message')),
3439 ] + commitopts2,
3437 ] + commitopts2,
3440 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3438 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3441 "tags": (tags, []),
3439 "tags": (tags, []),
3442 "tip":
3440 "tip":
3443 (tip,
3441 (tip,
3444 [('p', 'patch', None, _('show patch')),
3442 [('p', 'patch', None, _('show patch')),
3445 ('g', 'git', None, _('use git extended diff format')),
3443 ('g', 'git', None, _('use git extended diff format')),
3446 ] + templateopts,
3444 ] + templateopts,
3447 _('[-p]')),
3445 _('[-p]')),
3448 "unbundle":
3446 "unbundle":
3449 (unbundle,
3447 (unbundle,
3450 [('u', 'update', None,
3448 [('u', 'update', None,
3451 _('update to new tip if changesets were unbundled'))],
3449 _('update to new tip if changesets were unbundled'))],
3452 _('[-u] FILE...')),
3450 _('[-u] FILE...')),
3453 "^update|up|checkout|co":
3451 "^update|up|checkout|co":
3454 (update,
3452 (update,
3455 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3453 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3456 ('d', 'date', '', _('tipmost revision matching date')),
3454 ('d', 'date', '', _('tipmost revision matching date')),
3457 ('r', 'rev', '', _('revision'))],
3455 ('r', 'rev', '', _('revision'))],
3458 _('[-C] [-d DATE] [[-r] REV]')),
3456 _('[-C] [-d DATE] [[-r] REV]')),
3459 "verify": (verify, []),
3457 "verify": (verify, []),
3460 "version": (version_, []),
3458 "version": (version_, []),
3461 }
3459 }
3462
3460
3463 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3461 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3464 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3462 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3465 optionalrepo = ("identify paths serve showconfig debugancestor")
3463 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,2175 +1,2173 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store, encoding
12 import lock, transaction, stat, errno, ui, store, encoding
13 import os, time, util, extensions, hook, inspect, error
13 import os, time, util, extensions, hook, inspect, error
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
16
17 from lock import release
17 from lock import release
18
18
19 class localrepository(repo.repository):
19 class localrepository(repo.repository):
20 capabilities = set(('lookup', 'changegroupsubset'))
20 capabilities = set(('lookup', 'changegroupsubset'))
21 supported = ('revlogv1', 'store', 'fncache')
21 supported = ('revlogv1', 'store', 'fncache')
22
22
23 def __init__(self, baseui, path=None, create=0):
23 def __init__(self, baseui, path=None, create=0):
24 repo.repository.__init__(self)
24 repo.repository.__init__(self)
25 self.root = os.path.realpath(path)
25 self.root = os.path.realpath(path)
26 self.path = os.path.join(self.root, ".hg")
26 self.path = os.path.join(self.root, ".hg")
27 self.origroot = path
27 self.origroot = path
28 self.opener = util.opener(self.path)
28 self.opener = util.opener(self.path)
29 self.wopener = util.opener(self.root)
29 self.wopener = util.opener(self.root)
30
30
31 if not os.path.isdir(self.path):
31 if not os.path.isdir(self.path):
32 if create:
32 if create:
33 if not os.path.exists(path):
33 if not os.path.exists(path):
34 os.mkdir(path)
34 os.mkdir(path)
35 os.mkdir(self.path)
35 os.mkdir(self.path)
36 requirements = ["revlogv1"]
36 requirements = ["revlogv1"]
37 if baseui.configbool('format', 'usestore', True):
37 if baseui.configbool('format', 'usestore', True):
38 os.mkdir(os.path.join(self.path, "store"))
38 os.mkdir(os.path.join(self.path, "store"))
39 requirements.append("store")
39 requirements.append("store")
40 if baseui.configbool('format', 'usefncache', True):
40 if baseui.configbool('format', 'usefncache', True):
41 requirements.append("fncache")
41 requirements.append("fncache")
42 # create an invalid changelog
42 # create an invalid changelog
43 self.opener("00changelog.i", "a").write(
43 self.opener("00changelog.i", "a").write(
44 '\0\0\0\2' # represents revlogv2
44 '\0\0\0\2' # represents revlogv2
45 ' dummy changelog to prevent using the old repo layout'
45 ' dummy changelog to prevent using the old repo layout'
46 )
46 )
47 reqfile = self.opener("requires", "w")
47 reqfile = self.opener("requires", "w")
48 for r in requirements:
48 for r in requirements:
49 reqfile.write("%s\n" % r)
49 reqfile.write("%s\n" % r)
50 reqfile.close()
50 reqfile.close()
51 else:
51 else:
52 raise error.RepoError(_("repository %s not found") % path)
52 raise error.RepoError(_("repository %s not found") % path)
53 elif create:
53 elif create:
54 raise error.RepoError(_("repository %s already exists") % path)
54 raise error.RepoError(_("repository %s already exists") % path)
55 else:
55 else:
56 # find requirements
56 # find requirements
57 requirements = []
57 requirements = []
58 try:
58 try:
59 requirements = self.opener("requires").read().splitlines()
59 requirements = self.opener("requires").read().splitlines()
60 for r in requirements:
60 for r in requirements:
61 if r not in self.supported:
61 if r not in self.supported:
62 raise error.RepoError(_("requirement '%s' not supported") % r)
62 raise error.RepoError(_("requirement '%s' not supported") % r)
63 except IOError, inst:
63 except IOError, inst:
64 if inst.errno != errno.ENOENT:
64 if inst.errno != errno.ENOENT:
65 raise
65 raise
66
66
67 self.store = store.store(requirements, self.path, util.opener)
67 self.store = store.store(requirements, self.path, util.opener)
68 self.spath = self.store.path
68 self.spath = self.store.path
69 self.sopener = self.store.opener
69 self.sopener = self.store.opener
70 self.sjoin = self.store.join
70 self.sjoin = self.store.join
71 self.opener.createmode = self.store.createmode
71 self.opener.createmode = self.store.createmode
72
72
73 self.baseui = baseui
73 self.baseui = baseui
74 self.ui = baseui.copy()
74 self.ui = baseui.copy()
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self._ubranchcache = None # UTF-8 version of branchcache
84 self._ubranchcache = None # UTF-8 version of branchcache
85 self._branchcachetip = None
85 self._branchcachetip = None
86 self.nodetagscache = None
86 self.nodetagscache = None
87 self.filterpats = {}
87 self.filterpats = {}
88 self._datafilters = {}
88 self._datafilters = {}
89 self._transref = self._lockref = self._wlockref = None
89 self._transref = self._lockref = self._wlockref = None
90
90
91 def __getattr__(self, name):
91 def __getattr__(self, name):
92 if name == 'changelog':
92 if name == 'changelog':
93 self.changelog = changelog.changelog(self.sopener)
93 self.changelog = changelog.changelog(self.sopener)
94 if 'HG_PENDING' in os.environ:
94 if 'HG_PENDING' in os.environ:
95 p = os.environ['HG_PENDING']
95 p = os.environ['HG_PENDING']
96 if p.startswith(self.root):
96 if p.startswith(self.root):
97 self.changelog.readpending('00changelog.i.a')
97 self.changelog.readpending('00changelog.i.a')
98 self.sopener.defversion = self.changelog.version
98 self.sopener.defversion = self.changelog.version
99 return self.changelog
99 return self.changelog
100 if name == 'manifest':
100 if name == 'manifest':
101 self.changelog
101 self.changelog
102 self.manifest = manifest.manifest(self.sopener)
102 self.manifest = manifest.manifest(self.sopener)
103 return self.manifest
103 return self.manifest
104 if name == 'dirstate':
104 if name == 'dirstate':
105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
106 return self.dirstate
106 return self.dirstate
107 else:
107 else:
108 raise AttributeError(name)
108 raise AttributeError(name)
109
109
110 def __getitem__(self, changeid):
110 def __getitem__(self, changeid):
111 if changeid == None:
111 if changeid == None:
112 return context.workingctx(self)
112 return context.workingctx(self)
113 return context.changectx(self, changeid)
113 return context.changectx(self, changeid)
114
114
115 def __nonzero__(self):
115 def __nonzero__(self):
116 return True
116 return True
117
117
118 def __len__(self):
118 def __len__(self):
119 return len(self.changelog)
119 return len(self.changelog)
120
120
121 def __iter__(self):
121 def __iter__(self):
122 for i in xrange(len(self)):
122 for i in xrange(len(self)):
123 yield i
123 yield i
124
124
125 def url(self):
125 def url(self):
126 return 'file:' + self.root
126 return 'file:' + self.root
127
127
128 def hook(self, name, throw=False, **args):
128 def hook(self, name, throw=False, **args):
129 return hook.hook(self.ui, self, name, throw, **args)
129 return hook.hook(self.ui, self, name, throw, **args)
130
130
131 tag_disallowed = ':\r\n'
131 tag_disallowed = ':\r\n'
132
132
133 def _tag(self, names, node, message, local, user, date, parent=None,
133 def _tag(self, names, node, message, local, user, date, parent=None,
134 extra={}):
134 extra={}):
135 use_dirstate = parent is None
135 use_dirstate = parent is None
136
136
137 if isinstance(names, str):
137 if isinstance(names, str):
138 allchars = names
138 allchars = names
139 names = (names,)
139 names = (names,)
140 else:
140 else:
141 allchars = ''.join(names)
141 allchars = ''.join(names)
142 for c in self.tag_disallowed:
142 for c in self.tag_disallowed:
143 if c in allchars:
143 if c in allchars:
144 raise util.Abort(_('%r cannot be used in a tag name') % c)
144 raise util.Abort(_('%r cannot be used in a tag name') % c)
145
145
146 for name in names:
146 for name in names:
147 self.hook('pretag', throw=True, node=hex(node), tag=name,
147 self.hook('pretag', throw=True, node=hex(node), tag=name,
148 local=local)
148 local=local)
149
149
150 def writetags(fp, names, munge, prevtags):
150 def writetags(fp, names, munge, prevtags):
151 fp.seek(0, 2)
151 fp.seek(0, 2)
152 if prevtags and prevtags[-1] != '\n':
152 if prevtags and prevtags[-1] != '\n':
153 fp.write('\n')
153 fp.write('\n')
154 for name in names:
154 for name in names:
155 m = munge and munge(name) or name
155 m = munge and munge(name) or name
156 if self._tagstypecache and name in self._tagstypecache:
156 if self._tagstypecache and name in self._tagstypecache:
157 old = self.tagscache.get(name, nullid)
157 old = self.tagscache.get(name, nullid)
158 fp.write('%s %s\n' % (hex(old), m))
158 fp.write('%s %s\n' % (hex(old), m))
159 fp.write('%s %s\n' % (hex(node), m))
159 fp.write('%s %s\n' % (hex(node), m))
160 fp.close()
160 fp.close()
161
161
162 prevtags = ''
162 prevtags = ''
163 if local:
163 if local:
164 try:
164 try:
165 fp = self.opener('localtags', 'r+')
165 fp = self.opener('localtags', 'r+')
166 except IOError:
166 except IOError:
167 fp = self.opener('localtags', 'a')
167 fp = self.opener('localtags', 'a')
168 else:
168 else:
169 prevtags = fp.read()
169 prevtags = fp.read()
170
170
171 # local tags are stored in the current charset
171 # local tags are stored in the current charset
172 writetags(fp, names, None, prevtags)
172 writetags(fp, names, None, prevtags)
173 for name in names:
173 for name in names:
174 self.hook('tag', node=hex(node), tag=name, local=local)
174 self.hook('tag', node=hex(node), tag=name, local=local)
175 return
175 return
176
176
177 if use_dirstate:
177 if use_dirstate:
178 try:
178 try:
179 fp = self.wfile('.hgtags', 'rb+')
179 fp = self.wfile('.hgtags', 'rb+')
180 except IOError:
180 except IOError:
181 fp = self.wfile('.hgtags', 'ab')
181 fp = self.wfile('.hgtags', 'ab')
182 else:
182 else:
183 prevtags = fp.read()
183 prevtags = fp.read()
184 else:
184 else:
185 try:
185 try:
186 prevtags = self.filectx('.hgtags', parent).data()
186 prevtags = self.filectx('.hgtags', parent).data()
187 except error.LookupError:
187 except error.LookupError:
188 pass
188 pass
189 fp = self.wfile('.hgtags', 'wb')
189 fp = self.wfile('.hgtags', 'wb')
190 if prevtags:
190 if prevtags:
191 fp.write(prevtags)
191 fp.write(prevtags)
192
192
193 # committed tags are stored in UTF-8
193 # committed tags are stored in UTF-8
194 writetags(fp, names, encoding.fromlocal, prevtags)
194 writetags(fp, names, encoding.fromlocal, prevtags)
195
195
196 if use_dirstate and '.hgtags' not in self.dirstate:
196 if use_dirstate and '.hgtags' not in self.dirstate:
197 self.add(['.hgtags'])
197 self.add(['.hgtags'])
198
198
199 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
199 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
200 extra=extra)
200 extra=extra)
201
201
202 for name in names:
202 for name in names:
203 self.hook('tag', node=hex(node), tag=name, local=local)
203 self.hook('tag', node=hex(node), tag=name, local=local)
204
204
205 return tagnode
205 return tagnode
206
206
207 def tag(self, names, node, message, local, user, date):
207 def tag(self, names, node, message, local, user, date):
208 '''tag a revision with one or more symbolic names.
208 '''tag a revision with one or more symbolic names.
209
209
210 names is a list of strings or, when adding a single tag, names may be a
210 names is a list of strings or, when adding a single tag, names may be a
211 string.
211 string.
212
212
213 if local is True, the tags are stored in a per-repository file.
213 if local is True, the tags are stored in a per-repository file.
214 otherwise, they are stored in the .hgtags file, and a new
214 otherwise, they are stored in the .hgtags file, and a new
215 changeset is committed with the change.
215 changeset is committed with the change.
216
216
217 keyword arguments:
217 keyword arguments:
218
218
219 local: whether to store tags in non-version-controlled file
219 local: whether to store tags in non-version-controlled file
220 (default False)
220 (default False)
221
221
222 message: commit message to use if committing
222 message: commit message to use if committing
223
223
224 user: name of user to use if committing
224 user: name of user to use if committing
225
225
226 date: date tuple to use if committing'''
226 date: date tuple to use if committing'''
227
227
228 for x in self.status()[:5]:
228 for x in self.status()[:5]:
229 if '.hgtags' in x:
229 if '.hgtags' in x:
230 raise util.Abort(_('working copy of .hgtags is changed '
230 raise util.Abort(_('working copy of .hgtags is changed '
231 '(please commit .hgtags manually)'))
231 '(please commit .hgtags manually)'))
232
232
233 self.tags() # instantiate the cache
233 self.tags() # instantiate the cache
234 self._tag(names, node, message, local, user, date)
234 self._tag(names, node, message, local, user, date)
235
235
236 def tags(self):
236 def tags(self):
237 '''return a mapping of tag to node'''
237 '''return a mapping of tag to node'''
238 if self.tagscache:
238 if self.tagscache:
239 return self.tagscache
239 return self.tagscache
240
240
241 globaltags = {}
241 globaltags = {}
242 tagtypes = {}
242 tagtypes = {}
243
243
244 def readtags(lines, fn, tagtype):
244 def readtags(lines, fn, tagtype):
245 filetags = {}
245 filetags = {}
246 count = 0
246 count = 0
247
247
248 def warn(msg):
248 def warn(msg):
249 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
249 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
250
250
251 for l in lines:
251 for l in lines:
252 count += 1
252 count += 1
253 if not l:
253 if not l:
254 continue
254 continue
255 s = l.split(" ", 1)
255 s = l.split(" ", 1)
256 if len(s) != 2:
256 if len(s) != 2:
257 warn(_("cannot parse entry"))
257 warn(_("cannot parse entry"))
258 continue
258 continue
259 node, key = s
259 node, key = s
260 key = encoding.tolocal(key.strip()) # stored in UTF-8
260 key = encoding.tolocal(key.strip()) # stored in UTF-8
261 try:
261 try:
262 bin_n = bin(node)
262 bin_n = bin(node)
263 except TypeError:
263 except TypeError:
264 warn(_("node '%s' is not well formed") % node)
264 warn(_("node '%s' is not well formed") % node)
265 continue
265 continue
266 if bin_n not in self.changelog.nodemap:
266 if bin_n not in self.changelog.nodemap:
267 warn(_("tag '%s' refers to unknown node") % key)
267 warn(_("tag '%s' refers to unknown node") % key)
268 continue
268 continue
269
269
270 h = []
270 h = []
271 if key in filetags:
271 if key in filetags:
272 n, h = filetags[key]
272 n, h = filetags[key]
273 h.append(n)
273 h.append(n)
274 filetags[key] = (bin_n, h)
274 filetags[key] = (bin_n, h)
275
275
276 for k, nh in filetags.iteritems():
276 for k, nh in filetags.iteritems():
277 if k not in globaltags:
277 if k not in globaltags:
278 globaltags[k] = nh
278 globaltags[k] = nh
279 tagtypes[k] = tagtype
279 tagtypes[k] = tagtype
280 continue
280 continue
281
281
282 # we prefer the global tag if:
282 # we prefer the global tag if:
283 # it supercedes us OR
283 # it supercedes us OR
284 # mutual supercedes and it has a higher rank
284 # mutual supercedes and it has a higher rank
285 # otherwise we win because we're tip-most
285 # otherwise we win because we're tip-most
286 an, ah = nh
286 an, ah = nh
287 bn, bh = globaltags[k]
287 bn, bh = globaltags[k]
288 if (bn != an and an in bh and
288 if (bn != an and an in bh and
289 (bn not in ah or len(bh) > len(ah))):
289 (bn not in ah or len(bh) > len(ah))):
290 an = bn
290 an = bn
291 ah.extend([n for n in bh if n not in ah])
291 ah.extend([n for n in bh if n not in ah])
292 globaltags[k] = an, ah
292 globaltags[k] = an, ah
293 tagtypes[k] = tagtype
293 tagtypes[k] = tagtype
294
294
295 # read the tags file from each head, ending with the tip
295 # read the tags file from each head, ending with the tip
296 f = None
296 f = None
297 for rev, node, fnode in self._hgtagsnodes():
297 for rev, node, fnode in self._hgtagsnodes():
298 f = (f and f.filectx(fnode) or
298 f = (f and f.filectx(fnode) or
299 self.filectx('.hgtags', fileid=fnode))
299 self.filectx('.hgtags', fileid=fnode))
300 readtags(f.data().splitlines(), f, "global")
300 readtags(f.data().splitlines(), f, "global")
301
301
302 try:
302 try:
303 data = encoding.fromlocal(self.opener("localtags").read())
303 data = encoding.fromlocal(self.opener("localtags").read())
304 # localtags are stored in the local character set
304 # localtags are stored in the local character set
305 # while the internal tag table is stored in UTF-8
305 # while the internal tag table is stored in UTF-8
306 readtags(data.splitlines(), "localtags", "local")
306 readtags(data.splitlines(), "localtags", "local")
307 except IOError:
307 except IOError:
308 pass
308 pass
309
309
310 self.tagscache = {}
310 self.tagscache = {}
311 self._tagstypecache = {}
311 self._tagstypecache = {}
312 for k, nh in globaltags.iteritems():
312 for k, nh in globaltags.iteritems():
313 n = nh[0]
313 n = nh[0]
314 if n != nullid:
314 if n != nullid:
315 self.tagscache[k] = n
315 self.tagscache[k] = n
316 self._tagstypecache[k] = tagtypes[k]
316 self._tagstypecache[k] = tagtypes[k]
317 self.tagscache['tip'] = self.changelog.tip()
317 self.tagscache['tip'] = self.changelog.tip()
318 return self.tagscache
318 return self.tagscache
319
319
320 def tagtype(self, tagname):
320 def tagtype(self, tagname):
321 '''
321 '''
322 return the type of the given tag. result can be:
322 return the type of the given tag. result can be:
323
323
324 'local' : a local tag
324 'local' : a local tag
325 'global' : a global tag
325 'global' : a global tag
326 None : tag does not exist
326 None : tag does not exist
327 '''
327 '''
328
328
329 self.tags()
329 self.tags()
330
330
331 return self._tagstypecache.get(tagname)
331 return self._tagstypecache.get(tagname)
332
332
333 def _hgtagsnodes(self):
333 def _hgtagsnodes(self):
334 heads = self.heads()
335 heads.reverse()
336 last = {}
334 last = {}
337 ret = []
335 ret = []
338 for node in heads:
336 for node in reversed(self.heads()):
339 c = self[node]
337 c = self[node]
340 rev = c.rev()
338 rev = c.rev()
341 try:
339 try:
342 fnode = c.filenode('.hgtags')
340 fnode = c.filenode('.hgtags')
343 except error.LookupError:
341 except error.LookupError:
344 continue
342 continue
345 ret.append((rev, node, fnode))
343 ret.append((rev, node, fnode))
346 if fnode in last:
344 if fnode in last:
347 ret[last[fnode]] = None
345 ret[last[fnode]] = None
348 last[fnode] = len(ret) - 1
346 last[fnode] = len(ret) - 1
349 return [item for item in ret if item]
347 return [item for item in ret if item]
350
348
351 def tagslist(self):
349 def tagslist(self):
352 '''return a list of tags ordered by revision'''
350 '''return a list of tags ordered by revision'''
353 l = []
351 l = []
354 for t, n in self.tags().iteritems():
352 for t, n in self.tags().iteritems():
355 try:
353 try:
356 r = self.changelog.rev(n)
354 r = self.changelog.rev(n)
357 except:
355 except:
358 r = -2 # sort to the beginning of the list if unknown
356 r = -2 # sort to the beginning of the list if unknown
359 l.append((r, t, n))
357 l.append((r, t, n))
360 return [(t, n) for r, t, n in sorted(l)]
358 return [(t, n) for r, t, n in sorted(l)]
361
359
362 def nodetags(self, node):
360 def nodetags(self, node):
363 '''return the tags associated with a node'''
361 '''return the tags associated with a node'''
364 if not self.nodetagscache:
362 if not self.nodetagscache:
365 self.nodetagscache = {}
363 self.nodetagscache = {}
366 for t, n in self.tags().iteritems():
364 for t, n in self.tags().iteritems():
367 self.nodetagscache.setdefault(n, []).append(t)
365 self.nodetagscache.setdefault(n, []).append(t)
368 return self.nodetagscache.get(node, [])
366 return self.nodetagscache.get(node, [])
369
367
370 def _branchtags(self, partial, lrev):
368 def _branchtags(self, partial, lrev):
371 # TODO: rename this function?
369 # TODO: rename this function?
372 tiprev = len(self) - 1
370 tiprev = len(self) - 1
373 if lrev != tiprev:
371 if lrev != tiprev:
374 self._updatebranchcache(partial, lrev+1, tiprev+1)
372 self._updatebranchcache(partial, lrev+1, tiprev+1)
375 self._writebranchcache(partial, self.changelog.tip(), tiprev)
373 self._writebranchcache(partial, self.changelog.tip(), tiprev)
376
374
377 return partial
375 return partial
378
376
379 def _branchheads(self):
377 def _branchheads(self):
380 tip = self.changelog.tip()
378 tip = self.changelog.tip()
381 if self.branchcache is not None and self._branchcachetip == tip:
379 if self.branchcache is not None and self._branchcachetip == tip:
382 return self.branchcache
380 return self.branchcache
383
381
384 oldtip = self._branchcachetip
382 oldtip = self._branchcachetip
385 self._branchcachetip = tip
383 self._branchcachetip = tip
386 if self.branchcache is None:
384 if self.branchcache is None:
387 self.branchcache = {} # avoid recursion in changectx
385 self.branchcache = {} # avoid recursion in changectx
388 else:
386 else:
389 self.branchcache.clear() # keep using the same dict
387 self.branchcache.clear() # keep using the same dict
390 if oldtip is None or oldtip not in self.changelog.nodemap:
388 if oldtip is None or oldtip not in self.changelog.nodemap:
391 partial, last, lrev = self._readbranchcache()
389 partial, last, lrev = self._readbranchcache()
392 else:
390 else:
393 lrev = self.changelog.rev(oldtip)
391 lrev = self.changelog.rev(oldtip)
394 partial = self._ubranchcache
392 partial = self._ubranchcache
395
393
396 self._branchtags(partial, lrev)
394 self._branchtags(partial, lrev)
397 # this private cache holds all heads (not just tips)
395 # this private cache holds all heads (not just tips)
398 self._ubranchcache = partial
396 self._ubranchcache = partial
399
397
400 # the branch cache is stored on disk as UTF-8, but in the local
398 # the branch cache is stored on disk as UTF-8, but in the local
401 # charset internally
399 # charset internally
402 for k, v in partial.iteritems():
400 for k, v in partial.iteritems():
403 self.branchcache[encoding.tolocal(k)] = v
401 self.branchcache[encoding.tolocal(k)] = v
404 return self.branchcache
402 return self.branchcache
405
403
406
404
407 def branchtags(self):
405 def branchtags(self):
408 '''return a dict where branch names map to the tipmost head of
406 '''return a dict where branch names map to the tipmost head of
409 the branch, open heads come before closed'''
407 the branch, open heads come before closed'''
410 bt = {}
408 bt = {}
411 for bn, heads in self._branchheads().iteritems():
409 for bn, heads in self._branchheads().iteritems():
412 head = None
410 head = None
413 for i in range(len(heads)-1, -1, -1):
411 for i in range(len(heads)-1, -1, -1):
414 h = heads[i]
412 h = heads[i]
415 if 'close' not in self.changelog.read(h)[5]:
413 if 'close' not in self.changelog.read(h)[5]:
416 head = h
414 head = h
417 break
415 break
418 # no open heads were found
416 # no open heads were found
419 if head is None:
417 if head is None:
420 head = heads[-1]
418 head = heads[-1]
421 bt[bn] = head
419 bt[bn] = head
422 return bt
420 return bt
423
421
424
422
425 def _readbranchcache(self):
423 def _readbranchcache(self):
426 partial = {}
424 partial = {}
427 try:
425 try:
428 f = self.opener("branchheads.cache")
426 f = self.opener("branchheads.cache")
429 lines = f.read().split('\n')
427 lines = f.read().split('\n')
430 f.close()
428 f.close()
431 except (IOError, OSError):
429 except (IOError, OSError):
432 return {}, nullid, nullrev
430 return {}, nullid, nullrev
433
431
434 try:
432 try:
435 last, lrev = lines.pop(0).split(" ", 1)
433 last, lrev = lines.pop(0).split(" ", 1)
436 last, lrev = bin(last), int(lrev)
434 last, lrev = bin(last), int(lrev)
437 if lrev >= len(self) or self[lrev].node() != last:
435 if lrev >= len(self) or self[lrev].node() != last:
438 # invalidate the cache
436 # invalidate the cache
439 raise ValueError('invalidating branch cache (tip differs)')
437 raise ValueError('invalidating branch cache (tip differs)')
440 for l in lines:
438 for l in lines:
441 if not l: continue
439 if not l: continue
442 node, label = l.split(" ", 1)
440 node, label = l.split(" ", 1)
443 partial.setdefault(label.strip(), []).append(bin(node))
441 partial.setdefault(label.strip(), []).append(bin(node))
444 except KeyboardInterrupt:
442 except KeyboardInterrupt:
445 raise
443 raise
446 except Exception, inst:
444 except Exception, inst:
447 if self.ui.debugflag:
445 if self.ui.debugflag:
448 self.ui.warn(str(inst), '\n')
446 self.ui.warn(str(inst), '\n')
449 partial, last, lrev = {}, nullid, nullrev
447 partial, last, lrev = {}, nullid, nullrev
450 return partial, last, lrev
448 return partial, last, lrev
451
449
452 def _writebranchcache(self, branches, tip, tiprev):
450 def _writebranchcache(self, branches, tip, tiprev):
453 try:
451 try:
454 f = self.opener("branchheads.cache", "w", atomictemp=True)
452 f = self.opener("branchheads.cache", "w", atomictemp=True)
455 f.write("%s %s\n" % (hex(tip), tiprev))
453 f.write("%s %s\n" % (hex(tip), tiprev))
456 for label, nodes in branches.iteritems():
454 for label, nodes in branches.iteritems():
457 for node in nodes:
455 for node in nodes:
458 f.write("%s %s\n" % (hex(node), label))
456 f.write("%s %s\n" % (hex(node), label))
459 f.rename()
457 f.rename()
460 except (IOError, OSError):
458 except (IOError, OSError):
461 pass
459 pass
462
460
463 def _updatebranchcache(self, partial, start, end):
461 def _updatebranchcache(self, partial, start, end):
464 for r in xrange(start, end):
462 for r in xrange(start, end):
465 c = self[r]
463 c = self[r]
466 b = c.branch()
464 b = c.branch()
467 bheads = partial.setdefault(b, [])
465 bheads = partial.setdefault(b, [])
468 bheads.append(c.node())
466 bheads.append(c.node())
469 for p in c.parents():
467 for p in c.parents():
470 pn = p.node()
468 pn = p.node()
471 if pn in bheads:
469 if pn in bheads:
472 bheads.remove(pn)
470 bheads.remove(pn)
473
471
474 def lookup(self, key):
472 def lookup(self, key):
475 if isinstance(key, int):
473 if isinstance(key, int):
476 return self.changelog.node(key)
474 return self.changelog.node(key)
477 elif key == '.':
475 elif key == '.':
478 return self.dirstate.parents()[0]
476 return self.dirstate.parents()[0]
479 elif key == 'null':
477 elif key == 'null':
480 return nullid
478 return nullid
481 elif key == 'tip':
479 elif key == 'tip':
482 return self.changelog.tip()
480 return self.changelog.tip()
483 n = self.changelog._match(key)
481 n = self.changelog._match(key)
484 if n:
482 if n:
485 return n
483 return n
486 if key in self.tags():
484 if key in self.tags():
487 return self.tags()[key]
485 return self.tags()[key]
488 if key in self.branchtags():
486 if key in self.branchtags():
489 return self.branchtags()[key]
487 return self.branchtags()[key]
490 n = self.changelog._partialmatch(key)
488 n = self.changelog._partialmatch(key)
491 if n:
489 if n:
492 return n
490 return n
493 try:
491 try:
494 if len(key) == 20:
492 if len(key) == 20:
495 key = hex(key)
493 key = hex(key)
496 except:
494 except:
497 pass
495 pass
498 raise error.RepoError(_("unknown revision '%s'") % key)
496 raise error.RepoError(_("unknown revision '%s'") % key)
499
497
500 def local(self):
498 def local(self):
501 return True
499 return True
502
500
503 def join(self, f):
501 def join(self, f):
504 return os.path.join(self.path, f)
502 return os.path.join(self.path, f)
505
503
506 def wjoin(self, f):
504 def wjoin(self, f):
507 return os.path.join(self.root, f)
505 return os.path.join(self.root, f)
508
506
509 def rjoin(self, f):
507 def rjoin(self, f):
510 return os.path.join(self.root, util.pconvert(f))
508 return os.path.join(self.root, util.pconvert(f))
511
509
512 def file(self, f):
510 def file(self, f):
513 if f[0] == '/':
511 if f[0] == '/':
514 f = f[1:]
512 f = f[1:]
515 return filelog.filelog(self.sopener, f)
513 return filelog.filelog(self.sopener, f)
516
514
517 def changectx(self, changeid):
515 def changectx(self, changeid):
518 return self[changeid]
516 return self[changeid]
519
517
520 def parents(self, changeid=None):
518 def parents(self, changeid=None):
521 '''get list of changectxs for parents of changeid'''
519 '''get list of changectxs for parents of changeid'''
522 return self[changeid].parents()
520 return self[changeid].parents()
523
521
524 def filectx(self, path, changeid=None, fileid=None):
522 def filectx(self, path, changeid=None, fileid=None):
525 """changeid can be a changeset revision, node, or tag.
523 """changeid can be a changeset revision, node, or tag.
526 fileid can be a file revision or node."""
524 fileid can be a file revision or node."""
527 return context.filectx(self, path, changeid, fileid)
525 return context.filectx(self, path, changeid, fileid)
528
526
529 def getcwd(self):
527 def getcwd(self):
530 return self.dirstate.getcwd()
528 return self.dirstate.getcwd()
531
529
532 def pathto(self, f, cwd=None):
530 def pathto(self, f, cwd=None):
533 return self.dirstate.pathto(f, cwd)
531 return self.dirstate.pathto(f, cwd)
534
532
535 def wfile(self, f, mode='r'):
533 def wfile(self, f, mode='r'):
536 return self.wopener(f, mode)
534 return self.wopener(f, mode)
537
535
538 def _link(self, f):
536 def _link(self, f):
539 return os.path.islink(self.wjoin(f))
537 return os.path.islink(self.wjoin(f))
540
538
541 def _filter(self, filter, filename, data):
539 def _filter(self, filter, filename, data):
542 if filter not in self.filterpats:
540 if filter not in self.filterpats:
543 l = []
541 l = []
544 for pat, cmd in self.ui.configitems(filter):
542 for pat, cmd in self.ui.configitems(filter):
545 if cmd == '!':
543 if cmd == '!':
546 continue
544 continue
547 mf = util.matcher(self.root, "", [pat], [], [])[1]
545 mf = util.matcher(self.root, "", [pat], [], [])[1]
548 fn = None
546 fn = None
549 params = cmd
547 params = cmd
550 for name, filterfn in self._datafilters.iteritems():
548 for name, filterfn in self._datafilters.iteritems():
551 if cmd.startswith(name):
549 if cmd.startswith(name):
552 fn = filterfn
550 fn = filterfn
553 params = cmd[len(name):].lstrip()
551 params = cmd[len(name):].lstrip()
554 break
552 break
555 if not fn:
553 if not fn:
556 fn = lambda s, c, **kwargs: util.filter(s, c)
554 fn = lambda s, c, **kwargs: util.filter(s, c)
557 # Wrap old filters not supporting keyword arguments
555 # Wrap old filters not supporting keyword arguments
558 if not inspect.getargspec(fn)[2]:
556 if not inspect.getargspec(fn)[2]:
559 oldfn = fn
557 oldfn = fn
560 fn = lambda s, c, **kwargs: oldfn(s, c)
558 fn = lambda s, c, **kwargs: oldfn(s, c)
561 l.append((mf, fn, params))
559 l.append((mf, fn, params))
562 self.filterpats[filter] = l
560 self.filterpats[filter] = l
563
561
564 for mf, fn, cmd in self.filterpats[filter]:
562 for mf, fn, cmd in self.filterpats[filter]:
565 if mf(filename):
563 if mf(filename):
566 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
564 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
567 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
565 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
568 break
566 break
569
567
570 return data
568 return data
571
569
572 def adddatafilter(self, name, filter):
570 def adddatafilter(self, name, filter):
573 self._datafilters[name] = filter
571 self._datafilters[name] = filter
574
572
575 def wread(self, filename):
573 def wread(self, filename):
576 if self._link(filename):
574 if self._link(filename):
577 data = os.readlink(self.wjoin(filename))
575 data = os.readlink(self.wjoin(filename))
578 else:
576 else:
579 data = self.wopener(filename, 'r').read()
577 data = self.wopener(filename, 'r').read()
580 return self._filter("encode", filename, data)
578 return self._filter("encode", filename, data)
581
579
582 def wwrite(self, filename, data, flags):
580 def wwrite(self, filename, data, flags):
583 data = self._filter("decode", filename, data)
581 data = self._filter("decode", filename, data)
584 try:
582 try:
585 os.unlink(self.wjoin(filename))
583 os.unlink(self.wjoin(filename))
586 except OSError:
584 except OSError:
587 pass
585 pass
588 if 'l' in flags:
586 if 'l' in flags:
589 self.wopener.symlink(data, filename)
587 self.wopener.symlink(data, filename)
590 else:
588 else:
591 self.wopener(filename, 'w').write(data)
589 self.wopener(filename, 'w').write(data)
592 if 'x' in flags:
590 if 'x' in flags:
593 util.set_flags(self.wjoin(filename), False, True)
591 util.set_flags(self.wjoin(filename), False, True)
594
592
595 def wwritedata(self, filename, data):
593 def wwritedata(self, filename, data):
596 return self._filter("decode", filename, data)
594 return self._filter("decode", filename, data)
597
595
598 def transaction(self):
596 def transaction(self):
599 tr = self._transref and self._transref() or None
597 tr = self._transref and self._transref() or None
600 if tr and tr.running():
598 if tr and tr.running():
601 return tr.nest()
599 return tr.nest()
602
600
603 # abort here if the journal already exists
601 # abort here if the journal already exists
604 if os.path.exists(self.sjoin("journal")):
602 if os.path.exists(self.sjoin("journal")):
605 raise error.RepoError(_("journal already exists - run hg recover"))
603 raise error.RepoError(_("journal already exists - run hg recover"))
606
604
607 # save dirstate for rollback
605 # save dirstate for rollback
608 try:
606 try:
609 ds = self.opener("dirstate").read()
607 ds = self.opener("dirstate").read()
610 except IOError:
608 except IOError:
611 ds = ""
609 ds = ""
612 self.opener("journal.dirstate", "w").write(ds)
610 self.opener("journal.dirstate", "w").write(ds)
613 self.opener("journal.branch", "w").write(self.dirstate.branch())
611 self.opener("journal.branch", "w").write(self.dirstate.branch())
614
612
615 renames = [(self.sjoin("journal"), self.sjoin("undo")),
613 renames = [(self.sjoin("journal"), self.sjoin("undo")),
616 (self.join("journal.dirstate"), self.join("undo.dirstate")),
614 (self.join("journal.dirstate"), self.join("undo.dirstate")),
617 (self.join("journal.branch"), self.join("undo.branch"))]
615 (self.join("journal.branch"), self.join("undo.branch"))]
618 tr = transaction.transaction(self.ui.warn, self.sopener,
616 tr = transaction.transaction(self.ui.warn, self.sopener,
619 self.sjoin("journal"),
617 self.sjoin("journal"),
620 aftertrans(renames),
618 aftertrans(renames),
621 self.store.createmode)
619 self.store.createmode)
622 self._transref = weakref.ref(tr)
620 self._transref = weakref.ref(tr)
623 return tr
621 return tr
624
622
625 def recover(self):
623 def recover(self):
626 lock = self.lock()
624 lock = self.lock()
627 try:
625 try:
628 if os.path.exists(self.sjoin("journal")):
626 if os.path.exists(self.sjoin("journal")):
629 self.ui.status(_("rolling back interrupted transaction\n"))
627 self.ui.status(_("rolling back interrupted transaction\n"))
630 transaction.rollback(self.sopener, self.sjoin("journal"))
628 transaction.rollback(self.sopener, self.sjoin("journal"))
631 self.invalidate()
629 self.invalidate()
632 return True
630 return True
633 else:
631 else:
634 self.ui.warn(_("no interrupted transaction available\n"))
632 self.ui.warn(_("no interrupted transaction available\n"))
635 return False
633 return False
636 finally:
634 finally:
637 lock.release()
635 lock.release()
638
636
639 def rollback(self):
637 def rollback(self):
640 wlock = lock = None
638 wlock = lock = None
641 try:
639 try:
642 wlock = self.wlock()
640 wlock = self.wlock()
643 lock = self.lock()
641 lock = self.lock()
644 if os.path.exists(self.sjoin("undo")):
642 if os.path.exists(self.sjoin("undo")):
645 self.ui.status(_("rolling back last transaction\n"))
643 self.ui.status(_("rolling back last transaction\n"))
646 transaction.rollback(self.sopener, self.sjoin("undo"))
644 transaction.rollback(self.sopener, self.sjoin("undo"))
647 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
645 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
648 try:
646 try:
649 branch = self.opener("undo.branch").read()
647 branch = self.opener("undo.branch").read()
650 self.dirstate.setbranch(branch)
648 self.dirstate.setbranch(branch)
651 except IOError:
649 except IOError:
652 self.ui.warn(_("Named branch could not be reset, "
650 self.ui.warn(_("Named branch could not be reset, "
653 "current branch still is: %s\n")
651 "current branch still is: %s\n")
654 % encoding.tolocal(self.dirstate.branch()))
652 % encoding.tolocal(self.dirstate.branch()))
655 self.invalidate()
653 self.invalidate()
656 self.dirstate.invalidate()
654 self.dirstate.invalidate()
657 else:
655 else:
658 self.ui.warn(_("no rollback information available\n"))
656 self.ui.warn(_("no rollback information available\n"))
659 finally:
657 finally:
660 release(lock, wlock)
658 release(lock, wlock)
661
659
662 def invalidate(self):
660 def invalidate(self):
663 for a in "changelog manifest".split():
661 for a in "changelog manifest".split():
664 if a in self.__dict__:
662 if a in self.__dict__:
665 delattr(self, a)
663 delattr(self, a)
666 self.tagscache = None
664 self.tagscache = None
667 self._tagstypecache = None
665 self._tagstypecache = None
668 self.nodetagscache = None
666 self.nodetagscache = None
669 self.branchcache = None
667 self.branchcache = None
670 self._ubranchcache = None
668 self._ubranchcache = None
671 self._branchcachetip = None
669 self._branchcachetip = None
672
670
673 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
671 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
674 try:
672 try:
675 l = lock.lock(lockname, 0, releasefn, desc=desc)
673 l = lock.lock(lockname, 0, releasefn, desc=desc)
676 except error.LockHeld, inst:
674 except error.LockHeld, inst:
677 if not wait:
675 if not wait:
678 raise
676 raise
679 self.ui.warn(_("waiting for lock on %s held by %r\n") %
677 self.ui.warn(_("waiting for lock on %s held by %r\n") %
680 (desc, inst.locker))
678 (desc, inst.locker))
681 # default to 600 seconds timeout
679 # default to 600 seconds timeout
682 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
680 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
683 releasefn, desc=desc)
681 releasefn, desc=desc)
684 if acquirefn:
682 if acquirefn:
685 acquirefn()
683 acquirefn()
686 return l
684 return l
687
685
688 def lock(self, wait=True):
686 def lock(self, wait=True):
689 l = self._lockref and self._lockref()
687 l = self._lockref and self._lockref()
690 if l is not None and l.held:
688 if l is not None and l.held:
691 l.lock()
689 l.lock()
692 return l
690 return l
693
691
694 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
692 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
695 _('repository %s') % self.origroot)
693 _('repository %s') % self.origroot)
696 self._lockref = weakref.ref(l)
694 self._lockref = weakref.ref(l)
697 return l
695 return l
698
696
699 def wlock(self, wait=True):
697 def wlock(self, wait=True):
700 l = self._wlockref and self._wlockref()
698 l = self._wlockref and self._wlockref()
701 if l is not None and l.held:
699 if l is not None and l.held:
702 l.lock()
700 l.lock()
703 return l
701 return l
704
702
705 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
703 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
706 self.dirstate.invalidate, _('working directory of %s') %
704 self.dirstate.invalidate, _('working directory of %s') %
707 self.origroot)
705 self.origroot)
708 self._wlockref = weakref.ref(l)
706 self._wlockref = weakref.ref(l)
709 return l
707 return l
710
708
711 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
709 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
712 """
710 """
713 commit an individual file as part of a larger transaction
711 commit an individual file as part of a larger transaction
714 """
712 """
715
713
716 fn = fctx.path()
714 fn = fctx.path()
717 t = fctx.data()
715 t = fctx.data()
718 fl = self.file(fn)
716 fl = self.file(fn)
719 fp1 = manifest1.get(fn, nullid)
717 fp1 = manifest1.get(fn, nullid)
720 fp2 = manifest2.get(fn, nullid)
718 fp2 = manifest2.get(fn, nullid)
721
719
722 meta = {}
720 meta = {}
723 cp = fctx.renamed()
721 cp = fctx.renamed()
724 if cp and cp[0] != fn:
722 if cp and cp[0] != fn:
725 # Mark the new revision of this file as a copy of another
723 # Mark the new revision of this file as a copy of another
726 # file. This copy data will effectively act as a parent
724 # file. This copy data will effectively act as a parent
727 # of this new revision. If this is a merge, the first
725 # of this new revision. If this is a merge, the first
728 # parent will be the nullid (meaning "look up the copy data")
726 # parent will be the nullid (meaning "look up the copy data")
729 # and the second one will be the other parent. For example:
727 # and the second one will be the other parent. For example:
730 #
728 #
731 # 0 --- 1 --- 3 rev1 changes file foo
729 # 0 --- 1 --- 3 rev1 changes file foo
732 # \ / rev2 renames foo to bar and changes it
730 # \ / rev2 renames foo to bar and changes it
733 # \- 2 -/ rev3 should have bar with all changes and
731 # \- 2 -/ rev3 should have bar with all changes and
734 # should record that bar descends from
732 # should record that bar descends from
735 # bar in rev2 and foo in rev1
733 # bar in rev2 and foo in rev1
736 #
734 #
737 # this allows this merge to succeed:
735 # this allows this merge to succeed:
738 #
736 #
739 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
737 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
740 # \ / merging rev3 and rev4 should use bar@rev2
738 # \ / merging rev3 and rev4 should use bar@rev2
741 # \- 2 --- 4 as the merge base
739 # \- 2 --- 4 as the merge base
742 #
740 #
743
741
744 cf = cp[0]
742 cf = cp[0]
745 cr = manifest1.get(cf)
743 cr = manifest1.get(cf)
746 nfp = fp2
744 nfp = fp2
747
745
748 if manifest2: # branch merge
746 if manifest2: # branch merge
749 if fp2 == nullid or cr is None: # copied on remote side
747 if fp2 == nullid or cr is None: # copied on remote side
750 if cf in manifest2:
748 if cf in manifest2:
751 cr = manifest2[cf]
749 cr = manifest2[cf]
752 nfp = fp1
750 nfp = fp1
753
751
754 # find source in nearest ancestor if we've lost track
752 # find source in nearest ancestor if we've lost track
755 if not cr:
753 if not cr:
756 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
754 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
757 (fn, cf))
755 (fn, cf))
758 for a in self['.'].ancestors():
756 for a in self['.'].ancestors():
759 if cf in a:
757 if cf in a:
760 cr = a[cf].filenode()
758 cr = a[cf].filenode()
761 break
759 break
762
760
763 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
761 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
764 meta["copy"] = cf
762 meta["copy"] = cf
765 meta["copyrev"] = hex(cr)
763 meta["copyrev"] = hex(cr)
766 fp1, fp2 = nullid, nfp
764 fp1, fp2 = nullid, nfp
767 elif fp2 != nullid:
765 elif fp2 != nullid:
768 # is one parent an ancestor of the other?
766 # is one parent an ancestor of the other?
769 fpa = fl.ancestor(fp1, fp2)
767 fpa = fl.ancestor(fp1, fp2)
770 if fpa == fp1:
768 if fpa == fp1:
771 fp1, fp2 = fp2, nullid
769 fp1, fp2 = fp2, nullid
772 elif fpa == fp2:
770 elif fpa == fp2:
773 fp2 = nullid
771 fp2 = nullid
774
772
775 # is the file unmodified from the parent? report existing entry
773 # is the file unmodified from the parent? report existing entry
776 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
774 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
777 return fp1
775 return fp1
778
776
779 changelist.append(fn)
777 changelist.append(fn)
780 return fl.add(t, meta, tr, linkrev, fp1, fp2)
778 return fl.add(t, meta, tr, linkrev, fp1, fp2)
781
779
782 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
780 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
783 if p1 is None:
781 if p1 is None:
784 p1, p2 = self.dirstate.parents()
782 p1, p2 = self.dirstate.parents()
785 return self.commit(files=files, text=text, user=user, date=date,
783 return self.commit(files=files, text=text, user=user, date=date,
786 p1=p1, p2=p2, extra=extra, empty_ok=True)
784 p1=p1, p2=p2, extra=extra, empty_ok=True)
787
785
788 def commit(self, files=None, text="", user=None, date=None,
786 def commit(self, files=None, text="", user=None, date=None,
789 match=None, force=False, force_editor=False,
787 match=None, force=False, force_editor=False,
790 p1=None, p2=None, extra={}, empty_ok=False):
788 p1=None, p2=None, extra={}, empty_ok=False):
791 wlock = lock = None
789 wlock = lock = None
792 if extra.get("close"):
790 if extra.get("close"):
793 force = True
791 force = True
794 if files:
792 if files:
795 files = list(set(files))
793 files = list(set(files))
796 try:
794 try:
797 wlock = self.wlock()
795 wlock = self.wlock()
798 lock = self.lock()
796 lock = self.lock()
799 use_dirstate = (p1 is None) # not rawcommit
797 use_dirstate = (p1 is None) # not rawcommit
800
798
801 if use_dirstate:
799 if use_dirstate:
802 p1, p2 = self.dirstate.parents()
800 p1, p2 = self.dirstate.parents()
803 update_dirstate = True
801 update_dirstate = True
804
802
805 if (not force and p2 != nullid and
803 if (not force and p2 != nullid and
806 (match and (match.files() or match.anypats()))):
804 (match and (match.files() or match.anypats()))):
807 raise util.Abort(_('cannot partially commit a merge '
805 raise util.Abort(_('cannot partially commit a merge '
808 '(do not specify files or patterns)'))
806 '(do not specify files or patterns)'))
809
807
810 if files:
808 if files:
811 modified, removed = [], []
809 modified, removed = [], []
812 for f in files:
810 for f in files:
813 s = self.dirstate[f]
811 s = self.dirstate[f]
814 if s in 'nma':
812 if s in 'nma':
815 modified.append(f)
813 modified.append(f)
816 elif s == 'r':
814 elif s == 'r':
817 removed.append(f)
815 removed.append(f)
818 else:
816 else:
819 self.ui.warn(_("%s not tracked!\n") % f)
817 self.ui.warn(_("%s not tracked!\n") % f)
820 changes = [modified, [], removed, [], []]
818 changes = [modified, [], removed, [], []]
821 else:
819 else:
822 changes = self.status(match=match)
820 changes = self.status(match=match)
823 else:
821 else:
824 p1, p2 = p1, p2 or nullid
822 p1, p2 = p1, p2 or nullid
825 update_dirstate = (self.dirstate.parents()[0] == p1)
823 update_dirstate = (self.dirstate.parents()[0] == p1)
826 changes = [files, [], [], [], []]
824 changes = [files, [], [], [], []]
827
825
828 ms = merge_.mergestate(self)
826 ms = merge_.mergestate(self)
829 for f in changes[0]:
827 for f in changes[0]:
830 if f in ms and ms[f] == 'u':
828 if f in ms and ms[f] == 'u':
831 raise util.Abort(_("unresolved merge conflicts "
829 raise util.Abort(_("unresolved merge conflicts "
832 "(see hg resolve)"))
830 "(see hg resolve)"))
833 wctx = context.workingctx(self, (p1, p2), text, user, date,
831 wctx = context.workingctx(self, (p1, p2), text, user, date,
834 extra, changes)
832 extra, changes)
835 r = self._commitctx(wctx, force, force_editor, empty_ok,
833 r = self._commitctx(wctx, force, force_editor, empty_ok,
836 use_dirstate, update_dirstate)
834 use_dirstate, update_dirstate)
837 ms.reset()
835 ms.reset()
838 return r
836 return r
839
837
840 finally:
838 finally:
841 release(lock, wlock)
839 release(lock, wlock)
842
840
843 def commitctx(self, ctx):
841 def commitctx(self, ctx):
844 """Add a new revision to current repository.
842 """Add a new revision to current repository.
845
843
846 Revision information is passed in the context.memctx argument.
844 Revision information is passed in the context.memctx argument.
847 commitctx() does not touch the working directory.
845 commitctx() does not touch the working directory.
848 """
846 """
849 wlock = lock = None
847 wlock = lock = None
850 try:
848 try:
851 wlock = self.wlock()
849 wlock = self.wlock()
852 lock = self.lock()
850 lock = self.lock()
853 return self._commitctx(ctx, force=True, force_editor=False,
851 return self._commitctx(ctx, force=True, force_editor=False,
854 empty_ok=True, use_dirstate=False,
852 empty_ok=True, use_dirstate=False,
855 update_dirstate=False)
853 update_dirstate=False)
856 finally:
854 finally:
857 release(lock, wlock)
855 release(lock, wlock)
858
856
859 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
857 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
860 use_dirstate=True, update_dirstate=True):
858 use_dirstate=True, update_dirstate=True):
861 tr = None
859 tr = None
862 valid = 0 # don't save the dirstate if this isn't set
860 valid = 0 # don't save the dirstate if this isn't set
863 try:
861 try:
864 commit = sorted(wctx.modified() + wctx.added())
862 commit = sorted(wctx.modified() + wctx.added())
865 remove = wctx.removed()
863 remove = wctx.removed()
866 extra = wctx.extra().copy()
864 extra = wctx.extra().copy()
867 branchname = extra['branch']
865 branchname = extra['branch']
868 user = wctx.user()
866 user = wctx.user()
869 text = wctx.description()
867 text = wctx.description()
870
868
871 p1, p2 = [p.node() for p in wctx.parents()]
869 p1, p2 = [p.node() for p in wctx.parents()]
872 c1 = self.changelog.read(p1)
870 c1 = self.changelog.read(p1)
873 c2 = self.changelog.read(p2)
871 c2 = self.changelog.read(p2)
874 m1 = self.manifest.read(c1[0]).copy()
872 m1 = self.manifest.read(c1[0]).copy()
875 m2 = self.manifest.read(c2[0])
873 m2 = self.manifest.read(c2[0])
876
874
877 if use_dirstate:
875 if use_dirstate:
878 oldname = c1[5].get("branch") # stored in UTF-8
876 oldname = c1[5].get("branch") # stored in UTF-8
879 if (not commit and not remove and not force and p2 == nullid
877 if (not commit and not remove and not force and p2 == nullid
880 and branchname == oldname):
878 and branchname == oldname):
881 self.ui.status(_("nothing changed\n"))
879 self.ui.status(_("nothing changed\n"))
882 return None
880 return None
883
881
884 xp1 = hex(p1)
882 xp1 = hex(p1)
885 if p2 == nullid: xp2 = ''
883 if p2 == nullid: xp2 = ''
886 else: xp2 = hex(p2)
884 else: xp2 = hex(p2)
887
885
888 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
886 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
889
887
890 tr = self.transaction()
888 tr = self.transaction()
891 trp = weakref.proxy(tr)
889 trp = weakref.proxy(tr)
892
890
893 # check in files
891 # check in files
894 new = {}
892 new = {}
895 changed = []
893 changed = []
896 linkrev = len(self)
894 linkrev = len(self)
897 for f in commit:
895 for f in commit:
898 self.ui.note(f + "\n")
896 self.ui.note(f + "\n")
899 try:
897 try:
900 fctx = wctx.filectx(f)
898 fctx = wctx.filectx(f)
901 newflags = fctx.flags()
899 newflags = fctx.flags()
902 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
900 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
903 if ((not changed or changed[-1] != f) and
901 if ((not changed or changed[-1] != f) and
904 m2.get(f) != new[f]):
902 m2.get(f) != new[f]):
905 # mention the file in the changelog if some
903 # mention the file in the changelog if some
906 # flag changed, even if there was no content
904 # flag changed, even if there was no content
907 # change.
905 # change.
908 if m1.flags(f) != newflags:
906 if m1.flags(f) != newflags:
909 changed.append(f)
907 changed.append(f)
910 m1.set(f, newflags)
908 m1.set(f, newflags)
911 if use_dirstate:
909 if use_dirstate:
912 self.dirstate.normal(f)
910 self.dirstate.normal(f)
913
911
914 except (OSError, IOError):
912 except (OSError, IOError):
915 if use_dirstate:
913 if use_dirstate:
916 self.ui.warn(_("trouble committing %s!\n") % f)
914 self.ui.warn(_("trouble committing %s!\n") % f)
917 raise
915 raise
918 else:
916 else:
919 remove.append(f)
917 remove.append(f)
920
918
921 updated, added = [], []
919 updated, added = [], []
922 for f in sorted(changed):
920 for f in sorted(changed):
923 if f in m1 or f in m2:
921 if f in m1 or f in m2:
924 updated.append(f)
922 updated.append(f)
925 else:
923 else:
926 added.append(f)
924 added.append(f)
927
925
928 # update manifest
926 # update manifest
929 m1.update(new)
927 m1.update(new)
930 removed = [f for f in sorted(remove) if f in m1 or f in m2]
928 removed = [f for f in sorted(remove) if f in m1 or f in m2]
931 removed1 = []
929 removed1 = []
932
930
933 for f in removed:
931 for f in removed:
934 if f in m1:
932 if f in m1:
935 del m1[f]
933 del m1[f]
936 removed1.append(f)
934 removed1.append(f)
937 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
935 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
938 (new, removed1))
936 (new, removed1))
939
937
940 # add changeset
938 # add changeset
941 if (not empty_ok and not text) or force_editor:
939 if (not empty_ok and not text) or force_editor:
942 edittext = []
940 edittext = []
943 if text:
941 if text:
944 edittext.append(text)
942 edittext.append(text)
945 edittext.append("")
943 edittext.append("")
946 edittext.append("") # Empty line between message and comments.
944 edittext.append("") # Empty line between message and comments.
947 edittext.append(_("HG: Enter commit message."
945 edittext.append(_("HG: Enter commit message."
948 " Lines beginning with 'HG:' are removed."))
946 " Lines beginning with 'HG:' are removed."))
949 edittext.append("HG: --")
947 edittext.append("HG: --")
950 edittext.append("HG: user: %s" % user)
948 edittext.append("HG: user: %s" % user)
951 if p2 != nullid:
949 if p2 != nullid:
952 edittext.append("HG: branch merge")
950 edittext.append("HG: branch merge")
953 if branchname:
951 if branchname:
954 edittext.append("HG: branch '%s'"
952 edittext.append("HG: branch '%s'"
955 % encoding.tolocal(branchname))
953 % encoding.tolocal(branchname))
956 edittext.extend(["HG: added %s" % f for f in added])
954 edittext.extend(["HG: added %s" % f for f in added])
957 edittext.extend(["HG: changed %s" % f for f in updated])
955 edittext.extend(["HG: changed %s" % f for f in updated])
958 edittext.extend(["HG: removed %s" % f for f in removed])
956 edittext.extend(["HG: removed %s" % f for f in removed])
959 if not added and not updated and not removed:
957 if not added and not updated and not removed:
960 edittext.append("HG: no files changed")
958 edittext.append("HG: no files changed")
961 edittext.append("")
959 edittext.append("")
962 # run editor in the repository root
960 # run editor in the repository root
963 olddir = os.getcwd()
961 olddir = os.getcwd()
964 os.chdir(self.root)
962 os.chdir(self.root)
965 text = self.ui.edit("\n".join(edittext), user)
963 text = self.ui.edit("\n".join(edittext), user)
966 os.chdir(olddir)
964 os.chdir(olddir)
967
965
968 lines = [line.rstrip() for line in text.rstrip().splitlines()]
966 lines = [line.rstrip() for line in text.rstrip().splitlines()]
969 while lines and not lines[0]:
967 while lines and not lines[0]:
970 del lines[0]
968 del lines[0]
971 if not lines and use_dirstate:
969 if not lines and use_dirstate:
972 raise util.Abort(_("empty commit message"))
970 raise util.Abort(_("empty commit message"))
973 text = '\n'.join(lines)
971 text = '\n'.join(lines)
974
972
975 self.changelog.delayupdate()
973 self.changelog.delayupdate()
976 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
974 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
977 user, wctx.date(), extra)
975 user, wctx.date(), extra)
978 p = lambda: self.changelog.writepending() and self.root or ""
976 p = lambda: self.changelog.writepending() and self.root or ""
979 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
977 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
980 parent2=xp2, pending=p)
978 parent2=xp2, pending=p)
981 self.changelog.finalize(trp)
979 self.changelog.finalize(trp)
982 tr.close()
980 tr.close()
983
981
984 if self.branchcache:
982 if self.branchcache:
985 self.branchtags()
983 self.branchtags()
986
984
987 if use_dirstate or update_dirstate:
985 if use_dirstate or update_dirstate:
988 self.dirstate.setparents(n)
986 self.dirstate.setparents(n)
989 if use_dirstate:
987 if use_dirstate:
990 for f in removed:
988 for f in removed:
991 self.dirstate.forget(f)
989 self.dirstate.forget(f)
992 valid = 1 # our dirstate updates are complete
990 valid = 1 # our dirstate updates are complete
993
991
994 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
992 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
995 return n
993 return n
996 finally:
994 finally:
997 if not valid: # don't save our updated dirstate
995 if not valid: # don't save our updated dirstate
998 self.dirstate.invalidate()
996 self.dirstate.invalidate()
999 del tr
997 del tr
1000
998
1001 def walk(self, match, node=None):
999 def walk(self, match, node=None):
1002 '''
1000 '''
1003 walk recursively through the directory tree or a given
1001 walk recursively through the directory tree or a given
1004 changeset, finding all files matched by the match
1002 changeset, finding all files matched by the match
1005 function
1003 function
1006 '''
1004 '''
1007 return self[node].walk(match)
1005 return self[node].walk(match)
1008
1006
1009 def status(self, node1='.', node2=None, match=None,
1007 def status(self, node1='.', node2=None, match=None,
1010 ignored=False, clean=False, unknown=False):
1008 ignored=False, clean=False, unknown=False):
1011 """return status of files between two nodes or node and working directory
1009 """return status of files between two nodes or node and working directory
1012
1010
1013 If node1 is None, use the first dirstate parent instead.
1011 If node1 is None, use the first dirstate parent instead.
1014 If node2 is None, compare node1 with working directory.
1012 If node2 is None, compare node1 with working directory.
1015 """
1013 """
1016
1014
1017 def mfmatches(ctx):
1015 def mfmatches(ctx):
1018 mf = ctx.manifest().copy()
1016 mf = ctx.manifest().copy()
1019 for fn in mf.keys():
1017 for fn in mf.keys():
1020 if not match(fn):
1018 if not match(fn):
1021 del mf[fn]
1019 del mf[fn]
1022 return mf
1020 return mf
1023
1021
1024 if isinstance(node1, context.changectx):
1022 if isinstance(node1, context.changectx):
1025 ctx1 = node1
1023 ctx1 = node1
1026 else:
1024 else:
1027 ctx1 = self[node1]
1025 ctx1 = self[node1]
1028 if isinstance(node2, context.changectx):
1026 if isinstance(node2, context.changectx):
1029 ctx2 = node2
1027 ctx2 = node2
1030 else:
1028 else:
1031 ctx2 = self[node2]
1029 ctx2 = self[node2]
1032
1030
1033 working = ctx2.rev() is None
1031 working = ctx2.rev() is None
1034 parentworking = working and ctx1 == self['.']
1032 parentworking = working and ctx1 == self['.']
1035 match = match or match_.always(self.root, self.getcwd())
1033 match = match or match_.always(self.root, self.getcwd())
1036 listignored, listclean, listunknown = ignored, clean, unknown
1034 listignored, listclean, listunknown = ignored, clean, unknown
1037
1035
1038 # load earliest manifest first for caching reasons
1036 # load earliest manifest first for caching reasons
1039 if not working and ctx2.rev() < ctx1.rev():
1037 if not working and ctx2.rev() < ctx1.rev():
1040 ctx2.manifest()
1038 ctx2.manifest()
1041
1039
1042 if not parentworking:
1040 if not parentworking:
1043 def bad(f, msg):
1041 def bad(f, msg):
1044 if f not in ctx1:
1042 if f not in ctx1:
1045 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1043 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1046 return False
1044 return False
1047 match.bad = bad
1045 match.bad = bad
1048
1046
1049 if working: # we need to scan the working dir
1047 if working: # we need to scan the working dir
1050 s = self.dirstate.status(match, listignored, listclean, listunknown)
1048 s = self.dirstate.status(match, listignored, listclean, listunknown)
1051 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1049 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1052
1050
1053 # check for any possibly clean files
1051 # check for any possibly clean files
1054 if parentworking and cmp:
1052 if parentworking and cmp:
1055 fixup = []
1053 fixup = []
1056 # do a full compare of any files that might have changed
1054 # do a full compare of any files that might have changed
1057 for f in cmp:
1055 for f in cmp:
1058 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1056 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1059 or ctx1[f].cmp(ctx2[f].data())):
1057 or ctx1[f].cmp(ctx2[f].data())):
1060 modified.append(f)
1058 modified.append(f)
1061 else:
1059 else:
1062 fixup.append(f)
1060 fixup.append(f)
1063
1061
1064 if listclean:
1062 if listclean:
1065 clean += fixup
1063 clean += fixup
1066
1064
1067 # update dirstate for files that are actually clean
1065 # update dirstate for files that are actually clean
1068 if fixup:
1066 if fixup:
1069 wlock = None
1067 wlock = None
1070 try:
1068 try:
1071 try:
1069 try:
1072 # updating the dirstate is optional
1070 # updating the dirstate is optional
1073 # so we dont wait on the lock
1071 # so we dont wait on the lock
1074 wlock = self.wlock(False)
1072 wlock = self.wlock(False)
1075 for f in fixup:
1073 for f in fixup:
1076 self.dirstate.normal(f)
1074 self.dirstate.normal(f)
1077 except error.LockError:
1075 except error.LockError:
1078 pass
1076 pass
1079 finally:
1077 finally:
1080 release(wlock)
1078 release(wlock)
1081
1079
1082 if not parentworking:
1080 if not parentworking:
1083 mf1 = mfmatches(ctx1)
1081 mf1 = mfmatches(ctx1)
1084 if working:
1082 if working:
1085 # we are comparing working dir against non-parent
1083 # we are comparing working dir against non-parent
1086 # generate a pseudo-manifest for the working dir
1084 # generate a pseudo-manifest for the working dir
1087 mf2 = mfmatches(self['.'])
1085 mf2 = mfmatches(self['.'])
1088 for f in cmp + modified + added:
1086 for f in cmp + modified + added:
1089 mf2[f] = None
1087 mf2[f] = None
1090 mf2.set(f, ctx2.flags(f))
1088 mf2.set(f, ctx2.flags(f))
1091 for f in removed:
1089 for f in removed:
1092 if f in mf2:
1090 if f in mf2:
1093 del mf2[f]
1091 del mf2[f]
1094 else:
1092 else:
1095 # we are comparing two revisions
1093 # we are comparing two revisions
1096 deleted, unknown, ignored = [], [], []
1094 deleted, unknown, ignored = [], [], []
1097 mf2 = mfmatches(ctx2)
1095 mf2 = mfmatches(ctx2)
1098
1096
1099 modified, added, clean = [], [], []
1097 modified, added, clean = [], [], []
1100 for fn in mf2:
1098 for fn in mf2:
1101 if fn in mf1:
1099 if fn in mf1:
1102 if (mf1.flags(fn) != mf2.flags(fn) or
1100 if (mf1.flags(fn) != mf2.flags(fn) or
1103 (mf1[fn] != mf2[fn] and
1101 (mf1[fn] != mf2[fn] and
1104 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1102 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1105 modified.append(fn)
1103 modified.append(fn)
1106 elif listclean:
1104 elif listclean:
1107 clean.append(fn)
1105 clean.append(fn)
1108 del mf1[fn]
1106 del mf1[fn]
1109 else:
1107 else:
1110 added.append(fn)
1108 added.append(fn)
1111 removed = mf1.keys()
1109 removed = mf1.keys()
1112
1110
1113 r = modified, added, removed, deleted, unknown, ignored, clean
1111 r = modified, added, removed, deleted, unknown, ignored, clean
1114 [l.sort() for l in r]
1112 [l.sort() for l in r]
1115 return r
1113 return r
1116
1114
1117 def add(self, list):
1115 def add(self, list):
1118 wlock = self.wlock()
1116 wlock = self.wlock()
1119 try:
1117 try:
1120 rejected = []
1118 rejected = []
1121 for f in list:
1119 for f in list:
1122 p = self.wjoin(f)
1120 p = self.wjoin(f)
1123 try:
1121 try:
1124 st = os.lstat(p)
1122 st = os.lstat(p)
1125 except:
1123 except:
1126 self.ui.warn(_("%s does not exist!\n") % f)
1124 self.ui.warn(_("%s does not exist!\n") % f)
1127 rejected.append(f)
1125 rejected.append(f)
1128 continue
1126 continue
1129 if st.st_size > 10000000:
1127 if st.st_size > 10000000:
1130 self.ui.warn(_("%s: files over 10MB may cause memory and"
1128 self.ui.warn(_("%s: files over 10MB may cause memory and"
1131 " performance problems\n"
1129 " performance problems\n"
1132 "(use 'hg revert %s' to unadd the file)\n")
1130 "(use 'hg revert %s' to unadd the file)\n")
1133 % (f, f))
1131 % (f, f))
1134 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1132 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1135 self.ui.warn(_("%s not added: only files and symlinks "
1133 self.ui.warn(_("%s not added: only files and symlinks "
1136 "supported currently\n") % f)
1134 "supported currently\n") % f)
1137 rejected.append(p)
1135 rejected.append(p)
1138 elif self.dirstate[f] in 'amn':
1136 elif self.dirstate[f] in 'amn':
1139 self.ui.warn(_("%s already tracked!\n") % f)
1137 self.ui.warn(_("%s already tracked!\n") % f)
1140 elif self.dirstate[f] == 'r':
1138 elif self.dirstate[f] == 'r':
1141 self.dirstate.normallookup(f)
1139 self.dirstate.normallookup(f)
1142 else:
1140 else:
1143 self.dirstate.add(f)
1141 self.dirstate.add(f)
1144 return rejected
1142 return rejected
1145 finally:
1143 finally:
1146 wlock.release()
1144 wlock.release()
1147
1145
1148 def forget(self, list):
1146 def forget(self, list):
1149 wlock = self.wlock()
1147 wlock = self.wlock()
1150 try:
1148 try:
1151 for f in list:
1149 for f in list:
1152 if self.dirstate[f] != 'a':
1150 if self.dirstate[f] != 'a':
1153 self.ui.warn(_("%s not added!\n") % f)
1151 self.ui.warn(_("%s not added!\n") % f)
1154 else:
1152 else:
1155 self.dirstate.forget(f)
1153 self.dirstate.forget(f)
1156 finally:
1154 finally:
1157 wlock.release()
1155 wlock.release()
1158
1156
1159 def remove(self, list, unlink=False):
1157 def remove(self, list, unlink=False):
1160 wlock = None
1158 wlock = None
1161 try:
1159 try:
1162 if unlink:
1160 if unlink:
1163 for f in list:
1161 for f in list:
1164 try:
1162 try:
1165 util.unlink(self.wjoin(f))
1163 util.unlink(self.wjoin(f))
1166 except OSError, inst:
1164 except OSError, inst:
1167 if inst.errno != errno.ENOENT:
1165 if inst.errno != errno.ENOENT:
1168 raise
1166 raise
1169 wlock = self.wlock()
1167 wlock = self.wlock()
1170 for f in list:
1168 for f in list:
1171 if unlink and os.path.exists(self.wjoin(f)):
1169 if unlink and os.path.exists(self.wjoin(f)):
1172 self.ui.warn(_("%s still exists!\n") % f)
1170 self.ui.warn(_("%s still exists!\n") % f)
1173 elif self.dirstate[f] == 'a':
1171 elif self.dirstate[f] == 'a':
1174 self.dirstate.forget(f)
1172 self.dirstate.forget(f)
1175 elif f not in self.dirstate:
1173 elif f not in self.dirstate:
1176 self.ui.warn(_("%s not tracked!\n") % f)
1174 self.ui.warn(_("%s not tracked!\n") % f)
1177 else:
1175 else:
1178 self.dirstate.remove(f)
1176 self.dirstate.remove(f)
1179 finally:
1177 finally:
1180 release(wlock)
1178 release(wlock)
1181
1179
1182 def undelete(self, list):
1180 def undelete(self, list):
1183 manifests = [self.manifest.read(self.changelog.read(p)[0])
1181 manifests = [self.manifest.read(self.changelog.read(p)[0])
1184 for p in self.dirstate.parents() if p != nullid]
1182 for p in self.dirstate.parents() if p != nullid]
1185 wlock = self.wlock()
1183 wlock = self.wlock()
1186 try:
1184 try:
1187 for f in list:
1185 for f in list:
1188 if self.dirstate[f] != 'r':
1186 if self.dirstate[f] != 'r':
1189 self.ui.warn(_("%s not removed!\n") % f)
1187 self.ui.warn(_("%s not removed!\n") % f)
1190 else:
1188 else:
1191 m = f in manifests[0] and manifests[0] or manifests[1]
1189 m = f in manifests[0] and manifests[0] or manifests[1]
1192 t = self.file(f).read(m[f])
1190 t = self.file(f).read(m[f])
1193 self.wwrite(f, t, m.flags(f))
1191 self.wwrite(f, t, m.flags(f))
1194 self.dirstate.normal(f)
1192 self.dirstate.normal(f)
1195 finally:
1193 finally:
1196 wlock.release()
1194 wlock.release()
1197
1195
1198 def copy(self, source, dest):
1196 def copy(self, source, dest):
1199 p = self.wjoin(dest)
1197 p = self.wjoin(dest)
1200 if not (os.path.exists(p) or os.path.islink(p)):
1198 if not (os.path.exists(p) or os.path.islink(p)):
1201 self.ui.warn(_("%s does not exist!\n") % dest)
1199 self.ui.warn(_("%s does not exist!\n") % dest)
1202 elif not (os.path.isfile(p) or os.path.islink(p)):
1200 elif not (os.path.isfile(p) or os.path.islink(p)):
1203 self.ui.warn(_("copy failed: %s is not a file or a "
1201 self.ui.warn(_("copy failed: %s is not a file or a "
1204 "symbolic link\n") % dest)
1202 "symbolic link\n") % dest)
1205 else:
1203 else:
1206 wlock = self.wlock()
1204 wlock = self.wlock()
1207 try:
1205 try:
1208 if self.dirstate[dest] in '?r':
1206 if self.dirstate[dest] in '?r':
1209 self.dirstate.add(dest)
1207 self.dirstate.add(dest)
1210 self.dirstate.copy(source, dest)
1208 self.dirstate.copy(source, dest)
1211 finally:
1209 finally:
1212 wlock.release()
1210 wlock.release()
1213
1211
1214 def heads(self, start=None, closed=True):
1212 def heads(self, start=None, closed=True):
1215 heads = self.changelog.heads(start)
1213 heads = self.changelog.heads(start)
1216 def display(head):
1214 def display(head):
1217 if closed:
1215 if closed:
1218 return True
1216 return True
1219 extras = self.changelog.read(head)[5]
1217 extras = self.changelog.read(head)[5]
1220 return ('close' not in extras)
1218 return ('close' not in extras)
1221 # sort the output in rev descending order
1219 # sort the output in rev descending order
1222 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1220 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1223 return [n for (r, n) in sorted(heads)]
1221 return [n for (r, n) in sorted(heads)]
1224
1222
1225 def branchheads(self, branch=None, start=None, closed=True):
1223 def branchheads(self, branch=None, start=None, closed=True):
1226 if branch is None:
1224 if branch is None:
1227 branch = self[None].branch()
1225 branch = self[None].branch()
1228 branches = self._branchheads()
1226 branches = self._branchheads()
1229 if branch not in branches:
1227 if branch not in branches:
1230 return []
1228 return []
1231 bheads = branches[branch]
1229 bheads = branches[branch]
1232 # the cache returns heads ordered lowest to highest
1230 # the cache returns heads ordered lowest to highest
1233 bheads.reverse()
1231 bheads.reverse()
1234 if start is not None:
1232 if start is not None:
1235 # filter out the heads that cannot be reached from startrev
1233 # filter out the heads that cannot be reached from startrev
1236 bheads = self.changelog.nodesbetween([start], bheads)[2]
1234 bheads = self.changelog.nodesbetween([start], bheads)[2]
1237 if not closed:
1235 if not closed:
1238 bheads = [h for h in bheads if
1236 bheads = [h for h in bheads if
1239 ('close' not in self.changelog.read(h)[5])]
1237 ('close' not in self.changelog.read(h)[5])]
1240 return bheads
1238 return bheads
1241
1239
1242 def branches(self, nodes):
1240 def branches(self, nodes):
1243 if not nodes:
1241 if not nodes:
1244 nodes = [self.changelog.tip()]
1242 nodes = [self.changelog.tip()]
1245 b = []
1243 b = []
1246 for n in nodes:
1244 for n in nodes:
1247 t = n
1245 t = n
1248 while 1:
1246 while 1:
1249 p = self.changelog.parents(n)
1247 p = self.changelog.parents(n)
1250 if p[1] != nullid or p[0] == nullid:
1248 if p[1] != nullid or p[0] == nullid:
1251 b.append((t, n, p[0], p[1]))
1249 b.append((t, n, p[0], p[1]))
1252 break
1250 break
1253 n = p[0]
1251 n = p[0]
1254 return b
1252 return b
1255
1253
1256 def between(self, pairs):
1254 def between(self, pairs):
1257 r = []
1255 r = []
1258
1256
1259 for top, bottom in pairs:
1257 for top, bottom in pairs:
1260 n, l, i = top, [], 0
1258 n, l, i = top, [], 0
1261 f = 1
1259 f = 1
1262
1260
1263 while n != bottom and n != nullid:
1261 while n != bottom and n != nullid:
1264 p = self.changelog.parents(n)[0]
1262 p = self.changelog.parents(n)[0]
1265 if i == f:
1263 if i == f:
1266 l.append(n)
1264 l.append(n)
1267 f = f * 2
1265 f = f * 2
1268 n = p
1266 n = p
1269 i += 1
1267 i += 1
1270
1268
1271 r.append(l)
1269 r.append(l)
1272
1270
1273 return r
1271 return r
1274
1272
1275 def findincoming(self, remote, base=None, heads=None, force=False):
1273 def findincoming(self, remote, base=None, heads=None, force=False):
1276 """Return list of roots of the subsets of missing nodes from remote
1274 """Return list of roots of the subsets of missing nodes from remote
1277
1275
1278 If base dict is specified, assume that these nodes and their parents
1276 If base dict is specified, assume that these nodes and their parents
1279 exist on the remote side and that no child of a node of base exists
1277 exist on the remote side and that no child of a node of base exists
1280 in both remote and self.
1278 in both remote and self.
1281 Furthermore base will be updated to include the nodes that exists
1279 Furthermore base will be updated to include the nodes that exists
1282 in self and remote but no children exists in self and remote.
1280 in self and remote but no children exists in self and remote.
1283 If a list of heads is specified, return only nodes which are heads
1281 If a list of heads is specified, return only nodes which are heads
1284 or ancestors of these heads.
1282 or ancestors of these heads.
1285
1283
1286 All the ancestors of base are in self and in remote.
1284 All the ancestors of base are in self and in remote.
1287 All the descendants of the list returned are missing in self.
1285 All the descendants of the list returned are missing in self.
1288 (and so we know that the rest of the nodes are missing in remote, see
1286 (and so we know that the rest of the nodes are missing in remote, see
1289 outgoing)
1287 outgoing)
1290 """
1288 """
1291 return self.findcommonincoming(remote, base, heads, force)[1]
1289 return self.findcommonincoming(remote, base, heads, force)[1]
1292
1290
1293 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1291 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1294 """Return a tuple (common, missing roots, heads) used to identify
1292 """Return a tuple (common, missing roots, heads) used to identify
1295 missing nodes from remote.
1293 missing nodes from remote.
1296
1294
1297 If base dict is specified, assume that these nodes and their parents
1295 If base dict is specified, assume that these nodes and their parents
1298 exist on the remote side and that no child of a node of base exists
1296 exist on the remote side and that no child of a node of base exists
1299 in both remote and self.
1297 in both remote and self.
1300 Furthermore base will be updated to include the nodes that exists
1298 Furthermore base will be updated to include the nodes that exists
1301 in self and remote but no children exists in self and remote.
1299 in self and remote but no children exists in self and remote.
1302 If a list of heads is specified, return only nodes which are heads
1300 If a list of heads is specified, return only nodes which are heads
1303 or ancestors of these heads.
1301 or ancestors of these heads.
1304
1302
1305 All the ancestors of base are in self and in remote.
1303 All the ancestors of base are in self and in remote.
1306 """
1304 """
1307 m = self.changelog.nodemap
1305 m = self.changelog.nodemap
1308 search = []
1306 search = []
1309 fetch = set()
1307 fetch = set()
1310 seen = set()
1308 seen = set()
1311 seenbranch = set()
1309 seenbranch = set()
1312 if base == None:
1310 if base == None:
1313 base = {}
1311 base = {}
1314
1312
1315 if not heads:
1313 if not heads:
1316 heads = remote.heads()
1314 heads = remote.heads()
1317
1315
1318 if self.changelog.tip() == nullid:
1316 if self.changelog.tip() == nullid:
1319 base[nullid] = 1
1317 base[nullid] = 1
1320 if heads != [nullid]:
1318 if heads != [nullid]:
1321 return [nullid], [nullid], list(heads)
1319 return [nullid], [nullid], list(heads)
1322 return [nullid], [], []
1320 return [nullid], [], []
1323
1321
1324 # assume we're closer to the tip than the root
1322 # assume we're closer to the tip than the root
1325 # and start by examining the heads
1323 # and start by examining the heads
1326 self.ui.status(_("searching for changes\n"))
1324 self.ui.status(_("searching for changes\n"))
1327
1325
1328 unknown = []
1326 unknown = []
1329 for h in heads:
1327 for h in heads:
1330 if h not in m:
1328 if h not in m:
1331 unknown.append(h)
1329 unknown.append(h)
1332 else:
1330 else:
1333 base[h] = 1
1331 base[h] = 1
1334
1332
1335 heads = unknown
1333 heads = unknown
1336 if not unknown:
1334 if not unknown:
1337 return base.keys(), [], []
1335 return base.keys(), [], []
1338
1336
1339 req = set(unknown)
1337 req = set(unknown)
1340 reqcnt = 0
1338 reqcnt = 0
1341
1339
1342 # search through remote branches
1340 # search through remote branches
1343 # a 'branch' here is a linear segment of history, with four parts:
1341 # a 'branch' here is a linear segment of history, with four parts:
1344 # head, root, first parent, second parent
1342 # head, root, first parent, second parent
1345 # (a branch always has two parents (or none) by definition)
1343 # (a branch always has two parents (or none) by definition)
1346 unknown = remote.branches(unknown)
1344 unknown = remote.branches(unknown)
1347 while unknown:
1345 while unknown:
1348 r = []
1346 r = []
1349 while unknown:
1347 while unknown:
1350 n = unknown.pop(0)
1348 n = unknown.pop(0)
1351 if n[0] in seen:
1349 if n[0] in seen:
1352 continue
1350 continue
1353
1351
1354 self.ui.debug(_("examining %s:%s\n")
1352 self.ui.debug(_("examining %s:%s\n")
1355 % (short(n[0]), short(n[1])))
1353 % (short(n[0]), short(n[1])))
1356 if n[0] == nullid: # found the end of the branch
1354 if n[0] == nullid: # found the end of the branch
1357 pass
1355 pass
1358 elif n in seenbranch:
1356 elif n in seenbranch:
1359 self.ui.debug(_("branch already found\n"))
1357 self.ui.debug(_("branch already found\n"))
1360 continue
1358 continue
1361 elif n[1] and n[1] in m: # do we know the base?
1359 elif n[1] and n[1] in m: # do we know the base?
1362 self.ui.debug(_("found incomplete branch %s:%s\n")
1360 self.ui.debug(_("found incomplete branch %s:%s\n")
1363 % (short(n[0]), short(n[1])))
1361 % (short(n[0]), short(n[1])))
1364 search.append(n[0:2]) # schedule branch range for scanning
1362 search.append(n[0:2]) # schedule branch range for scanning
1365 seenbranch.add(n)
1363 seenbranch.add(n)
1366 else:
1364 else:
1367 if n[1] not in seen and n[1] not in fetch:
1365 if n[1] not in seen and n[1] not in fetch:
1368 if n[2] in m and n[3] in m:
1366 if n[2] in m and n[3] in m:
1369 self.ui.debug(_("found new changeset %s\n") %
1367 self.ui.debug(_("found new changeset %s\n") %
1370 short(n[1]))
1368 short(n[1]))
1371 fetch.add(n[1]) # earliest unknown
1369 fetch.add(n[1]) # earliest unknown
1372 for p in n[2:4]:
1370 for p in n[2:4]:
1373 if p in m:
1371 if p in m:
1374 base[p] = 1 # latest known
1372 base[p] = 1 # latest known
1375
1373
1376 for p in n[2:4]:
1374 for p in n[2:4]:
1377 if p not in req and p not in m:
1375 if p not in req and p not in m:
1378 r.append(p)
1376 r.append(p)
1379 req.add(p)
1377 req.add(p)
1380 seen.add(n[0])
1378 seen.add(n[0])
1381
1379
1382 if r:
1380 if r:
1383 reqcnt += 1
1381 reqcnt += 1
1384 self.ui.debug(_("request %d: %s\n") %
1382 self.ui.debug(_("request %d: %s\n") %
1385 (reqcnt, " ".join(map(short, r))))
1383 (reqcnt, " ".join(map(short, r))))
1386 for p in xrange(0, len(r), 10):
1384 for p in xrange(0, len(r), 10):
1387 for b in remote.branches(r[p:p+10]):
1385 for b in remote.branches(r[p:p+10]):
1388 self.ui.debug(_("received %s:%s\n") %
1386 self.ui.debug(_("received %s:%s\n") %
1389 (short(b[0]), short(b[1])))
1387 (short(b[0]), short(b[1])))
1390 unknown.append(b)
1388 unknown.append(b)
1391
1389
1392 # do binary search on the branches we found
1390 # do binary search on the branches we found
1393 while search:
1391 while search:
1394 newsearch = []
1392 newsearch = []
1395 reqcnt += 1
1393 reqcnt += 1
1396 for n, l in zip(search, remote.between(search)):
1394 for n, l in zip(search, remote.between(search)):
1397 l.append(n[1])
1395 l.append(n[1])
1398 p = n[0]
1396 p = n[0]
1399 f = 1
1397 f = 1
1400 for i in l:
1398 for i in l:
1401 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1399 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1402 if i in m:
1400 if i in m:
1403 if f <= 2:
1401 if f <= 2:
1404 self.ui.debug(_("found new branch changeset %s\n") %
1402 self.ui.debug(_("found new branch changeset %s\n") %
1405 short(p))
1403 short(p))
1406 fetch.add(p)
1404 fetch.add(p)
1407 base[i] = 1
1405 base[i] = 1
1408 else:
1406 else:
1409 self.ui.debug(_("narrowed branch search to %s:%s\n")
1407 self.ui.debug(_("narrowed branch search to %s:%s\n")
1410 % (short(p), short(i)))
1408 % (short(p), short(i)))
1411 newsearch.append((p, i))
1409 newsearch.append((p, i))
1412 break
1410 break
1413 p, f = i, f * 2
1411 p, f = i, f * 2
1414 search = newsearch
1412 search = newsearch
1415
1413
1416 # sanity check our fetch list
1414 # sanity check our fetch list
1417 for f in fetch:
1415 for f in fetch:
1418 if f in m:
1416 if f in m:
1419 raise error.RepoError(_("already have changeset ")
1417 raise error.RepoError(_("already have changeset ")
1420 + short(f[:4]))
1418 + short(f[:4]))
1421
1419
1422 if base.keys() == [nullid]:
1420 if base.keys() == [nullid]:
1423 if force:
1421 if force:
1424 self.ui.warn(_("warning: repository is unrelated\n"))
1422 self.ui.warn(_("warning: repository is unrelated\n"))
1425 else:
1423 else:
1426 raise util.Abort(_("repository is unrelated"))
1424 raise util.Abort(_("repository is unrelated"))
1427
1425
1428 self.ui.debug(_("found new changesets starting at ") +
1426 self.ui.debug(_("found new changesets starting at ") +
1429 " ".join([short(f) for f in fetch]) + "\n")
1427 " ".join([short(f) for f in fetch]) + "\n")
1430
1428
1431 self.ui.debug(_("%d total queries\n") % reqcnt)
1429 self.ui.debug(_("%d total queries\n") % reqcnt)
1432
1430
1433 return base.keys(), list(fetch), heads
1431 return base.keys(), list(fetch), heads
1434
1432
1435 def findoutgoing(self, remote, base=None, heads=None, force=False):
1433 def findoutgoing(self, remote, base=None, heads=None, force=False):
1436 """Return list of nodes that are roots of subsets not in remote
1434 """Return list of nodes that are roots of subsets not in remote
1437
1435
1438 If base dict is specified, assume that these nodes and their parents
1436 If base dict is specified, assume that these nodes and their parents
1439 exist on the remote side.
1437 exist on the remote side.
1440 If a list of heads is specified, return only nodes which are heads
1438 If a list of heads is specified, return only nodes which are heads
1441 or ancestors of these heads, and return a second element which
1439 or ancestors of these heads, and return a second element which
1442 contains all remote heads which get new children.
1440 contains all remote heads which get new children.
1443 """
1441 """
1444 if base == None:
1442 if base == None:
1445 base = {}
1443 base = {}
1446 self.findincoming(remote, base, heads, force=force)
1444 self.findincoming(remote, base, heads, force=force)
1447
1445
1448 self.ui.debug(_("common changesets up to ")
1446 self.ui.debug(_("common changesets up to ")
1449 + " ".join(map(short, base.keys())) + "\n")
1447 + " ".join(map(short, base.keys())) + "\n")
1450
1448
1451 remain = set(self.changelog.nodemap)
1449 remain = set(self.changelog.nodemap)
1452
1450
1453 # prune everything remote has from the tree
1451 # prune everything remote has from the tree
1454 remain.remove(nullid)
1452 remain.remove(nullid)
1455 remove = base.keys()
1453 remove = base.keys()
1456 while remove:
1454 while remove:
1457 n = remove.pop(0)
1455 n = remove.pop(0)
1458 if n in remain:
1456 if n in remain:
1459 remain.remove(n)
1457 remain.remove(n)
1460 for p in self.changelog.parents(n):
1458 for p in self.changelog.parents(n):
1461 remove.append(p)
1459 remove.append(p)
1462
1460
1463 # find every node whose parents have been pruned
1461 # find every node whose parents have been pruned
1464 subset = []
1462 subset = []
1465 # find every remote head that will get new children
1463 # find every remote head that will get new children
1466 updated_heads = {}
1464 updated_heads = {}
1467 for n in remain:
1465 for n in remain:
1468 p1, p2 = self.changelog.parents(n)
1466 p1, p2 = self.changelog.parents(n)
1469 if p1 not in remain and p2 not in remain:
1467 if p1 not in remain and p2 not in remain:
1470 subset.append(n)
1468 subset.append(n)
1471 if heads:
1469 if heads:
1472 if p1 in heads:
1470 if p1 in heads:
1473 updated_heads[p1] = True
1471 updated_heads[p1] = True
1474 if p2 in heads:
1472 if p2 in heads:
1475 updated_heads[p2] = True
1473 updated_heads[p2] = True
1476
1474
1477 # this is the set of all roots we have to push
1475 # this is the set of all roots we have to push
1478 if heads:
1476 if heads:
1479 return subset, updated_heads.keys()
1477 return subset, updated_heads.keys()
1480 else:
1478 else:
1481 return subset
1479 return subset
1482
1480
1483 def pull(self, remote, heads=None, force=False):
1481 def pull(self, remote, heads=None, force=False):
1484 lock = self.lock()
1482 lock = self.lock()
1485 try:
1483 try:
1486 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1484 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1487 force=force)
1485 force=force)
1488 if fetch == [nullid]:
1486 if fetch == [nullid]:
1489 self.ui.status(_("requesting all changes\n"))
1487 self.ui.status(_("requesting all changes\n"))
1490
1488
1491 if not fetch:
1489 if not fetch:
1492 self.ui.status(_("no changes found\n"))
1490 self.ui.status(_("no changes found\n"))
1493 return 0
1491 return 0
1494
1492
1495 if heads is None and remote.capable('changegroupsubset'):
1493 if heads is None and remote.capable('changegroupsubset'):
1496 heads = rheads
1494 heads = rheads
1497
1495
1498 if heads is None:
1496 if heads is None:
1499 cg = remote.changegroup(fetch, 'pull')
1497 cg = remote.changegroup(fetch, 'pull')
1500 else:
1498 else:
1501 if not remote.capable('changegroupsubset'):
1499 if not remote.capable('changegroupsubset'):
1502 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1500 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1503 cg = remote.changegroupsubset(fetch, heads, 'pull')
1501 cg = remote.changegroupsubset(fetch, heads, 'pull')
1504 return self.addchangegroup(cg, 'pull', remote.url())
1502 return self.addchangegroup(cg, 'pull', remote.url())
1505 finally:
1503 finally:
1506 lock.release()
1504 lock.release()
1507
1505
1508 def push(self, remote, force=False, revs=None):
1506 def push(self, remote, force=False, revs=None):
1509 # there are two ways to push to remote repo:
1507 # there are two ways to push to remote repo:
1510 #
1508 #
1511 # addchangegroup assumes local user can lock remote
1509 # addchangegroup assumes local user can lock remote
1512 # repo (local filesystem, old ssh servers).
1510 # repo (local filesystem, old ssh servers).
1513 #
1511 #
1514 # unbundle assumes local user cannot lock remote repo (new ssh
1512 # unbundle assumes local user cannot lock remote repo (new ssh
1515 # servers, http servers).
1513 # servers, http servers).
1516
1514
1517 if remote.capable('unbundle'):
1515 if remote.capable('unbundle'):
1518 return self.push_unbundle(remote, force, revs)
1516 return self.push_unbundle(remote, force, revs)
1519 return self.push_addchangegroup(remote, force, revs)
1517 return self.push_addchangegroup(remote, force, revs)
1520
1518
1521 def prepush(self, remote, force, revs):
1519 def prepush(self, remote, force, revs):
1522 common = {}
1520 common = {}
1523 remote_heads = remote.heads()
1521 remote_heads = remote.heads()
1524 inc = self.findincoming(remote, common, remote_heads, force=force)
1522 inc = self.findincoming(remote, common, remote_heads, force=force)
1525
1523
1526 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1524 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1527 if revs is not None:
1525 if revs is not None:
1528 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1526 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1529 else:
1527 else:
1530 bases, heads = update, self.changelog.heads()
1528 bases, heads = update, self.changelog.heads()
1531
1529
1532 if not bases:
1530 if not bases:
1533 self.ui.status(_("no changes found\n"))
1531 self.ui.status(_("no changes found\n"))
1534 return None, 1
1532 return None, 1
1535 elif not force:
1533 elif not force:
1536 # check if we're creating new remote heads
1534 # check if we're creating new remote heads
1537 # to be a remote head after push, node must be either
1535 # to be a remote head after push, node must be either
1538 # - unknown locally
1536 # - unknown locally
1539 # - a local outgoing head descended from update
1537 # - a local outgoing head descended from update
1540 # - a remote head that's known locally and not
1538 # - a remote head that's known locally and not
1541 # ancestral to an outgoing head
1539 # ancestral to an outgoing head
1542
1540
1543 warn = 0
1541 warn = 0
1544
1542
1545 if remote_heads == [nullid]:
1543 if remote_heads == [nullid]:
1546 warn = 0
1544 warn = 0
1547 elif not revs and len(heads) > len(remote_heads):
1545 elif not revs and len(heads) > len(remote_heads):
1548 warn = 1
1546 warn = 1
1549 else:
1547 else:
1550 newheads = list(heads)
1548 newheads = list(heads)
1551 for r in remote_heads:
1549 for r in remote_heads:
1552 if r in self.changelog.nodemap:
1550 if r in self.changelog.nodemap:
1553 desc = self.changelog.heads(r, heads)
1551 desc = self.changelog.heads(r, heads)
1554 l = [h for h in heads if h in desc]
1552 l = [h for h in heads if h in desc]
1555 if not l:
1553 if not l:
1556 newheads.append(r)
1554 newheads.append(r)
1557 else:
1555 else:
1558 newheads.append(r)
1556 newheads.append(r)
1559 if len(newheads) > len(remote_heads):
1557 if len(newheads) > len(remote_heads):
1560 warn = 1
1558 warn = 1
1561
1559
1562 if warn:
1560 if warn:
1563 self.ui.warn(_("abort: push creates new remote heads!\n"))
1561 self.ui.warn(_("abort: push creates new remote heads!\n"))
1564 self.ui.status(_("(did you forget to merge?"
1562 self.ui.status(_("(did you forget to merge?"
1565 " use push -f to force)\n"))
1563 " use push -f to force)\n"))
1566 return None, 0
1564 return None, 0
1567 elif inc:
1565 elif inc:
1568 self.ui.warn(_("note: unsynced remote changes!\n"))
1566 self.ui.warn(_("note: unsynced remote changes!\n"))
1569
1567
1570
1568
1571 if revs is None:
1569 if revs is None:
1572 # use the fast path, no race possible on push
1570 # use the fast path, no race possible on push
1573 cg = self._changegroup(common.keys(), 'push')
1571 cg = self._changegroup(common.keys(), 'push')
1574 else:
1572 else:
1575 cg = self.changegroupsubset(update, revs, 'push')
1573 cg = self.changegroupsubset(update, revs, 'push')
1576 return cg, remote_heads
1574 return cg, remote_heads
1577
1575
1578 def push_addchangegroup(self, remote, force, revs):
1576 def push_addchangegroup(self, remote, force, revs):
1579 lock = remote.lock()
1577 lock = remote.lock()
1580 try:
1578 try:
1581 ret = self.prepush(remote, force, revs)
1579 ret = self.prepush(remote, force, revs)
1582 if ret[0] is not None:
1580 if ret[0] is not None:
1583 cg, remote_heads = ret
1581 cg, remote_heads = ret
1584 return remote.addchangegroup(cg, 'push', self.url())
1582 return remote.addchangegroup(cg, 'push', self.url())
1585 return ret[1]
1583 return ret[1]
1586 finally:
1584 finally:
1587 lock.release()
1585 lock.release()
1588
1586
1589 def push_unbundle(self, remote, force, revs):
1587 def push_unbundle(self, remote, force, revs):
1590 # local repo finds heads on server, finds out what revs it
1588 # local repo finds heads on server, finds out what revs it
1591 # must push. once revs transferred, if server finds it has
1589 # must push. once revs transferred, if server finds it has
1592 # different heads (someone else won commit/push race), server
1590 # different heads (someone else won commit/push race), server
1593 # aborts.
1591 # aborts.
1594
1592
1595 ret = self.prepush(remote, force, revs)
1593 ret = self.prepush(remote, force, revs)
1596 if ret[0] is not None:
1594 if ret[0] is not None:
1597 cg, remote_heads = ret
1595 cg, remote_heads = ret
1598 if force: remote_heads = ['force']
1596 if force: remote_heads = ['force']
1599 return remote.unbundle(cg, remote_heads, 'push')
1597 return remote.unbundle(cg, remote_heads, 'push')
1600 return ret[1]
1598 return ret[1]
1601
1599
1602 def changegroupinfo(self, nodes, source):
1600 def changegroupinfo(self, nodes, source):
1603 if self.ui.verbose or source == 'bundle':
1601 if self.ui.verbose or source == 'bundle':
1604 self.ui.status(_("%d changesets found\n") % len(nodes))
1602 self.ui.status(_("%d changesets found\n") % len(nodes))
1605 if self.ui.debugflag:
1603 if self.ui.debugflag:
1606 self.ui.debug(_("list of changesets:\n"))
1604 self.ui.debug(_("list of changesets:\n"))
1607 for node in nodes:
1605 for node in nodes:
1608 self.ui.debug("%s\n" % hex(node))
1606 self.ui.debug("%s\n" % hex(node))
1609
1607
1610 def changegroupsubset(self, bases, heads, source, extranodes=None):
1608 def changegroupsubset(self, bases, heads, source, extranodes=None):
1611 """This function generates a changegroup consisting of all the nodes
1609 """This function generates a changegroup consisting of all the nodes
1612 that are descendents of any of the bases, and ancestors of any of
1610 that are descendents of any of the bases, and ancestors of any of
1613 the heads.
1611 the heads.
1614
1612
1615 It is fairly complex as determining which filenodes and which
1613 It is fairly complex as determining which filenodes and which
1616 manifest nodes need to be included for the changeset to be complete
1614 manifest nodes need to be included for the changeset to be complete
1617 is non-trivial.
1615 is non-trivial.
1618
1616
1619 Another wrinkle is doing the reverse, figuring out which changeset in
1617 Another wrinkle is doing the reverse, figuring out which changeset in
1620 the changegroup a particular filenode or manifestnode belongs to.
1618 the changegroup a particular filenode or manifestnode belongs to.
1621
1619
1622 The caller can specify some nodes that must be included in the
1620 The caller can specify some nodes that must be included in the
1623 changegroup using the extranodes argument. It should be a dict
1621 changegroup using the extranodes argument. It should be a dict
1624 where the keys are the filenames (or 1 for the manifest), and the
1622 where the keys are the filenames (or 1 for the manifest), and the
1625 values are lists of (node, linknode) tuples, where node is a wanted
1623 values are lists of (node, linknode) tuples, where node is a wanted
1626 node and linknode is the changelog node that should be transmitted as
1624 node and linknode is the changelog node that should be transmitted as
1627 the linkrev.
1625 the linkrev.
1628 """
1626 """
1629
1627
1630 if extranodes is None:
1628 if extranodes is None:
1631 # can we go through the fast path ?
1629 # can we go through the fast path ?
1632 heads.sort()
1630 heads.sort()
1633 allheads = self.heads()
1631 allheads = self.heads()
1634 allheads.sort()
1632 allheads.sort()
1635 if heads == allheads:
1633 if heads == allheads:
1636 common = []
1634 common = []
1637 # parents of bases are known from both sides
1635 # parents of bases are known from both sides
1638 for n in bases:
1636 for n in bases:
1639 for p in self.changelog.parents(n):
1637 for p in self.changelog.parents(n):
1640 if p != nullid:
1638 if p != nullid:
1641 common.append(p)
1639 common.append(p)
1642 return self._changegroup(common, source)
1640 return self._changegroup(common, source)
1643
1641
1644 self.hook('preoutgoing', throw=True, source=source)
1642 self.hook('preoutgoing', throw=True, source=source)
1645
1643
1646 # Set up some initial variables
1644 # Set up some initial variables
1647 # Make it easy to refer to self.changelog
1645 # Make it easy to refer to self.changelog
1648 cl = self.changelog
1646 cl = self.changelog
1649 # msng is short for missing - compute the list of changesets in this
1647 # msng is short for missing - compute the list of changesets in this
1650 # changegroup.
1648 # changegroup.
1651 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1649 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1652 self.changegroupinfo(msng_cl_lst, source)
1650 self.changegroupinfo(msng_cl_lst, source)
1653 # Some bases may turn out to be superfluous, and some heads may be
1651 # Some bases may turn out to be superfluous, and some heads may be
1654 # too. nodesbetween will return the minimal set of bases and heads
1652 # too. nodesbetween will return the minimal set of bases and heads
1655 # necessary to re-create the changegroup.
1653 # necessary to re-create the changegroup.
1656
1654
1657 # Known heads are the list of heads that it is assumed the recipient
1655 # Known heads are the list of heads that it is assumed the recipient
1658 # of this changegroup will know about.
1656 # of this changegroup will know about.
1659 knownheads = {}
1657 knownheads = {}
1660 # We assume that all parents of bases are known heads.
1658 # We assume that all parents of bases are known heads.
1661 for n in bases:
1659 for n in bases:
1662 for p in cl.parents(n):
1660 for p in cl.parents(n):
1663 if p != nullid:
1661 if p != nullid:
1664 knownheads[p] = 1
1662 knownheads[p] = 1
1665 knownheads = knownheads.keys()
1663 knownheads = knownheads.keys()
1666 if knownheads:
1664 if knownheads:
1667 # Now that we know what heads are known, we can compute which
1665 # Now that we know what heads are known, we can compute which
1668 # changesets are known. The recipient must know about all
1666 # changesets are known. The recipient must know about all
1669 # changesets required to reach the known heads from the null
1667 # changesets required to reach the known heads from the null
1670 # changeset.
1668 # changeset.
1671 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1669 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1672 junk = None
1670 junk = None
1673 # Transform the list into an ersatz set.
1671 # Transform the list into an ersatz set.
1674 has_cl_set = set(has_cl_set)
1672 has_cl_set = set(has_cl_set)
1675 else:
1673 else:
1676 # If there were no known heads, the recipient cannot be assumed to
1674 # If there were no known heads, the recipient cannot be assumed to
1677 # know about any changesets.
1675 # know about any changesets.
1678 has_cl_set = set()
1676 has_cl_set = set()
1679
1677
1680 # Make it easy to refer to self.manifest
1678 # Make it easy to refer to self.manifest
1681 mnfst = self.manifest
1679 mnfst = self.manifest
1682 # We don't know which manifests are missing yet
1680 # We don't know which manifests are missing yet
1683 msng_mnfst_set = {}
1681 msng_mnfst_set = {}
1684 # Nor do we know which filenodes are missing.
1682 # Nor do we know which filenodes are missing.
1685 msng_filenode_set = {}
1683 msng_filenode_set = {}
1686
1684
1687 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1685 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1688 junk = None
1686 junk = None
1689
1687
1690 # A changeset always belongs to itself, so the changenode lookup
1688 # A changeset always belongs to itself, so the changenode lookup
1691 # function for a changenode is identity.
1689 # function for a changenode is identity.
1692 def identity(x):
1690 def identity(x):
1693 return x
1691 return x
1694
1692
1695 # A function generating function. Sets up an environment for the
1693 # A function generating function. Sets up an environment for the
1696 # inner function.
1694 # inner function.
1697 def cmp_by_rev_func(revlog):
1695 def cmp_by_rev_func(revlog):
1698 # Compare two nodes by their revision number in the environment's
1696 # Compare two nodes by their revision number in the environment's
1699 # revision history. Since the revision number both represents the
1697 # revision history. Since the revision number both represents the
1700 # most efficient order to read the nodes in, and represents a
1698 # most efficient order to read the nodes in, and represents a
1701 # topological sorting of the nodes, this function is often useful.
1699 # topological sorting of the nodes, this function is often useful.
1702 def cmp_by_rev(a, b):
1700 def cmp_by_rev(a, b):
1703 return cmp(revlog.rev(a), revlog.rev(b))
1701 return cmp(revlog.rev(a), revlog.rev(b))
1704 return cmp_by_rev
1702 return cmp_by_rev
1705
1703
1706 # If we determine that a particular file or manifest node must be a
1704 # If we determine that a particular file or manifest node must be a
1707 # node that the recipient of the changegroup will already have, we can
1705 # node that the recipient of the changegroup will already have, we can
1708 # also assume the recipient will have all the parents. This function
1706 # also assume the recipient will have all the parents. This function
1709 # prunes them from the set of missing nodes.
1707 # prunes them from the set of missing nodes.
1710 def prune_parents(revlog, hasset, msngset):
1708 def prune_parents(revlog, hasset, msngset):
1711 haslst = hasset.keys()
1709 haslst = hasset.keys()
1712 haslst.sort(cmp_by_rev_func(revlog))
1710 haslst.sort(cmp_by_rev_func(revlog))
1713 for node in haslst:
1711 for node in haslst:
1714 parentlst = [p for p in revlog.parents(node) if p != nullid]
1712 parentlst = [p for p in revlog.parents(node) if p != nullid]
1715 while parentlst:
1713 while parentlst:
1716 n = parentlst.pop()
1714 n = parentlst.pop()
1717 if n not in hasset:
1715 if n not in hasset:
1718 hasset[n] = 1
1716 hasset[n] = 1
1719 p = [p for p in revlog.parents(n) if p != nullid]
1717 p = [p for p in revlog.parents(n) if p != nullid]
1720 parentlst.extend(p)
1718 parentlst.extend(p)
1721 for n in hasset:
1719 for n in hasset:
1722 msngset.pop(n, None)
1720 msngset.pop(n, None)
1723
1721
1724 # This is a function generating function used to set up an environment
1722 # This is a function generating function used to set up an environment
1725 # for the inner function to execute in.
1723 # for the inner function to execute in.
1726 def manifest_and_file_collector(changedfileset):
1724 def manifest_and_file_collector(changedfileset):
1727 # This is an information gathering function that gathers
1725 # This is an information gathering function that gathers
1728 # information from each changeset node that goes out as part of
1726 # information from each changeset node that goes out as part of
1729 # the changegroup. The information gathered is a list of which
1727 # the changegroup. The information gathered is a list of which
1730 # manifest nodes are potentially required (the recipient may
1728 # manifest nodes are potentially required (the recipient may
1731 # already have them) and total list of all files which were
1729 # already have them) and total list of all files which were
1732 # changed in any changeset in the changegroup.
1730 # changed in any changeset in the changegroup.
1733 #
1731 #
1734 # We also remember the first changenode we saw any manifest
1732 # We also remember the first changenode we saw any manifest
1735 # referenced by so we can later determine which changenode 'owns'
1733 # referenced by so we can later determine which changenode 'owns'
1736 # the manifest.
1734 # the manifest.
1737 def collect_manifests_and_files(clnode):
1735 def collect_manifests_and_files(clnode):
1738 c = cl.read(clnode)
1736 c = cl.read(clnode)
1739 for f in c[3]:
1737 for f in c[3]:
1740 # This is to make sure we only have one instance of each
1738 # This is to make sure we only have one instance of each
1741 # filename string for each filename.
1739 # filename string for each filename.
1742 changedfileset.setdefault(f, f)
1740 changedfileset.setdefault(f, f)
1743 msng_mnfst_set.setdefault(c[0], clnode)
1741 msng_mnfst_set.setdefault(c[0], clnode)
1744 return collect_manifests_and_files
1742 return collect_manifests_and_files
1745
1743
1746 # Figure out which manifest nodes (of the ones we think might be part
1744 # Figure out which manifest nodes (of the ones we think might be part
1747 # of the changegroup) the recipient must know about and remove them
1745 # of the changegroup) the recipient must know about and remove them
1748 # from the changegroup.
1746 # from the changegroup.
1749 def prune_manifests():
1747 def prune_manifests():
1750 has_mnfst_set = {}
1748 has_mnfst_set = {}
1751 for n in msng_mnfst_set:
1749 for n in msng_mnfst_set:
1752 # If a 'missing' manifest thinks it belongs to a changenode
1750 # If a 'missing' manifest thinks it belongs to a changenode
1753 # the recipient is assumed to have, obviously the recipient
1751 # the recipient is assumed to have, obviously the recipient
1754 # must have that manifest.
1752 # must have that manifest.
1755 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1753 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1756 if linknode in has_cl_set:
1754 if linknode in has_cl_set:
1757 has_mnfst_set[n] = 1
1755 has_mnfst_set[n] = 1
1758 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1756 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1759
1757
1760 # Use the information collected in collect_manifests_and_files to say
1758 # Use the information collected in collect_manifests_and_files to say
1761 # which changenode any manifestnode belongs to.
1759 # which changenode any manifestnode belongs to.
1762 def lookup_manifest_link(mnfstnode):
1760 def lookup_manifest_link(mnfstnode):
1763 return msng_mnfst_set[mnfstnode]
1761 return msng_mnfst_set[mnfstnode]
1764
1762
1765 # A function generating function that sets up the initial environment
1763 # A function generating function that sets up the initial environment
1766 # the inner function.
1764 # the inner function.
1767 def filenode_collector(changedfiles):
1765 def filenode_collector(changedfiles):
1768 next_rev = [0]
1766 next_rev = [0]
1769 # This gathers information from each manifestnode included in the
1767 # This gathers information from each manifestnode included in the
1770 # changegroup about which filenodes the manifest node references
1768 # changegroup about which filenodes the manifest node references
1771 # so we can include those in the changegroup too.
1769 # so we can include those in the changegroup too.
1772 #
1770 #
1773 # It also remembers which changenode each filenode belongs to. It
1771 # It also remembers which changenode each filenode belongs to. It
1774 # does this by assuming the a filenode belongs to the changenode
1772 # does this by assuming the a filenode belongs to the changenode
1775 # the first manifest that references it belongs to.
1773 # the first manifest that references it belongs to.
1776 def collect_msng_filenodes(mnfstnode):
1774 def collect_msng_filenodes(mnfstnode):
1777 r = mnfst.rev(mnfstnode)
1775 r = mnfst.rev(mnfstnode)
1778 if r == next_rev[0]:
1776 if r == next_rev[0]:
1779 # If the last rev we looked at was the one just previous,
1777 # If the last rev we looked at was the one just previous,
1780 # we only need to see a diff.
1778 # we only need to see a diff.
1781 deltamf = mnfst.readdelta(mnfstnode)
1779 deltamf = mnfst.readdelta(mnfstnode)
1782 # For each line in the delta
1780 # For each line in the delta
1783 for f, fnode in deltamf.iteritems():
1781 for f, fnode in deltamf.iteritems():
1784 f = changedfiles.get(f, None)
1782 f = changedfiles.get(f, None)
1785 # And if the file is in the list of files we care
1783 # And if the file is in the list of files we care
1786 # about.
1784 # about.
1787 if f is not None:
1785 if f is not None:
1788 # Get the changenode this manifest belongs to
1786 # Get the changenode this manifest belongs to
1789 clnode = msng_mnfst_set[mnfstnode]
1787 clnode = msng_mnfst_set[mnfstnode]
1790 # Create the set of filenodes for the file if
1788 # Create the set of filenodes for the file if
1791 # there isn't one already.
1789 # there isn't one already.
1792 ndset = msng_filenode_set.setdefault(f, {})
1790 ndset = msng_filenode_set.setdefault(f, {})
1793 # And set the filenode's changelog node to the
1791 # And set the filenode's changelog node to the
1794 # manifest's if it hasn't been set already.
1792 # manifest's if it hasn't been set already.
1795 ndset.setdefault(fnode, clnode)
1793 ndset.setdefault(fnode, clnode)
1796 else:
1794 else:
1797 # Otherwise we need a full manifest.
1795 # Otherwise we need a full manifest.
1798 m = mnfst.read(mnfstnode)
1796 m = mnfst.read(mnfstnode)
1799 # For every file in we care about.
1797 # For every file in we care about.
1800 for f in changedfiles:
1798 for f in changedfiles:
1801 fnode = m.get(f, None)
1799 fnode = m.get(f, None)
1802 # If it's in the manifest
1800 # If it's in the manifest
1803 if fnode is not None:
1801 if fnode is not None:
1804 # See comments above.
1802 # See comments above.
1805 clnode = msng_mnfst_set[mnfstnode]
1803 clnode = msng_mnfst_set[mnfstnode]
1806 ndset = msng_filenode_set.setdefault(f, {})
1804 ndset = msng_filenode_set.setdefault(f, {})
1807 ndset.setdefault(fnode, clnode)
1805 ndset.setdefault(fnode, clnode)
1808 # Remember the revision we hope to see next.
1806 # Remember the revision we hope to see next.
1809 next_rev[0] = r + 1
1807 next_rev[0] = r + 1
1810 return collect_msng_filenodes
1808 return collect_msng_filenodes
1811
1809
1812 # We have a list of filenodes we think we need for a file, lets remove
1810 # We have a list of filenodes we think we need for a file, lets remove
1813 # all those we now the recipient must have.
1811 # all those we now the recipient must have.
1814 def prune_filenodes(f, filerevlog):
1812 def prune_filenodes(f, filerevlog):
1815 msngset = msng_filenode_set[f]
1813 msngset = msng_filenode_set[f]
1816 hasset = {}
1814 hasset = {}
1817 # If a 'missing' filenode thinks it belongs to a changenode we
1815 # If a 'missing' filenode thinks it belongs to a changenode we
1818 # assume the recipient must have, then the recipient must have
1816 # assume the recipient must have, then the recipient must have
1819 # that filenode.
1817 # that filenode.
1820 for n in msngset:
1818 for n in msngset:
1821 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1819 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1822 if clnode in has_cl_set:
1820 if clnode in has_cl_set:
1823 hasset[n] = 1
1821 hasset[n] = 1
1824 prune_parents(filerevlog, hasset, msngset)
1822 prune_parents(filerevlog, hasset, msngset)
1825
1823
1826 # A function generator function that sets up the a context for the
1824 # A function generator function that sets up the a context for the
1827 # inner function.
1825 # inner function.
1828 def lookup_filenode_link_func(fname):
1826 def lookup_filenode_link_func(fname):
1829 msngset = msng_filenode_set[fname]
1827 msngset = msng_filenode_set[fname]
1830 # Lookup the changenode the filenode belongs to.
1828 # Lookup the changenode the filenode belongs to.
1831 def lookup_filenode_link(fnode):
1829 def lookup_filenode_link(fnode):
1832 return msngset[fnode]
1830 return msngset[fnode]
1833 return lookup_filenode_link
1831 return lookup_filenode_link
1834
1832
1835 # Add the nodes that were explicitly requested.
1833 # Add the nodes that were explicitly requested.
1836 def add_extra_nodes(name, nodes):
1834 def add_extra_nodes(name, nodes):
1837 if not extranodes or name not in extranodes:
1835 if not extranodes or name not in extranodes:
1838 return
1836 return
1839
1837
1840 for node, linknode in extranodes[name]:
1838 for node, linknode in extranodes[name]:
1841 if node not in nodes:
1839 if node not in nodes:
1842 nodes[node] = linknode
1840 nodes[node] = linknode
1843
1841
1844 # Now that we have all theses utility functions to help out and
1842 # Now that we have all theses utility functions to help out and
1845 # logically divide up the task, generate the group.
1843 # logically divide up the task, generate the group.
1846 def gengroup():
1844 def gengroup():
1847 # The set of changed files starts empty.
1845 # The set of changed files starts empty.
1848 changedfiles = {}
1846 changedfiles = {}
1849 # Create a changenode group generator that will call our functions
1847 # Create a changenode group generator that will call our functions
1850 # back to lookup the owning changenode and collect information.
1848 # back to lookup the owning changenode and collect information.
1851 group = cl.group(msng_cl_lst, identity,
1849 group = cl.group(msng_cl_lst, identity,
1852 manifest_and_file_collector(changedfiles))
1850 manifest_and_file_collector(changedfiles))
1853 for chnk in group:
1851 for chnk in group:
1854 yield chnk
1852 yield chnk
1855
1853
1856 # The list of manifests has been collected by the generator
1854 # The list of manifests has been collected by the generator
1857 # calling our functions back.
1855 # calling our functions back.
1858 prune_manifests()
1856 prune_manifests()
1859 add_extra_nodes(1, msng_mnfst_set)
1857 add_extra_nodes(1, msng_mnfst_set)
1860 msng_mnfst_lst = msng_mnfst_set.keys()
1858 msng_mnfst_lst = msng_mnfst_set.keys()
1861 # Sort the manifestnodes by revision number.
1859 # Sort the manifestnodes by revision number.
1862 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1860 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1863 # Create a generator for the manifestnodes that calls our lookup
1861 # Create a generator for the manifestnodes that calls our lookup
1864 # and data collection functions back.
1862 # and data collection functions back.
1865 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1863 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1866 filenode_collector(changedfiles))
1864 filenode_collector(changedfiles))
1867 for chnk in group:
1865 for chnk in group:
1868 yield chnk
1866 yield chnk
1869
1867
1870 # These are no longer needed, dereference and toss the memory for
1868 # These are no longer needed, dereference and toss the memory for
1871 # them.
1869 # them.
1872 msng_mnfst_lst = None
1870 msng_mnfst_lst = None
1873 msng_mnfst_set.clear()
1871 msng_mnfst_set.clear()
1874
1872
1875 if extranodes:
1873 if extranodes:
1876 for fname in extranodes:
1874 for fname in extranodes:
1877 if isinstance(fname, int):
1875 if isinstance(fname, int):
1878 continue
1876 continue
1879 msng_filenode_set.setdefault(fname, {})
1877 msng_filenode_set.setdefault(fname, {})
1880 changedfiles[fname] = 1
1878 changedfiles[fname] = 1
1881 # Go through all our files in order sorted by name.
1879 # Go through all our files in order sorted by name.
1882 for fname in sorted(changedfiles):
1880 for fname in sorted(changedfiles):
1883 filerevlog = self.file(fname)
1881 filerevlog = self.file(fname)
1884 if not len(filerevlog):
1882 if not len(filerevlog):
1885 raise util.Abort(_("empty or missing revlog for %s") % fname)
1883 raise util.Abort(_("empty or missing revlog for %s") % fname)
1886 # Toss out the filenodes that the recipient isn't really
1884 # Toss out the filenodes that the recipient isn't really
1887 # missing.
1885 # missing.
1888 if fname in msng_filenode_set:
1886 if fname in msng_filenode_set:
1889 prune_filenodes(fname, filerevlog)
1887 prune_filenodes(fname, filerevlog)
1890 add_extra_nodes(fname, msng_filenode_set[fname])
1888 add_extra_nodes(fname, msng_filenode_set[fname])
1891 msng_filenode_lst = msng_filenode_set[fname].keys()
1889 msng_filenode_lst = msng_filenode_set[fname].keys()
1892 else:
1890 else:
1893 msng_filenode_lst = []
1891 msng_filenode_lst = []
1894 # If any filenodes are left, generate the group for them,
1892 # If any filenodes are left, generate the group for them,
1895 # otherwise don't bother.
1893 # otherwise don't bother.
1896 if len(msng_filenode_lst) > 0:
1894 if len(msng_filenode_lst) > 0:
1897 yield changegroup.chunkheader(len(fname))
1895 yield changegroup.chunkheader(len(fname))
1898 yield fname
1896 yield fname
1899 # Sort the filenodes by their revision #
1897 # Sort the filenodes by their revision #
1900 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1898 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1901 # Create a group generator and only pass in a changenode
1899 # Create a group generator and only pass in a changenode
1902 # lookup function as we need to collect no information
1900 # lookup function as we need to collect no information
1903 # from filenodes.
1901 # from filenodes.
1904 group = filerevlog.group(msng_filenode_lst,
1902 group = filerevlog.group(msng_filenode_lst,
1905 lookup_filenode_link_func(fname))
1903 lookup_filenode_link_func(fname))
1906 for chnk in group:
1904 for chnk in group:
1907 yield chnk
1905 yield chnk
1908 if fname in msng_filenode_set:
1906 if fname in msng_filenode_set:
1909 # Don't need this anymore, toss it to free memory.
1907 # Don't need this anymore, toss it to free memory.
1910 del msng_filenode_set[fname]
1908 del msng_filenode_set[fname]
1911 # Signal that no more groups are left.
1909 # Signal that no more groups are left.
1912 yield changegroup.closechunk()
1910 yield changegroup.closechunk()
1913
1911
1914 if msng_cl_lst:
1912 if msng_cl_lst:
1915 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1913 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1916
1914
1917 return util.chunkbuffer(gengroup())
1915 return util.chunkbuffer(gengroup())
1918
1916
1919 def changegroup(self, basenodes, source):
1917 def changegroup(self, basenodes, source):
1920 # to avoid a race we use changegroupsubset() (issue1320)
1918 # to avoid a race we use changegroupsubset() (issue1320)
1921 return self.changegroupsubset(basenodes, self.heads(), source)
1919 return self.changegroupsubset(basenodes, self.heads(), source)
1922
1920
1923 def _changegroup(self, common, source):
1921 def _changegroup(self, common, source):
1924 """Generate a changegroup of all nodes that we have that a recipient
1922 """Generate a changegroup of all nodes that we have that a recipient
1925 doesn't.
1923 doesn't.
1926
1924
1927 This is much easier than the previous function as we can assume that
1925 This is much easier than the previous function as we can assume that
1928 the recipient has any changenode we aren't sending them.
1926 the recipient has any changenode we aren't sending them.
1929
1927
1930 common is the set of common nodes between remote and self"""
1928 common is the set of common nodes between remote and self"""
1931
1929
1932 self.hook('preoutgoing', throw=True, source=source)
1930 self.hook('preoutgoing', throw=True, source=source)
1933
1931
1934 cl = self.changelog
1932 cl = self.changelog
1935 nodes = cl.findmissing(common)
1933 nodes = cl.findmissing(common)
1936 revset = set([cl.rev(n) for n in nodes])
1934 revset = set([cl.rev(n) for n in nodes])
1937 self.changegroupinfo(nodes, source)
1935 self.changegroupinfo(nodes, source)
1938
1936
1939 def identity(x):
1937 def identity(x):
1940 return x
1938 return x
1941
1939
1942 def gennodelst(log):
1940 def gennodelst(log):
1943 for r in log:
1941 for r in log:
1944 if log.linkrev(r) in revset:
1942 if log.linkrev(r) in revset:
1945 yield log.node(r)
1943 yield log.node(r)
1946
1944
1947 def changed_file_collector(changedfileset):
1945 def changed_file_collector(changedfileset):
1948 def collect_changed_files(clnode):
1946 def collect_changed_files(clnode):
1949 c = cl.read(clnode)
1947 c = cl.read(clnode)
1950 for fname in c[3]:
1948 for fname in c[3]:
1951 changedfileset[fname] = 1
1949 changedfileset[fname] = 1
1952 return collect_changed_files
1950 return collect_changed_files
1953
1951
1954 def lookuprevlink_func(revlog):
1952 def lookuprevlink_func(revlog):
1955 def lookuprevlink(n):
1953 def lookuprevlink(n):
1956 return cl.node(revlog.linkrev(revlog.rev(n)))
1954 return cl.node(revlog.linkrev(revlog.rev(n)))
1957 return lookuprevlink
1955 return lookuprevlink
1958
1956
1959 def gengroup():
1957 def gengroup():
1960 # construct a list of all changed files
1958 # construct a list of all changed files
1961 changedfiles = {}
1959 changedfiles = {}
1962
1960
1963 for chnk in cl.group(nodes, identity,
1961 for chnk in cl.group(nodes, identity,
1964 changed_file_collector(changedfiles)):
1962 changed_file_collector(changedfiles)):
1965 yield chnk
1963 yield chnk
1966
1964
1967 mnfst = self.manifest
1965 mnfst = self.manifest
1968 nodeiter = gennodelst(mnfst)
1966 nodeiter = gennodelst(mnfst)
1969 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1967 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1970 yield chnk
1968 yield chnk
1971
1969
1972 for fname in sorted(changedfiles):
1970 for fname in sorted(changedfiles):
1973 filerevlog = self.file(fname)
1971 filerevlog = self.file(fname)
1974 if not len(filerevlog):
1972 if not len(filerevlog):
1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1973 raise util.Abort(_("empty or missing revlog for %s") % fname)
1976 nodeiter = gennodelst(filerevlog)
1974 nodeiter = gennodelst(filerevlog)
1977 nodeiter = list(nodeiter)
1975 nodeiter = list(nodeiter)
1978 if nodeiter:
1976 if nodeiter:
1979 yield changegroup.chunkheader(len(fname))
1977 yield changegroup.chunkheader(len(fname))
1980 yield fname
1978 yield fname
1981 lookup = lookuprevlink_func(filerevlog)
1979 lookup = lookuprevlink_func(filerevlog)
1982 for chnk in filerevlog.group(nodeiter, lookup):
1980 for chnk in filerevlog.group(nodeiter, lookup):
1983 yield chnk
1981 yield chnk
1984
1982
1985 yield changegroup.closechunk()
1983 yield changegroup.closechunk()
1986
1984
1987 if nodes:
1985 if nodes:
1988 self.hook('outgoing', node=hex(nodes[0]), source=source)
1986 self.hook('outgoing', node=hex(nodes[0]), source=source)
1989
1987
1990 return util.chunkbuffer(gengroup())
1988 return util.chunkbuffer(gengroup())
1991
1989
1992 def addchangegroup(self, source, srctype, url, emptyok=False):
1990 def addchangegroup(self, source, srctype, url, emptyok=False):
1993 """add changegroup to repo.
1991 """add changegroup to repo.
1994
1992
1995 return values:
1993 return values:
1996 - nothing changed or no source: 0
1994 - nothing changed or no source: 0
1997 - more heads than before: 1+added heads (2..n)
1995 - more heads than before: 1+added heads (2..n)
1998 - less heads than before: -1-removed heads (-2..-n)
1996 - less heads than before: -1-removed heads (-2..-n)
1999 - number of heads stays the same: 1
1997 - number of heads stays the same: 1
2000 """
1998 """
2001 def csmap(x):
1999 def csmap(x):
2002 self.ui.debug(_("add changeset %s\n") % short(x))
2000 self.ui.debug(_("add changeset %s\n") % short(x))
2003 return len(cl)
2001 return len(cl)
2004
2002
2005 def revmap(x):
2003 def revmap(x):
2006 return cl.rev(x)
2004 return cl.rev(x)
2007
2005
2008 if not source:
2006 if not source:
2009 return 0
2007 return 0
2010
2008
2011 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2009 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2012
2010
2013 changesets = files = revisions = 0
2011 changesets = files = revisions = 0
2014
2012
2015 # write changelog data to temp files so concurrent readers will not see
2013 # write changelog data to temp files so concurrent readers will not see
2016 # inconsistent view
2014 # inconsistent view
2017 cl = self.changelog
2015 cl = self.changelog
2018 cl.delayupdate()
2016 cl.delayupdate()
2019 oldheads = len(cl.heads())
2017 oldheads = len(cl.heads())
2020
2018
2021 tr = self.transaction()
2019 tr = self.transaction()
2022 try:
2020 try:
2023 trp = weakref.proxy(tr)
2021 trp = weakref.proxy(tr)
2024 # pull off the changeset group
2022 # pull off the changeset group
2025 self.ui.status(_("adding changesets\n"))
2023 self.ui.status(_("adding changesets\n"))
2026 cor = len(cl) - 1
2024 cor = len(cl) - 1
2027 chunkiter = changegroup.chunkiter(source)
2025 chunkiter = changegroup.chunkiter(source)
2028 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2026 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2029 raise util.Abort(_("received changelog group is empty"))
2027 raise util.Abort(_("received changelog group is empty"))
2030 cnr = len(cl) - 1
2028 cnr = len(cl) - 1
2031 changesets = cnr - cor
2029 changesets = cnr - cor
2032
2030
2033 # pull off the manifest group
2031 # pull off the manifest group
2034 self.ui.status(_("adding manifests\n"))
2032 self.ui.status(_("adding manifests\n"))
2035 chunkiter = changegroup.chunkiter(source)
2033 chunkiter = changegroup.chunkiter(source)
2036 # no need to check for empty manifest group here:
2034 # no need to check for empty manifest group here:
2037 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2035 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2038 # no new manifest will be created and the manifest group will
2036 # no new manifest will be created and the manifest group will
2039 # be empty during the pull
2037 # be empty during the pull
2040 self.manifest.addgroup(chunkiter, revmap, trp)
2038 self.manifest.addgroup(chunkiter, revmap, trp)
2041
2039
2042 # process the files
2040 # process the files
2043 self.ui.status(_("adding file changes\n"))
2041 self.ui.status(_("adding file changes\n"))
2044 while 1:
2042 while 1:
2045 f = changegroup.getchunk(source)
2043 f = changegroup.getchunk(source)
2046 if not f:
2044 if not f:
2047 break
2045 break
2048 self.ui.debug(_("adding %s revisions\n") % f)
2046 self.ui.debug(_("adding %s revisions\n") % f)
2049 fl = self.file(f)
2047 fl = self.file(f)
2050 o = len(fl)
2048 o = len(fl)
2051 chunkiter = changegroup.chunkiter(source)
2049 chunkiter = changegroup.chunkiter(source)
2052 if fl.addgroup(chunkiter, revmap, trp) is None:
2050 if fl.addgroup(chunkiter, revmap, trp) is None:
2053 raise util.Abort(_("received file revlog group is empty"))
2051 raise util.Abort(_("received file revlog group is empty"))
2054 revisions += len(fl) - o
2052 revisions += len(fl) - o
2055 files += 1
2053 files += 1
2056
2054
2057 newheads = len(self.changelog.heads())
2055 newheads = len(self.changelog.heads())
2058 heads = ""
2056 heads = ""
2059 if oldheads and newheads != oldheads:
2057 if oldheads and newheads != oldheads:
2060 heads = _(" (%+d heads)") % (newheads - oldheads)
2058 heads = _(" (%+d heads)") % (newheads - oldheads)
2061
2059
2062 self.ui.status(_("added %d changesets"
2060 self.ui.status(_("added %d changesets"
2063 " with %d changes to %d files%s\n")
2061 " with %d changes to %d files%s\n")
2064 % (changesets, revisions, files, heads))
2062 % (changesets, revisions, files, heads))
2065
2063
2066 if changesets > 0:
2064 if changesets > 0:
2067 p = lambda: self.changelog.writepending() and self.root or ""
2065 p = lambda: self.changelog.writepending() and self.root or ""
2068 self.hook('pretxnchangegroup', throw=True,
2066 self.hook('pretxnchangegroup', throw=True,
2069 node=hex(self.changelog.node(cor+1)), source=srctype,
2067 node=hex(self.changelog.node(cor+1)), source=srctype,
2070 url=url, pending=p)
2068 url=url, pending=p)
2071
2069
2072 # make changelog see real files again
2070 # make changelog see real files again
2073 cl.finalize(trp)
2071 cl.finalize(trp)
2074
2072
2075 tr.close()
2073 tr.close()
2076 finally:
2074 finally:
2077 del tr
2075 del tr
2078
2076
2079 if changesets > 0:
2077 if changesets > 0:
2080 # forcefully update the on-disk branch cache
2078 # forcefully update the on-disk branch cache
2081 self.ui.debug(_("updating the branch cache\n"))
2079 self.ui.debug(_("updating the branch cache\n"))
2082 self.branchtags()
2080 self.branchtags()
2083 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2081 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2084 source=srctype, url=url)
2082 source=srctype, url=url)
2085
2083
2086 for i in xrange(cor + 1, cnr + 1):
2084 for i in xrange(cor + 1, cnr + 1):
2087 self.hook("incoming", node=hex(self.changelog.node(i)),
2085 self.hook("incoming", node=hex(self.changelog.node(i)),
2088 source=srctype, url=url)
2086 source=srctype, url=url)
2089
2087
2090 # never return 0 here:
2088 # never return 0 here:
2091 if newheads < oldheads:
2089 if newheads < oldheads:
2092 return newheads - oldheads - 1
2090 return newheads - oldheads - 1
2093 else:
2091 else:
2094 return newheads - oldheads + 1
2092 return newheads - oldheads + 1
2095
2093
2096
2094
2097 def stream_in(self, remote):
2095 def stream_in(self, remote):
2098 fp = remote.stream_out()
2096 fp = remote.stream_out()
2099 l = fp.readline()
2097 l = fp.readline()
2100 try:
2098 try:
2101 resp = int(l)
2099 resp = int(l)
2102 except ValueError:
2100 except ValueError:
2103 raise error.ResponseError(
2101 raise error.ResponseError(
2104 _('Unexpected response from remote server:'), l)
2102 _('Unexpected response from remote server:'), l)
2105 if resp == 1:
2103 if resp == 1:
2106 raise util.Abort(_('operation forbidden by server'))
2104 raise util.Abort(_('operation forbidden by server'))
2107 elif resp == 2:
2105 elif resp == 2:
2108 raise util.Abort(_('locking the remote repository failed'))
2106 raise util.Abort(_('locking the remote repository failed'))
2109 elif resp != 0:
2107 elif resp != 0:
2110 raise util.Abort(_('the server sent an unknown error code'))
2108 raise util.Abort(_('the server sent an unknown error code'))
2111 self.ui.status(_('streaming all changes\n'))
2109 self.ui.status(_('streaming all changes\n'))
2112 l = fp.readline()
2110 l = fp.readline()
2113 try:
2111 try:
2114 total_files, total_bytes = map(int, l.split(' ', 1))
2112 total_files, total_bytes = map(int, l.split(' ', 1))
2115 except (ValueError, TypeError):
2113 except (ValueError, TypeError):
2116 raise error.ResponseError(
2114 raise error.ResponseError(
2117 _('Unexpected response from remote server:'), l)
2115 _('Unexpected response from remote server:'), l)
2118 self.ui.status(_('%d files to transfer, %s of data\n') %
2116 self.ui.status(_('%d files to transfer, %s of data\n') %
2119 (total_files, util.bytecount(total_bytes)))
2117 (total_files, util.bytecount(total_bytes)))
2120 start = time.time()
2118 start = time.time()
2121 for i in xrange(total_files):
2119 for i in xrange(total_files):
2122 # XXX doesn't support '\n' or '\r' in filenames
2120 # XXX doesn't support '\n' or '\r' in filenames
2123 l = fp.readline()
2121 l = fp.readline()
2124 try:
2122 try:
2125 name, size = l.split('\0', 1)
2123 name, size = l.split('\0', 1)
2126 size = int(size)
2124 size = int(size)
2127 except (ValueError, TypeError):
2125 except (ValueError, TypeError):
2128 raise error.ResponseError(
2126 raise error.ResponseError(
2129 _('Unexpected response from remote server:'), l)
2127 _('Unexpected response from remote server:'), l)
2130 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2128 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2131 ofp = self.sopener(name, 'w')
2129 ofp = self.sopener(name, 'w')
2132 for chunk in util.filechunkiter(fp, limit=size):
2130 for chunk in util.filechunkiter(fp, limit=size):
2133 ofp.write(chunk)
2131 ofp.write(chunk)
2134 ofp.close()
2132 ofp.close()
2135 elapsed = time.time() - start
2133 elapsed = time.time() - start
2136 if elapsed <= 0:
2134 if elapsed <= 0:
2137 elapsed = 0.001
2135 elapsed = 0.001
2138 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2136 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2139 (util.bytecount(total_bytes), elapsed,
2137 (util.bytecount(total_bytes), elapsed,
2140 util.bytecount(total_bytes / elapsed)))
2138 util.bytecount(total_bytes / elapsed)))
2141 self.invalidate()
2139 self.invalidate()
2142 return len(self.heads()) + 1
2140 return len(self.heads()) + 1
2143
2141
2144 def clone(self, remote, heads=[], stream=False):
2142 def clone(self, remote, heads=[], stream=False):
2145 '''clone remote repository.
2143 '''clone remote repository.
2146
2144
2147 keyword arguments:
2145 keyword arguments:
2148 heads: list of revs to clone (forces use of pull)
2146 heads: list of revs to clone (forces use of pull)
2149 stream: use streaming clone if possible'''
2147 stream: use streaming clone if possible'''
2150
2148
2151 # now, all clients that can request uncompressed clones can
2149 # now, all clients that can request uncompressed clones can
2152 # read repo formats supported by all servers that can serve
2150 # read repo formats supported by all servers that can serve
2153 # them.
2151 # them.
2154
2152
2155 # if revlog format changes, client will have to check version
2153 # if revlog format changes, client will have to check version
2156 # and format flags on "stream" capability, and use
2154 # and format flags on "stream" capability, and use
2157 # uncompressed only if compatible.
2155 # uncompressed only if compatible.
2158
2156
2159 if stream and not heads and remote.capable('stream'):
2157 if stream and not heads and remote.capable('stream'):
2160 return self.stream_in(remote)
2158 return self.stream_in(remote)
2161 return self.pull(remote, heads)
2159 return self.pull(remote, heads)
2162
2160
2163 # used to avoid circular references so destructors work
2161 # used to avoid circular references so destructors work
2164 def aftertrans(files):
2162 def aftertrans(files):
2165 renamefiles = [tuple(t) for t in files]
2163 renamefiles = [tuple(t) for t in files]
2166 def a():
2164 def a():
2167 for src, dest in renamefiles:
2165 for src, dest in renamefiles:
2168 util.rename(src, dest)
2166 util.rename(src, dest)
2169 return a
2167 return a
2170
2168
2171 def instance(ui, path, create):
2169 def instance(ui, path, create):
2172 return localrepository(ui, util.drop_scheme('file', path), create)
2170 return localrepository(ui, util.drop_scheme('file', path), create)
2173
2171
2174 def islocal(path):
2172 def islocal(path):
2175 return True
2173 return True
@@ -1,247 +1,246 b''
1 # sshrepo.py - ssh repository proxy class for mercurial
1 # sshrepo.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex
8 from node import bin, hex
9 from i18n import _
9 from i18n import _
10 import repo, re, util, error
10 import repo, re, util, error
11
11
12 class remotelock(object):
12 class remotelock(object):
13 def __init__(self, repo):
13 def __init__(self, repo):
14 self.repo = repo
14 self.repo = repo
15 def release(self):
15 def release(self):
16 self.repo.unlock()
16 self.repo.unlock()
17 self.repo = None
17 self.repo = None
18 def __del__(self):
18 def __del__(self):
19 if self.repo:
19 if self.repo:
20 self.release()
20 self.release()
21
21
22 class sshrepository(repo.repository):
22 class sshrepository(repo.repository):
23 def __init__(self, ui, path, create=0):
23 def __init__(self, ui, path, create=0):
24 self._url = path
24 self._url = path
25 self.ui = ui
25 self.ui = ui
26
26
27 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
27 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
28 if not m:
28 if not m:
29 self.abort(error.RepoError(_("couldn't parse location %s") % path))
29 self.abort(error.RepoError(_("couldn't parse location %s") % path))
30
30
31 self.user = m.group(2)
31 self.user = m.group(2)
32 self.host = m.group(3)
32 self.host = m.group(3)
33 self.port = m.group(5)
33 self.port = m.group(5)
34 self.path = m.group(7) or "."
34 self.path = m.group(7) or "."
35
35
36 sshcmd = self.ui.config("ui", "ssh", "ssh")
36 sshcmd = self.ui.config("ui", "ssh", "ssh")
37 remotecmd = self.ui.config("ui", "remotecmd", "hg")
37 remotecmd = self.ui.config("ui", "remotecmd", "hg")
38
38
39 args = util.sshargs(sshcmd, self.host, self.user, self.port)
39 args = util.sshargs(sshcmd, self.host, self.user, self.port)
40
40
41 if create:
41 if create:
42 cmd = '%s %s "%s init %s"'
42 cmd = '%s %s "%s init %s"'
43 cmd = cmd % (sshcmd, args, remotecmd, self.path)
43 cmd = cmd % (sshcmd, args, remotecmd, self.path)
44
44
45 ui.note(_('running %s\n') % cmd)
45 ui.note(_('running %s\n') % cmd)
46 res = util.system(cmd)
46 res = util.system(cmd)
47 if res != 0:
47 if res != 0:
48 self.abort(error.RepoError(_("could not create remote repo")))
48 self.abort(error.RepoError(_("could not create remote repo")))
49
49
50 self.validate_repo(ui, sshcmd, args, remotecmd)
50 self.validate_repo(ui, sshcmd, args, remotecmd)
51
51
52 def url(self):
52 def url(self):
53 return self._url
53 return self._url
54
54
55 def validate_repo(self, ui, sshcmd, args, remotecmd):
55 def validate_repo(self, ui, sshcmd, args, remotecmd):
56 # cleanup up previous run
56 # cleanup up previous run
57 self.cleanup()
57 self.cleanup()
58
58
59 cmd = '%s %s "%s -R %s serve --stdio"'
59 cmd = '%s %s "%s -R %s serve --stdio"'
60 cmd = cmd % (sshcmd, args, remotecmd, self.path)
60 cmd = cmd % (sshcmd, args, remotecmd, self.path)
61
61
62 cmd = util.quotecommand(cmd)
62 cmd = util.quotecommand(cmd)
63 ui.note(_('running %s\n') % cmd)
63 ui.note(_('running %s\n') % cmd)
64 self.pipeo, self.pipei, self.pipee = util.popen3(cmd, 'b')
64 self.pipeo, self.pipei, self.pipee = util.popen3(cmd, 'b')
65
65
66 # skip any noise generated by remote shell
66 # skip any noise generated by remote shell
67 self.do_cmd("hello")
67 self.do_cmd("hello")
68 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
68 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
69 lines = ["", "dummy"]
69 lines = ["", "dummy"]
70 max_noise = 500
70 max_noise = 500
71 while lines[-1] and max_noise:
71 while lines[-1] and max_noise:
72 l = r.readline()
72 l = r.readline()
73 self.readerr()
73 self.readerr()
74 if lines[-1] == "1\n" and l == "\n":
74 if lines[-1] == "1\n" and l == "\n":
75 break
75 break
76 if l:
76 if l:
77 ui.debug(_("remote: "), l)
77 ui.debug(_("remote: "), l)
78 lines.append(l)
78 lines.append(l)
79 max_noise -= 1
79 max_noise -= 1
80 else:
80 else:
81 self.abort(error.RepoError(_("no suitable response from remote hg")))
81 self.abort(error.RepoError(_("no suitable response from remote hg")))
82
82
83 self.capabilities = set()
83 self.capabilities = set()
84 lines.reverse()
84 for l in reversed(lines):
85 for l in lines:
86 if l.startswith("capabilities:"):
85 if l.startswith("capabilities:"):
87 self.capabilities.update(l[:-1].split(":")[1].split())
86 self.capabilities.update(l[:-1].split(":")[1].split())
88 break
87 break
89
88
90 def readerr(self):
89 def readerr(self):
91 while 1:
90 while 1:
92 size = util.fstat(self.pipee).st_size
91 size = util.fstat(self.pipee).st_size
93 if size == 0: break
92 if size == 0: break
94 l = self.pipee.readline()
93 l = self.pipee.readline()
95 if not l: break
94 if not l: break
96 self.ui.status(_("remote: "), l)
95 self.ui.status(_("remote: "), l)
97
96
98 def abort(self, exception):
97 def abort(self, exception):
99 self.cleanup()
98 self.cleanup()
100 raise exception
99 raise exception
101
100
102 def cleanup(self):
101 def cleanup(self):
103 try:
102 try:
104 self.pipeo.close()
103 self.pipeo.close()
105 self.pipei.close()
104 self.pipei.close()
106 # read the error descriptor until EOF
105 # read the error descriptor until EOF
107 for l in self.pipee:
106 for l in self.pipee:
108 self.ui.status(_("remote: "), l)
107 self.ui.status(_("remote: "), l)
109 self.pipee.close()
108 self.pipee.close()
110 except:
109 except:
111 pass
110 pass
112
111
113 __del__ = cleanup
112 __del__ = cleanup
114
113
115 def do_cmd(self, cmd, **args):
114 def do_cmd(self, cmd, **args):
116 self.ui.debug(_("sending %s command\n") % cmd)
115 self.ui.debug(_("sending %s command\n") % cmd)
117 self.pipeo.write("%s\n" % cmd)
116 self.pipeo.write("%s\n" % cmd)
118 for k, v in args.iteritems():
117 for k, v in args.iteritems():
119 self.pipeo.write("%s %d\n" % (k, len(v)))
118 self.pipeo.write("%s %d\n" % (k, len(v)))
120 self.pipeo.write(v)
119 self.pipeo.write(v)
121 self.pipeo.flush()
120 self.pipeo.flush()
122
121
123 return self.pipei
122 return self.pipei
124
123
125 def call(self, cmd, **args):
124 def call(self, cmd, **args):
126 self.do_cmd(cmd, **args)
125 self.do_cmd(cmd, **args)
127 return self._recv()
126 return self._recv()
128
127
129 def _recv(self):
128 def _recv(self):
130 l = self.pipei.readline()
129 l = self.pipei.readline()
131 self.readerr()
130 self.readerr()
132 try:
131 try:
133 l = int(l)
132 l = int(l)
134 except:
133 except:
135 self.abort(error.ResponseError(_("unexpected response:"), l))
134 self.abort(error.ResponseError(_("unexpected response:"), l))
136 return self.pipei.read(l)
135 return self.pipei.read(l)
137
136
138 def _send(self, data, flush=False):
137 def _send(self, data, flush=False):
139 self.pipeo.write("%d\n" % len(data))
138 self.pipeo.write("%d\n" % len(data))
140 if data:
139 if data:
141 self.pipeo.write(data)
140 self.pipeo.write(data)
142 if flush:
141 if flush:
143 self.pipeo.flush()
142 self.pipeo.flush()
144 self.readerr()
143 self.readerr()
145
144
146 def lock(self):
145 def lock(self):
147 self.call("lock")
146 self.call("lock")
148 return remotelock(self)
147 return remotelock(self)
149
148
150 def unlock(self):
149 def unlock(self):
151 self.call("unlock")
150 self.call("unlock")
152
151
153 def lookup(self, key):
152 def lookup(self, key):
154 self.requirecap('lookup', _('look up remote revision'))
153 self.requirecap('lookup', _('look up remote revision'))
155 d = self.call("lookup", key=key)
154 d = self.call("lookup", key=key)
156 success, data = d[:-1].split(" ", 1)
155 success, data = d[:-1].split(" ", 1)
157 if int(success):
156 if int(success):
158 return bin(data)
157 return bin(data)
159 else:
158 else:
160 self.abort(error.RepoError(data))
159 self.abort(error.RepoError(data))
161
160
162 def heads(self):
161 def heads(self):
163 d = self.call("heads")
162 d = self.call("heads")
164 try:
163 try:
165 return map(bin, d[:-1].split(" "))
164 return map(bin, d[:-1].split(" "))
166 except:
165 except:
167 self.abort(error.ResponseError(_("unexpected response:"), d))
166 self.abort(error.ResponseError(_("unexpected response:"), d))
168
167
169 def branches(self, nodes):
168 def branches(self, nodes):
170 n = " ".join(map(hex, nodes))
169 n = " ".join(map(hex, nodes))
171 d = self.call("branches", nodes=n)
170 d = self.call("branches", nodes=n)
172 try:
171 try:
173 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
172 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
174 return br
173 return br
175 except:
174 except:
176 self.abort(error.ResponseError(_("unexpected response:"), d))
175 self.abort(error.ResponseError(_("unexpected response:"), d))
177
176
178 def between(self, pairs):
177 def between(self, pairs):
179 n = " ".join(["-".join(map(hex, p)) for p in pairs])
178 n = " ".join(["-".join(map(hex, p)) for p in pairs])
180 d = self.call("between", pairs=n)
179 d = self.call("between", pairs=n)
181 try:
180 try:
182 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
181 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
183 return p
182 return p
184 except:
183 except:
185 self.abort(error.ResponseError(_("unexpected response:"), d))
184 self.abort(error.ResponseError(_("unexpected response:"), d))
186
185
187 def changegroup(self, nodes, kind):
186 def changegroup(self, nodes, kind):
188 n = " ".join(map(hex, nodes))
187 n = " ".join(map(hex, nodes))
189 return self.do_cmd("changegroup", roots=n)
188 return self.do_cmd("changegroup", roots=n)
190
189
191 def changegroupsubset(self, bases, heads, kind):
190 def changegroupsubset(self, bases, heads, kind):
192 self.requirecap('changegroupsubset', _('look up remote changes'))
191 self.requirecap('changegroupsubset', _('look up remote changes'))
193 bases = " ".join(map(hex, bases))
192 bases = " ".join(map(hex, bases))
194 heads = " ".join(map(hex, heads))
193 heads = " ".join(map(hex, heads))
195 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
194 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
196
195
197 def unbundle(self, cg, heads, source):
196 def unbundle(self, cg, heads, source):
198 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
197 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
199 if d:
198 if d:
200 # remote may send "unsynced changes"
199 # remote may send "unsynced changes"
201 self.abort(error.RepoError(_("push refused: %s") % d))
200 self.abort(error.RepoError(_("push refused: %s") % d))
202
201
203 while 1:
202 while 1:
204 d = cg.read(4096)
203 d = cg.read(4096)
205 if not d:
204 if not d:
206 break
205 break
207 self._send(d)
206 self._send(d)
208
207
209 self._send("", flush=True)
208 self._send("", flush=True)
210
209
211 r = self._recv()
210 r = self._recv()
212 if r:
211 if r:
213 # remote may send "unsynced changes"
212 # remote may send "unsynced changes"
214 self.abort(error.RepoError(_("push failed: %s") % r))
213 self.abort(error.RepoError(_("push failed: %s") % r))
215
214
216 r = self._recv()
215 r = self._recv()
217 try:
216 try:
218 return int(r)
217 return int(r)
219 except:
218 except:
220 self.abort(error.ResponseError(_("unexpected response:"), r))
219 self.abort(error.ResponseError(_("unexpected response:"), r))
221
220
222 def addchangegroup(self, cg, source, url):
221 def addchangegroup(self, cg, source, url):
223 d = self.call("addchangegroup")
222 d = self.call("addchangegroup")
224 if d:
223 if d:
225 self.abort(error.RepoError(_("push refused: %s") % d))
224 self.abort(error.RepoError(_("push refused: %s") % d))
226 while 1:
225 while 1:
227 d = cg.read(4096)
226 d = cg.read(4096)
228 if not d:
227 if not d:
229 break
228 break
230 self.pipeo.write(d)
229 self.pipeo.write(d)
231 self.readerr()
230 self.readerr()
232
231
233 self.pipeo.flush()
232 self.pipeo.flush()
234
233
235 self.readerr()
234 self.readerr()
236 r = self._recv()
235 r = self._recv()
237 if not r:
236 if not r:
238 return 1
237 return 1
239 try:
238 try:
240 return int(r)
239 return int(r)
241 except:
240 except:
242 self.abort(error.ResponseError(_("unexpected response:"), r))
241 self.abort(error.ResponseError(_("unexpected response:"), r))
243
242
244 def stream_out(self):
243 def stream_out(self):
245 return self.do_cmd('stream_out')
244 return self.do_cmd('stream_out')
246
245
247 instance = sshrepository
246 instance = sshrepository
@@ -1,297 +1,295 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import os, stat, osutil, util
9 import os, stat, osutil, util
10
10
11 _sha = util.sha1
11 _sha = util.sha1
12
12
13 def _buildencodefun():
13 def _buildencodefun():
14 e = '_'
14 e = '_'
15 win_reserved = [ord(x) for x in '\\:*?"<>|']
15 win_reserved = [ord(x) for x in '\\:*?"<>|']
16 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
16 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
17 for x in (range(32) + range(126, 256) + win_reserved):
17 for x in (range(32) + range(126, 256) + win_reserved):
18 cmap[chr(x)] = "~%02x" % x
18 cmap[chr(x)] = "~%02x" % x
19 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
19 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
20 cmap[chr(x)] = e + chr(x).lower()
20 cmap[chr(x)] = e + chr(x).lower()
21 dmap = {}
21 dmap = {}
22 for k, v in cmap.iteritems():
22 for k, v in cmap.iteritems():
23 dmap[v] = k
23 dmap[v] = k
24 def decode(s):
24 def decode(s):
25 i = 0
25 i = 0
26 while i < len(s):
26 while i < len(s):
27 for l in xrange(1, 4):
27 for l in xrange(1, 4):
28 try:
28 try:
29 yield dmap[s[i:i+l]]
29 yield dmap[s[i:i+l]]
30 i += l
30 i += l
31 break
31 break
32 except KeyError:
32 except KeyError:
33 pass
33 pass
34 else:
34 else:
35 raise KeyError
35 raise KeyError
36 return (lambda s: "".join([cmap[c] for c in s]),
36 return (lambda s: "".join([cmap[c] for c in s]),
37 lambda s: "".join(list(decode(s))))
37 lambda s: "".join(list(decode(s))))
38
38
39 encodefilename, decodefilename = _buildencodefun()
39 encodefilename, decodefilename = _buildencodefun()
40
40
41 def _build_lower_encodefun():
41 def _build_lower_encodefun():
42 win_reserved = [ord(x) for x in '\\:*?"<>|']
42 win_reserved = [ord(x) for x in '\\:*?"<>|']
43 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
43 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
44 for x in (range(32) + range(126, 256) + win_reserved):
44 for x in (range(32) + range(126, 256) + win_reserved):
45 cmap[chr(x)] = "~%02x" % x
45 cmap[chr(x)] = "~%02x" % x
46 for x in range(ord("A"), ord("Z")+1):
46 for x in range(ord("A"), ord("Z")+1):
47 cmap[chr(x)] = chr(x).lower()
47 cmap[chr(x)] = chr(x).lower()
48 return lambda s: "".join([cmap[c] for c in s])
48 return lambda s: "".join([cmap[c] for c in s])
49
49
50 lowerencode = _build_lower_encodefun()
50 lowerencode = _build_lower_encodefun()
51
51
52 _windows_reserved_filenames = '''con prn aux nul
52 _windows_reserved_filenames = '''con prn aux nul
53 com1 com2 com3 com4 com5 com6 com7 com8 com9
53 com1 com2 com3 com4 com5 com6 com7 com8 com9
54 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
54 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
55 def auxencode(path):
55 def auxencode(path):
56 res = []
56 res = []
57 for n in path.split('/'):
57 for n in path.split('/'):
58 if n:
58 if n:
59 base = n.split('.')[0]
59 base = n.split('.')[0]
60 if base and (base in _windows_reserved_filenames):
60 if base and (base in _windows_reserved_filenames):
61 # encode third letter ('aux' -> 'au~78')
61 # encode third letter ('aux' -> 'au~78')
62 ec = "~%02x" % ord(n[2])
62 ec = "~%02x" % ord(n[2])
63 n = n[0:2] + ec + n[3:]
63 n = n[0:2] + ec + n[3:]
64 if n[-1] in '. ':
64 if n[-1] in '. ':
65 # encode last period or space ('foo...' -> 'foo..~2e')
65 # encode last period or space ('foo...' -> 'foo..~2e')
66 n = n[:-1] + "~%02x" % ord(n[-1])
66 n = n[:-1] + "~%02x" % ord(n[-1])
67 res.append(n)
67 res.append(n)
68 return '/'.join(res)
68 return '/'.join(res)
69
69
70 MAX_PATH_LEN_IN_HGSTORE = 120
70 MAX_PATH_LEN_IN_HGSTORE = 120
71 DIR_PREFIX_LEN = 8
71 DIR_PREFIX_LEN = 8
72 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
72 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
73 def hybridencode(path):
73 def hybridencode(path):
74 '''encodes path with a length limit
74 '''encodes path with a length limit
75
75
76 Encodes all paths that begin with 'data/', according to the following.
76 Encodes all paths that begin with 'data/', according to the following.
77
77
78 Default encoding (reversible):
78 Default encoding (reversible):
79
79
80 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
80 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
81 characters are encoded as '~xx', where xx is the two digit hex code
81 characters are encoded as '~xx', where xx is the two digit hex code
82 of the character (see encodefilename).
82 of the character (see encodefilename).
83 Relevant path components consisting of Windows reserved filenames are
83 Relevant path components consisting of Windows reserved filenames are
84 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
84 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
85
85
86 Hashed encoding (not reversible):
86 Hashed encoding (not reversible):
87
87
88 If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
88 If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
89 non-reversible hybrid hashing of the path is done instead.
89 non-reversible hybrid hashing of the path is done instead.
90 This encoding uses up to DIR_PREFIX_LEN characters of all directory
90 This encoding uses up to DIR_PREFIX_LEN characters of all directory
91 levels of the lowerencoded path, but not more levels than can fit into
91 levels of the lowerencoded path, but not more levels than can fit into
92 _MAX_SHORTENED_DIRS_LEN.
92 _MAX_SHORTENED_DIRS_LEN.
93 Then follows the filler followed by the sha digest of the full path.
93 Then follows the filler followed by the sha digest of the full path.
94 The filler is the beginning of the basename of the lowerencoded path
94 The filler is the beginning of the basename of the lowerencoded path
95 (the basename is everything after the last path separator). The filler
95 (the basename is everything after the last path separator). The filler
96 is as long as possible, filling in characters from the basename until
96 is as long as possible, filling in characters from the basename until
97 the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
97 the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
98 of the basename have been taken).
98 of the basename have been taken).
99 The extension (e.g. '.i' or '.d') is preserved.
99 The extension (e.g. '.i' or '.d') is preserved.
100
100
101 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
101 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
102 encoding was used.
102 encoding was used.
103 '''
103 '''
104 if not path.startswith('data/'):
104 if not path.startswith('data/'):
105 return path
105 return path
106 ndpath = path[len('data/'):]
106 ndpath = path[len('data/'):]
107 res = 'data/' + auxencode(encodefilename(ndpath))
107 res = 'data/' + auxencode(encodefilename(ndpath))
108 if len(res) > MAX_PATH_LEN_IN_HGSTORE:
108 if len(res) > MAX_PATH_LEN_IN_HGSTORE:
109 digest = _sha(path).hexdigest()
109 digest = _sha(path).hexdigest()
110 aep = auxencode(lowerencode(ndpath))
110 aep = auxencode(lowerencode(ndpath))
111 _root, ext = os.path.splitext(aep)
111 _root, ext = os.path.splitext(aep)
112 parts = aep.split('/')
112 parts = aep.split('/')
113 basename = parts[-1]
113 basename = parts[-1]
114 sdirs = []
114 sdirs = []
115 for p in parts[:-1]:
115 for p in parts[:-1]:
116 d = p[:DIR_PREFIX_LEN]
116 d = p[:DIR_PREFIX_LEN]
117 if d[-1] in '. ':
117 if d[-1] in '. ':
118 # Windows can't access dirs ending in period or space
118 # Windows can't access dirs ending in period or space
119 d = d[:-1] + '_'
119 d = d[:-1] + '_'
120 t = '/'.join(sdirs) + '/' + d
120 t = '/'.join(sdirs) + '/' + d
121 if len(t) > _MAX_SHORTENED_DIRS_LEN:
121 if len(t) > _MAX_SHORTENED_DIRS_LEN:
122 break
122 break
123 sdirs.append(d)
123 sdirs.append(d)
124 dirs = '/'.join(sdirs)
124 dirs = '/'.join(sdirs)
125 if len(dirs) > 0:
125 if len(dirs) > 0:
126 dirs += '/'
126 dirs += '/'
127 res = 'dh/' + dirs + digest + ext
127 res = 'dh/' + dirs + digest + ext
128 space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
128 space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
129 if space_left > 0:
129 if space_left > 0:
130 filler = basename[:space_left]
130 filler = basename[:space_left]
131 res = 'dh/' + dirs + filler + digest + ext
131 res = 'dh/' + dirs + filler + digest + ext
132 return res
132 return res
133
133
134 def _calcmode(path):
134 def _calcmode(path):
135 try:
135 try:
136 # files in .hg/ will be created using this mode
136 # files in .hg/ will be created using this mode
137 mode = os.stat(path).st_mode
137 mode = os.stat(path).st_mode
138 # avoid some useless chmods
138 # avoid some useless chmods
139 if (0777 & ~util.umask) == (0777 & mode):
139 if (0777 & ~util.umask) == (0777 & mode):
140 mode = None
140 mode = None
141 except OSError:
141 except OSError:
142 mode = None
142 mode = None
143 return mode
143 return mode
144
144
145 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
145 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
146
146
147 class basicstore:
147 class basicstore:
148 '''base class for local repository stores'''
148 '''base class for local repository stores'''
149 def __init__(self, path, opener, pathjoiner):
149 def __init__(self, path, opener, pathjoiner):
150 self.pathjoiner = pathjoiner
150 self.pathjoiner = pathjoiner
151 self.path = path
151 self.path = path
152 self.createmode = _calcmode(path)
152 self.createmode = _calcmode(path)
153 self.opener = opener(self.path)
153 self.opener = opener(self.path)
154 self.opener.createmode = self.createmode
154 self.opener.createmode = self.createmode
155
155
156 def join(self, f):
156 def join(self, f):
157 return self.pathjoiner(self.path, f)
157 return self.pathjoiner(self.path, f)
158
158
159 def _walk(self, relpath, recurse):
159 def _walk(self, relpath, recurse):
160 '''yields (unencoded, encoded, size)'''
160 '''yields (unencoded, encoded, size)'''
161 path = self.pathjoiner(self.path, relpath)
161 path = self.pathjoiner(self.path, relpath)
162 striplen = len(self.path) + len(os.sep)
162 striplen = len(self.path) + len(os.sep)
163 l = []
163 l = []
164 if os.path.isdir(path):
164 if os.path.isdir(path):
165 visit = [path]
165 visit = [path]
166 while visit:
166 while visit:
167 p = visit.pop()
167 p = visit.pop()
168 for f, kind, st in osutil.listdir(p, stat=True):
168 for f, kind, st in osutil.listdir(p, stat=True):
169 fp = self.pathjoiner(p, f)
169 fp = self.pathjoiner(p, f)
170 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
170 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
171 n = util.pconvert(fp[striplen:])
171 n = util.pconvert(fp[striplen:])
172 l.append((n, n, st.st_size))
172 l.append((n, n, st.st_size))
173 elif kind == stat.S_IFDIR and recurse:
173 elif kind == stat.S_IFDIR and recurse:
174 visit.append(fp)
174 visit.append(fp)
175 return sorted(l)
175 return sorted(l)
176
176
177 def datafiles(self):
177 def datafiles(self):
178 return self._walk('data', True)
178 return self._walk('data', True)
179
179
180 def walk(self):
180 def walk(self):
181 '''yields (unencoded, encoded, size)'''
181 '''yields (unencoded, encoded, size)'''
182 # yield data files first
182 # yield data files first
183 for x in self.datafiles():
183 for x in self.datafiles():
184 yield x
184 yield x
185 # yield manifest before changelog
185 # yield manifest before changelog
186 meta = self._walk('', False)
186 for x in reversed(self._walk('', False)):
187 meta.reverse()
188 for x in meta:
189 yield x
187 yield x
190
188
191 def copylist(self):
189 def copylist(self):
192 return ['requires'] + _data.split()
190 return ['requires'] + _data.split()
193
191
194 class encodedstore(basicstore):
192 class encodedstore(basicstore):
195 def __init__(self, path, opener, pathjoiner):
193 def __init__(self, path, opener, pathjoiner):
196 self.pathjoiner = pathjoiner
194 self.pathjoiner = pathjoiner
197 self.path = self.pathjoiner(path, 'store')
195 self.path = self.pathjoiner(path, 'store')
198 self.createmode = _calcmode(self.path)
196 self.createmode = _calcmode(self.path)
199 op = opener(self.path)
197 op = opener(self.path)
200 op.createmode = self.createmode
198 op.createmode = self.createmode
201 self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
199 self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
202
200
203 def datafiles(self):
201 def datafiles(self):
204 for a, b, size in self._walk('data', True):
202 for a, b, size in self._walk('data', True):
205 try:
203 try:
206 a = decodefilename(a)
204 a = decodefilename(a)
207 except KeyError:
205 except KeyError:
208 a = None
206 a = None
209 yield a, b, size
207 yield a, b, size
210
208
211 def join(self, f):
209 def join(self, f):
212 return self.pathjoiner(self.path, encodefilename(f))
210 return self.pathjoiner(self.path, encodefilename(f))
213
211
214 def copylist(self):
212 def copylist(self):
215 return (['requires', '00changelog.i'] +
213 return (['requires', '00changelog.i'] +
216 [self.pathjoiner('store', f) for f in _data.split()])
214 [self.pathjoiner('store', f) for f in _data.split()])
217
215
218 def fncache(opener):
216 def fncache(opener):
219 '''yields the entries in the fncache file'''
217 '''yields the entries in the fncache file'''
220 try:
218 try:
221 fp = opener('fncache', mode='rb')
219 fp = opener('fncache', mode='rb')
222 except IOError:
220 except IOError:
223 # skip nonexistent file
221 # skip nonexistent file
224 return
222 return
225 for n, line in enumerate(fp):
223 for n, line in enumerate(fp):
226 if (len(line) < 2) or (line[-1] != '\n'):
224 if (len(line) < 2) or (line[-1] != '\n'):
227 t = _('invalid entry in fncache, line %s') % (n + 1)
225 t = _('invalid entry in fncache, line %s') % (n + 1)
228 raise util.Abort(t)
226 raise util.Abort(t)
229 yield line[:-1]
227 yield line[:-1]
230 fp.close()
228 fp.close()
231
229
232 class fncacheopener(object):
230 class fncacheopener(object):
233 def __init__(self, opener):
231 def __init__(self, opener):
234 self.opener = opener
232 self.opener = opener
235 self.entries = None
233 self.entries = None
236
234
237 def loadfncache(self):
235 def loadfncache(self):
238 self.entries = {}
236 self.entries = {}
239 for f in fncache(self.opener):
237 for f in fncache(self.opener):
240 self.entries[f] = True
238 self.entries[f] = True
241
239
242 def __call__(self, path, mode='r', *args, **kw):
240 def __call__(self, path, mode='r', *args, **kw):
243 if mode not in ('r', 'rb') and path.startswith('data/'):
241 if mode not in ('r', 'rb') and path.startswith('data/'):
244 if self.entries is None:
242 if self.entries is None:
245 self.loadfncache()
243 self.loadfncache()
246 if path not in self.entries:
244 if path not in self.entries:
247 self.opener('fncache', 'ab').write(path + '\n')
245 self.opener('fncache', 'ab').write(path + '\n')
248 # fncache may contain non-existent files after rollback / strip
246 # fncache may contain non-existent files after rollback / strip
249 self.entries[path] = True
247 self.entries[path] = True
250 return self.opener(hybridencode(path), mode, *args, **kw)
248 return self.opener(hybridencode(path), mode, *args, **kw)
251
249
252 class fncachestore(basicstore):
250 class fncachestore(basicstore):
253 def __init__(self, path, opener, pathjoiner):
251 def __init__(self, path, opener, pathjoiner):
254 self.pathjoiner = pathjoiner
252 self.pathjoiner = pathjoiner
255 self.path = self.pathjoiner(path, 'store')
253 self.path = self.pathjoiner(path, 'store')
256 self.createmode = _calcmode(self.path)
254 self.createmode = _calcmode(self.path)
257 self._op = opener(self.path)
255 self._op = opener(self.path)
258 self._op.createmode = self.createmode
256 self._op.createmode = self.createmode
259 self.opener = fncacheopener(self._op)
257 self.opener = fncacheopener(self._op)
260
258
261 def join(self, f):
259 def join(self, f):
262 return self.pathjoiner(self.path, hybridencode(f))
260 return self.pathjoiner(self.path, hybridencode(f))
263
261
264 def datafiles(self):
262 def datafiles(self):
265 rewrite = False
263 rewrite = False
266 existing = []
264 existing = []
267 pjoin = self.pathjoiner
265 pjoin = self.pathjoiner
268 spath = self.path
266 spath = self.path
269 for f in fncache(self._op):
267 for f in fncache(self._op):
270 ef = hybridencode(f)
268 ef = hybridencode(f)
271 try:
269 try:
272 st = os.stat(pjoin(spath, ef))
270 st = os.stat(pjoin(spath, ef))
273 yield f, ef, st.st_size
271 yield f, ef, st.st_size
274 existing.append(f)
272 existing.append(f)
275 except OSError:
273 except OSError:
276 # nonexistent entry
274 # nonexistent entry
277 rewrite = True
275 rewrite = True
278 if rewrite:
276 if rewrite:
279 # rewrite fncache to remove nonexistent entries
277 # rewrite fncache to remove nonexistent entries
280 # (may be caused by rollback / strip)
278 # (may be caused by rollback / strip)
281 fp = self._op('fncache', mode='wb')
279 fp = self._op('fncache', mode='wb')
282 for p in existing:
280 for p in existing:
283 fp.write(p + '\n')
281 fp.write(p + '\n')
284 fp.close()
282 fp.close()
285
283
286 def copylist(self):
284 def copylist(self):
287 d = _data + ' dh fncache'
285 d = _data + ' dh fncache'
288 return (['requires', '00changelog.i'] +
286 return (['requires', '00changelog.i'] +
289 [self.pathjoiner('store', f) for f in d.split()])
287 [self.pathjoiner('store', f) for f in d.split()])
290
288
291 def store(requirements, path, opener, pathjoiner=None):
289 def store(requirements, path, opener, pathjoiner=None):
292 pathjoiner = pathjoiner or os.path.join
290 pathjoiner = pathjoiner or os.path.join
293 if 'store' in requirements:
291 if 'store' in requirements:
294 if 'fncache' in requirements:
292 if 'fncache' in requirements:
295 return fncachestore(path, opener, pathjoiner)
293 return fncachestore(path, opener, pathjoiner)
296 return encodedstore(path, opener, pathjoiner)
294 return encodedstore(path, opener, pathjoiner)
297 return basicstore(path, opener, pathjoiner)
295 return basicstore(path, opener, pathjoiner)
General Comments 0
You need to be logged in to leave comments. Login now