##// END OF EJS Templates
merge with stable
Matt Mackall -
r16088:6c1daae0 merge default
parent child Browse files
Show More
@@ -1,1189 +1,1189 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os
5 import os
6 import re
6 import re
7 import sys
7 import sys
8 import cPickle as pickle
8 import cPickle as pickle
9 import tempfile
9 import tempfile
10 import urllib
10 import urllib
11 import urllib2
11 import urllib2
12
12
13 from mercurial import strutil, scmutil, util, encoding
13 from mercurial import strutil, scmutil, util, encoding
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 # Subversion stuff. Works best with very recent Python SVN bindings
16 # Subversion stuff. Works best with very recent Python SVN bindings
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 # these bindings.
18 # these bindings.
19
19
20 from cStringIO import StringIO
20 from cStringIO import StringIO
21
21
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 from common import commandline, converter_source, converter_sink, mapfile
23 from common import commandline, converter_source, converter_sink, mapfile
24
24
25 try:
25 try:
26 from svn.core import SubversionException, Pool
26 from svn.core import SubversionException, Pool
27 import svn
27 import svn
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.ra
30 import svn.ra
31 import svn.delta
31 import svn.delta
32 import transport
32 import transport
33 import warnings
33 import warnings
34 warnings.filterwarnings('ignore',
34 warnings.filterwarnings('ignore',
35 module='svn.core',
35 module='svn.core',
36 category=DeprecationWarning)
36 category=DeprecationWarning)
37
37
38 except ImportError:
38 except ImportError:
39 svn = None
39 svn = None
40
40
class SvnPathNotFound(Exception):
    """Raised when a path cannot be located in the Subversion history."""
    pass
43
43
def revsplit(rev):
    """Parse a revision string and return (uuid, path, revnum)."""
    base, revstr = rev.rsplit('@', 1)
    pieces = base.split('/', 1)
    # A module path is present only when the identifier contains a slash;
    # normalize it to start with '/'.
    mod = '/' + pieces[1] if len(pieces) > 1 else ''
    # pieces[0] looks like 'svn:<uuid>': drop the 4-character 'svn:' prefix.
    return pieces[0][4:], mod, int(revstr)
52
52
def quote(s):
    # As of svn 1.7, many svn calls expect "canonical" paths. In theory
    # we should run svn.core.*canonicalize() over every path handed to
    # the API. Instead we assume the base url is already canonical and
    # mimic svn's own URL encoding routine, so components we append stay
    # canonical too. The "safe" character set mirrors the
    # "svn_uri__char_validity" table in libsvn_subr/path.c.
    return urllib.quote(s, "!$&'()*+,-./:=@_~")
62
62
def geturl(path):
    """Return a canonical Subversion URL for *path* (URL or local path)."""
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # svn.client.url_from_path() fails with local repositories
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        path = 'file://%s' % quote(path)
    return svn.core.svn_path_canonicalize(path)
78
78
def optrev(number):
    """Wrap a plain revision number in an svn_opt_revision_t."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
84
84
class changedpath(object):
    """Plain snapshot of an svn changed-path entry, safe to pickle."""
    def __init__(self, p):
        # Copy the three fields we need off the native svn object.
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
        self.action = p.action
90
90
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Stream svn log entries for *paths* to *fp* as pickles, then exit.

    Runs in a child process; each entry is pickled as a
    (orig_paths, revnum, author, date, message) tuple. A trailing None
    marks normal completion, a string carries an error message.
    """
    proto = -1

    def receiver(orig_paths, revnum, author, date, message, pool):
        # Replace native svn objects with picklable snapshots.
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message), fp, proto)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, proto)
    except Exception as inst:
        pickle.dump(str(inst), fp, proto)
    else:
        pickle.dump(None, fp, proto)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
121
121
def debugsvnlog(ui, **opts):
    """Run the SVN log fetch in this (child) process, streaming results
    back to the parent over stdout to avoid memory collection issues.
    """
    # Pickled data flows over the std streams, so they must be binary.
    for stream in (sys.stdin, sys.stdout):
        util.setbinary(stream)
    get_log_child(sys.stdout, *decodeargs(sys.stdin.read()))
130
130
class logstream(object):
    """Interruptible revision log iterator.

    Wraps the stdout pipe of a child log-fetching process and yields the
    pickled (orig_paths, revnum, author, date, message) tuples it emits.
    A pickled None marks the normal end of the stream; a pickled string
    carries an error message from the child.
    """

    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            try:
                orig_paths, revnum, author, date, message = entry
            except (TypeError, ValueError):
                # Narrowed from a bare "except:": only unpacking failures
                # (None or an error string instead of a 5-tuple) are
                # expected here; anything else, e.g. KeyboardInterrupt,
                # must propagate.
                if entry is None:
                    break
                raise util.Abort(_("log stream exception '%s'") % entry)
            yield entry

    def close(self):
        # Idempotent: safe to call more than once.
        if self._stdout:
            self._stdout.close()
            self._stdout = None
155
155
156
156
157 # Check to see if the given path is a local Subversion repo. Verify this by
157 # Check to see if the given path is a local Subversion repo. Verify this by
158 # looking for several svn-specific files and directories in the given
158 # looking for several svn-specific files and directories in the given
159 # directory.
159 # directory.
def filecheck(ui, path, proto):
    """Return True if *path* looks like the root of a local svn repo.

    Probes for a handful of files/directories every svn repository
    contains.
    """
    return all(os.path.exists(os.path.join(path, x))
               for x in ('locks', 'hooks', 'format', 'db'))
165
165
166 # Check to see if a given path is the root of an svn repo over http. We verify
166 # Check to see if a given path is the root of an svn repo over http. We verify
167 # this by requesting a version-controlled URL we know can't exist and looking
167 # this by requesting a version-controlled URL we know can't exist and looking
168 # for the svn-specific "not found" XML.
168 # for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    """Return True if proto://path looks like an svn repo served over http(s).

    We request a version-controlled URL we know can't exist and look for
    svn's characteristic "not found" XML in the response body.
    """
    try:
        opener = urllib2.build_opener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urllib2.HTTPError as inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        data = inst.fp.read()
    except Exception:
        # Could be urllib2.URLError if the URL is invalid or anything else.
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # still propagate; any ordinary failure means "not an svn repo".
        return False
    return '<m:human-readable errcode="160013">' in data
186
186
# Dispatch table mapping a URL scheme to the probe that can recognize a
# Subversion repository reachable through that scheme.
protomap = {
    'http': httpcheck,
    'https': httpcheck,
    'file': filecheck,
}
def issvnurl(ui, url):
    """Return True if *url* points at (or inside) a Subversion repository."""
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        # No scheme at all: treat the argument as a local filesystem path.
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = util.pconvert(path)
    probe = protomap.get(proto, lambda *args: False)
    # The url may point inside the repository: walk up one component at a
    # time until a probe succeeds or we run out of path.
    while '/' in path:
        if probe(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
207
207
208 # SVN conversion code stolen from bzr-svn and tailor
208 # SVN conversion code stolen from bzr-svn and tailor
209 #
209 #
210 # Subversion looks like a versioned filesystem, branches structures
210 # Subversion looks like a versioned filesystem, branches structures
211 # are defined by conventions and not enforced by the tool. First,
211 # are defined by conventions and not enforced by the tool. First,
212 # we define the potential branches (modules) as "trunk" and "branches"
212 # we define the potential branches (modules) as "trunk" and "branches"
213 # children directories. Revisions are then identified by their
213 # children directories. Revisions are then identified by their
214 # module and revision number (and a repository identifier).
214 # module and revision number (and a repository identifier).
215 #
215 #
216 # The revision graph is really a tree (or a forest). By default, a
216 # The revision graph is really a tree (or a forest). By default, a
217 # revision parent is the previous revision in the same module. If the
217 # revision parent is the previous revision in the same module. If the
218 # module directory is copied/moved from another module then the
218 # module directory is copied/moved from another module then the
219 # revision is the module root and its parent the source revision in
219 # revision is the module root and its parent the source revision in
220 # the parent module. A revision has at most one parent.
220 # the parent module. A revision has at most one parent.
221 #
221 #
222 class svn_source(converter_source):
222 class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Open a Subversion source at *url*, optionally pinned to *rev*.

        Raises NoRepo if url does not look like a Subversion repository,
        MissingTool if usable svn bindings are absent, and util.Abort on
        invalid revision arguments or an empty module.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        # Cheap rejection first: only probe URLs/paths that could
        # plausibly be Subversion before loading anything else.
        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('Could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Very old bindings do not even define SVN_VER_MAJOR.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        # An explicit --rev overrides any @rev embedded in the URL.
        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        # Cache for getchangedfiles()/getchanges() round trips.
        self._changescache = None

        # Remember whether the source is a working copy (has .svn/entries)
        # so a shamap can be written back into it later.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
308
308
309 def setrevmap(self, revmap):
309 def setrevmap(self, revmap):
310 lastrevs = {}
310 lastrevs = {}
311 for revid in revmap.iterkeys():
311 for revid in revmap.iterkeys():
312 uuid, module, revnum = revsplit(revid)
312 uuid, module, revnum = revsplit(revid)
313 lastrevnum = lastrevs.setdefault(module, revnum)
313 lastrevnum = lastrevs.setdefault(module, revnum)
314 if revnum > lastrevnum:
314 if revnum > lastrevnum:
315 lastrevs[module] = revnum
315 lastrevs[module] = revnum
316 self.lastrevs = lastrevs
316 self.lastrevs = lastrevs
317
317
318 def exists(self, path, optrev):
318 def exists(self, path, optrev):
319 try:
319 try:
320 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
320 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
321 optrev, False, self.ctx)
321 optrev, False, self.ctx)
322 return True
322 return True
323 except SubversionException:
323 except SubversionException:
324 return False
324 return False
325
325
    def getheads(self):
        """Return the list of head revision ids to convert.

        Resolves the conventional trunk/tags/branches layout (overridable
        through the convert.svn.* config options), narrows self.module to
        trunk when one is found, and collects one head per non-empty
        branch. Also sets self.tags to the tags directory path, or None.
        """

        def isdir(path, revnum):
            # True if path is a directory in the repository at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve the convert.svn.<name> layout directory. Returns the
            # repository-relative path, or None when the directory is
            # disabled (empty config) or absent.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == 'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    # The user explicitly configured it, so failing to
                    # find it is an error rather than a fallback.
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                     % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        # --startrev only makes sense when converting a single branch.
        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
397
397
398 def getchanges(self, rev):
398 def getchanges(self, rev):
399 if self._changescache and self._changescache[0] == rev:
399 if self._changescache and self._changescache[0] == rev:
400 return self._changescache[1]
400 return self._changescache[1]
401 self._changescache = None
401 self._changescache = None
402 (paths, parents) = self.paths[rev]
402 (paths, parents) = self.paths[rev]
403 if parents:
403 if parents:
404 files, self.removed, copies = self.expandpaths(rev, paths, parents)
404 files, self.removed, copies = self.expandpaths(rev, paths, parents)
405 else:
405 else:
406 # Perform a full checkout on roots
406 # Perform a full checkout on roots
407 uuid, module, revnum = revsplit(rev)
407 uuid, module, revnum = revsplit(rev)
408 entries = svn.client.ls(self.baseurl + quote(module),
408 entries = svn.client.ls(self.baseurl + quote(module),
409 optrev(revnum), True, self.ctx)
409 optrev(revnum), True, self.ctx)
410 files = [n for n, e in entries.iteritems()
410 files = [n for n, e in entries.iteritems()
411 if e.kind == svn.core.svn_node_file]
411 if e.kind == svn.core.svn_node_file]
412 copies = {}
412 copies = {}
413 self.removed = set()
413 self.removed = set()
414
414
415 files.sort()
415 files.sort()
416 files = zip(files, [rev] * len(files))
416 files = zip(files, [rev] * len(files))
417
417
418 # caller caches the result, so free it here to release memory
418 # caller caches the result, so free it here to release memory
419 del self.paths[rev]
419 del self.paths[rev]
420 return (files, copies)
420 return (files, copies)
421
421
422 def getchangedfiles(self, rev, i):
422 def getchangedfiles(self, rev, i):
423 changes = self.getchanges(rev)
423 changes = self.getchanges(rev)
424 self._changescache = (rev, changes)
424 self._changescache = (rev, changes)
425 return [f[0] for f in changes[0]]
425 return [f[0] for f in changes[0]]
426
426
427 def getcommit(self, rev):
427 def getcommit(self, rev):
428 if rev not in self.commits:
428 if rev not in self.commits:
429 uuid, module, revnum = revsplit(rev)
429 uuid, module, revnum = revsplit(rev)
430 self.module = module
430 self.module = module
431 self.reparent(module)
431 self.reparent(module)
432 # We assume that:
432 # We assume that:
433 # - requests for revisions after "stop" come from the
433 # - requests for revisions after "stop" come from the
434 # revision graph backward traversal. Cache all of them
434 # revision graph backward traversal. Cache all of them
435 # down to stop, they will be used eventually.
435 # down to stop, they will be used eventually.
436 # - requests for revisions before "stop" come to get
436 # - requests for revisions before "stop" come to get
437 # isolated branches parents. Just fetch what is needed.
437 # isolated branches parents. Just fetch what is needed.
438 stop = self.lastrevs.get(module, 0)
438 stop = self.lastrevs.get(module, 0)
439 if revnum < stop:
439 if revnum < stop:
440 stop = revnum + 1
440 stop = revnum + 1
441 self._fetch_revisions(revnum, stop)
441 self._fetch_revisions(revnum, stop)
442 if rev not in self.commits:
442 if rev not in self.commits:
443 raise util.Abort(_('svn: revision %s not found') % revnum)
443 raise util.Abort(_('svn: revision %s not found') % revnum)
444 commit = self.commits[rev]
444 commit = self.commits[rev]
445 # caller caches the result, so free it here to release memory
445 # caller caches the result, so free it here to release memory
446 del self.commits[rev]
446 del self.commits[rev]
447 return commit
447 return commit
448
448
449 def gettags(self):
449 def gettags(self):
450 tags = {}
450 tags = {}
451 if self.tags is None:
451 if self.tags is None:
452 return tags
452 return tags
453
453
454 # svn tags are just a convention, project branches left in a
454 # svn tags are just a convention, project branches left in a
455 # 'tags' directory. There is no other relationship than
455 # 'tags' directory. There is no other relationship than
456 # ancestry, which is expensive to discover and makes them hard
456 # ancestry, which is expensive to discover and makes them hard
457 # to update incrementally. Worse, past revisions may be
457 # to update incrementally. Worse, past revisions may be
458 # referenced by tags far away in the future, requiring a deep
458 # referenced by tags far away in the future, requiring a deep
459 # history traversal on every calculation. Current code
459 # history traversal on every calculation. Current code
460 # performs a single backward traversal, tracking moves within
460 # performs a single backward traversal, tracking moves within
461 # the tags directory (tag renaming) and recording a new tag
461 # the tags directory (tag renaming) and recording a new tag
462 # everytime a project is copied from outside the tags
462 # everytime a project is copied from outside the tags
463 # directory. It also lists deleted tags, this behaviour may
463 # directory. It also lists deleted tags, this behaviour may
464 # change in the future.
464 # change in the future.
465 pendings = []
465 pendings = []
466 tagspath = self.tags
466 tagspath = self.tags
467 start = svn.ra.get_latest_revnum(self.ra)
467 start = svn.ra.get_latest_revnum(self.ra)
468 stream = self._getlog([self.tags], start, self.startrev)
468 stream = self._getlog([self.tags], start, self.startrev)
469 try:
469 try:
470 for entry in stream:
470 for entry in stream:
471 origpaths, revnum, author, date, message = entry
471 origpaths, revnum, author, date, message = entry
472 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
472 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
473 in origpaths.iteritems() if e.copyfrom_path]
473 in origpaths.iteritems() if e.copyfrom_path]
474 # Apply moves/copies from more specific to general
474 # Apply moves/copies from more specific to general
475 copies.sort(reverse=True)
475 copies.sort(reverse=True)
476
476
477 srctagspath = tagspath
477 srctagspath = tagspath
478 if copies and copies[-1][2] == tagspath:
478 if copies and copies[-1][2] == tagspath:
479 # Track tags directory moves
479 # Track tags directory moves
480 srctagspath = copies.pop()[0]
480 srctagspath = copies.pop()[0]
481
481
482 for source, sourcerev, dest in copies:
482 for source, sourcerev, dest in copies:
483 if not dest.startswith(tagspath + '/'):
483 if not dest.startswith(tagspath + '/'):
484 continue
484 continue
485 for tag in pendings:
485 for tag in pendings:
486 if tag[0].startswith(dest):
486 if tag[0].startswith(dest):
487 tagpath = source + tag[0][len(dest):]
487 tagpath = source + tag[0][len(dest):]
488 tag[:2] = [tagpath, sourcerev]
488 tag[:2] = [tagpath, sourcerev]
489 break
489 break
490 else:
490 else:
491 pendings.append([source, sourcerev, dest])
491 pendings.append([source, sourcerev, dest])
492
492
493 # Filter out tags with children coming from different
493 # Filter out tags with children coming from different
494 # parts of the repository like:
494 # parts of the repository like:
495 # /tags/tag.1 (from /trunk:10)
495 # /tags/tag.1 (from /trunk:10)
496 # /tags/tag.1/foo (from /branches/foo:12)
496 # /tags/tag.1/foo (from /branches/foo:12)
497 # Here/tags/tag.1 discarded as well as its children.
497 # Here/tags/tag.1 discarded as well as its children.
498 # It happens with tools like cvs2svn. Such tags cannot
498 # It happens with tools like cvs2svn. Such tags cannot
499 # be represented in mercurial.
499 # be represented in mercurial.
500 addeds = dict((p, e.copyfrom_path) for p, e
500 addeds = dict((p, e.copyfrom_path) for p, e
501 in origpaths.iteritems()
501 in origpaths.iteritems()
502 if e.action == 'A' and e.copyfrom_path)
502 if e.action == 'A' and e.copyfrom_path)
503 badroots = set()
503 badroots = set()
504 for destroot in addeds:
504 for destroot in addeds:
505 for source, sourcerev, dest in pendings:
505 for source, sourcerev, dest in pendings:
506 if (not dest.startswith(destroot + '/')
506 if (not dest.startswith(destroot + '/')
507 or source.startswith(addeds[destroot] + '/')):
507 or source.startswith(addeds[destroot] + '/')):
508 continue
508 continue
509 badroots.add(destroot)
509 badroots.add(destroot)
510 break
510 break
511
511
512 for badroot in badroots:
512 for badroot in badroots:
513 pendings = [p for p in pendings if p[2] != badroot
513 pendings = [p for p in pendings if p[2] != badroot
514 and not p[2].startswith(badroot + '/')]
514 and not p[2].startswith(badroot + '/')]
515
515
516 # Tell tag renamings from tag creations
516 # Tell tag renamings from tag creations
517 renamings = []
517 renamings = []
518 for source, sourcerev, dest in pendings:
518 for source, sourcerev, dest in pendings:
519 tagname = dest.split('/')[-1]
519 tagname = dest.split('/')[-1]
520 if source.startswith(srctagspath):
520 if source.startswith(srctagspath):
521 renamings.append([source, sourcerev, tagname])
521 renamings.append([source, sourcerev, tagname])
522 continue
522 continue
523 if tagname in tags:
523 if tagname in tags:
524 # Keep the latest tag value
524 # Keep the latest tag value
525 continue
525 continue
526 # From revision may be fake, get one with changes
526 # From revision may be fake, get one with changes
527 try:
527 try:
528 tagid = self.latest(source, sourcerev)
528 tagid = self.latest(source, sourcerev)
529 if tagid and tagname not in tags:
529 if tagid and tagname not in tags:
530 tags[tagname] = tagid
530 tags[tagname] = tagid
531 except SvnPathNotFound:
531 except SvnPathNotFound:
532 # It happens when we are following directories
532 # It happens when we are following directories
533 # we assumed were copied with their parents
533 # we assumed were copied with their parents
534 # but were really created in the tag
534 # but were really created in the tag
535 # directory.
535 # directory.
536 pass
536 pass
537 pendings = renamings
537 pendings = renamings
538 tagspath = srctagspath
538 tagspath = srctagspath
539 finally:
539 finally:
540 stream.close()
540 stream.close()
541 return tags
541 return tags
542
542
543 def converted(self, rev, destrev):
543 def converted(self, rev, destrev):
544 if not self.wc:
544 if not self.wc:
545 return
545 return
546 if self.convertfp is None:
546 if self.convertfp is None:
547 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
547 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
548 'a')
548 'a')
549 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
549 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
550 self.convertfp.flush()
550 self.convertfp.flush()
551
551
552 def revid(self, revnum, module=None):
552 def revid(self, revnum, module=None):
553 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
553 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
554
554
555 def revnum(self, rev):
555 def revnum(self, rev):
556 return int(rev.split('@')[-1])
556 return int(rev.split('@')[-1])
557
557
    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if computed module does not
        belong to rootmodule subtree.
        """
        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if not stop:
            # No upper bound given: default to the repository head
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat() is issued relative to the repository root, so
            # temporarily reparent the RA session there
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        stream = self._getlog([path], stop, dirent.created_rev)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum <= dirent.created_rev:
                    # Reached the revision stat() reported; no more
                    # renames can apply beyond this point
                    break

                for p in paths:
                    if not path.startswith(p) or not paths[p].copyfrom_path:
                        continue
                    # Follow the rename: map path back to its old location
                    newpath = paths[p].copyfrom_path + path[len(p):]
                    self.ui.debug("branch renamed from %s to %s at %d\n" %
                                  (path, newpath, revnum))
                    path = newpath
                    break
        finally:
            stream.close()

        if not path.startswith(self.rootmodule):
            # Renames may have moved the path outside the converted subtree
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None
        return self.revid(dirent.created_rev, path)
606
606
607 def reparent(self, module):
607 def reparent(self, module):
608 """Reparent the svn transport and return the previous parent."""
608 """Reparent the svn transport and return the previous parent."""
609 if self.prevmodule == module:
609 if self.prevmodule == module:
610 return module
610 return module
611 svnurl = self.baseurl + quote(module)
611 svnurl = self.baseurl + quote(module)
612 prevmodule = self.prevmodule
612 prevmodule = self.prevmodule
613 if prevmodule is None:
613 if prevmodule is None:
614 prevmodule = ''
614 prevmodule = ''
615 self.ui.debug("reparent to %s\n" % svnurl)
615 self.ui.debug("reparent to %s\n" % svnurl)
616 svn.ra.reparent(self.ra, svnurl)
616 svn.ra.reparent(self.ra, svnurl)
617 self.prevmodule = module
617 self.prevmodule = module
618 return prevmodule
618 return prevmodule
619
619
    def expandpaths(self, rev, paths, parents):
        """Expand the changed paths of rev into concrete file changes.

        paths is a list of (path, change entry) pairs from the svn log;
        parents is the list of parent revision ids. Returns a
        (changed files list, removed files set, copies dict) tuple,
        where copies maps destination file to source file. Directory
        operations are expanded into their contained files.
        """
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            # rev belongs to another module (e.g. after a branch move):
            # follow it
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A whole directory was deleted: remove every file
                    # it contained in the parent revision
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                # Every file below the added/changed directory is changed
                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                # Record a per-file copy for every file in the copied tree
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        # Removed files are reported as changed too
        changed.update(removed)
        return (list(changed), removed, copies)
720
720
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Fetch and parse the svn log from from_revnum down to to_revnum.

        Parsed commits are stored in self.commits and their changed
        paths in self.paths. Walks the log newest-first, linking each
        parsed changeset to its child as it goes.
        """
        # The log is always walked from the higher to the lower revision
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                # Use the most specific prefix of the current module
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                branch = self.module.split("/")[-1]
                if branch == self.trunkname:
                    # trunk maps to the default (unnamed) branch
                    branch = None
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                # Link the previously parsed (younger) changeset to us
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        # Past the user-requested starting revision
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
            raise
845
845
    def getfile(self, file, rev):
        """Return (data, mode) for file at rev.

        mode is 'x' for executable files, 'l' for symlinks (with the
        svn "link " prefix stripped from data) and '' otherwise.
        Raises IOError when the file does not exist at that revision.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            raise IOError()
        mode = ''
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                # rev lives in another module, follow it
                self.module = new_module
                self.reparent(self.module)
            io = StringIO()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_files() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitly.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            # Derive the mode from the svn properties returned with the file
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        if mode == 'l':
            # svn stores symlink targets as "link <target>"
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
877
877
878 def _iterfiles(self, path, revnum):
878 def _iterfiles(self, path, revnum):
879 """Enumerate all files in path at revnum, recursively."""
879 """Enumerate all files in path at revnum, recursively."""
880 path = path.strip('/')
880 path = path.strip('/')
881 pool = Pool()
881 pool = Pool()
882 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
882 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
883 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
883 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
884 if path:
884 if path:
885 path += '/'
885 path += '/'
886 return ((path + p) for p, e in entries.iteritems()
886 return ((path + p) for p, e in entries.iteritems()
887 if e.kind == svn.core.svn_node_file)
887 if e.kind == svn.core.svn_node_file)
888
888
889 def getrelpath(self, path, module=None):
889 def getrelpath(self, path, module=None):
890 if module is None:
890 if module is None:
891 module = self.module
891 module = self.module
892 # Given the repository url of this wc, say
892 # Given the repository url of this wc, say
893 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
893 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
894 # extract the "entry" portion (a relative path) from what
894 # extract the "entry" portion (a relative path) from what
895 # svn log --xml says, ie
895 # svn log --xml says, ie
896 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
896 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
897 # that is to say "tests/PloneTestCase.py"
897 # that is to say "tests/PloneTestCase.py"
898 if path.startswith(module):
898 if path.startswith(module):
899 relative = path.rstrip('/')[len(module):]
899 relative = path.rstrip('/')[len(module):]
900 if relative.startswith('/'):
900 if relative.startswith('/'):
901 return relative[1:]
901 return relative[1:]
902 elif relative == '':
902 elif relative == '':
903 return relative
903 return relative
904
904
905 # The path is outside our tracked tree...
905 # The path is outside our tracked tree...
906 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
906 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
907 return None
907 return None
908
908
909 def _checkpath(self, path, revnum, module=None):
909 def _checkpath(self, path, revnum, module=None):
910 if module is not None:
910 if module is not None:
911 prevmodule = self.reparent('')
911 prevmodule = self.reparent('')
912 path = module + '/' + path
912 path = module + '/' + path
913 try:
913 try:
914 # ra.check_path does not like leading slashes very much, it leads
914 # ra.check_path does not like leading slashes very much, it leads
915 # to PROPFIND subversion errors
915 # to PROPFIND subversion errors
916 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
916 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
917 finally:
917 finally:
918 if module is not None:
918 if module is not None:
919 self.reparent(prevmodule)
919 self.reparent(prevmodule)
920
920
921 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
921 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
922 strict_node_history=False):
922 strict_node_history=False):
923 # Normalize path names, svn >= 1.5 only wants paths relative to
923 # Normalize path names, svn >= 1.5 only wants paths relative to
924 # supplied URL
924 # supplied URL
925 relpaths = []
925 relpaths = []
926 for p in paths:
926 for p in paths:
927 if not p.startswith('/'):
927 if not p.startswith('/'):
928 p = self.module + '/' + p
928 p = self.module + '/' + p
929 relpaths.append(p.strip('/'))
929 relpaths.append(p.strip('/'))
930 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
930 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
931 strict_node_history]
931 strict_node_history]
932 arg = encodeargs(args)
932 arg = encodeargs(args)
933 hgexe = util.hgexecutable()
933 hgexe = util.hgexecutable()
934 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
934 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
935 stdin, stdout = util.popen2(util.quotecommand(cmd))
935 stdin, stdout = util.popen2(util.quotecommand(cmd))
936 stdin.write(arg)
936 stdin.write(arg)
937 try:
937 try:
938 stdin.close()
938 stdin.close()
939 except IOError:
939 except IOError:
940 raise util.Abort(_('Mercurial failed to run itself, check'
940 raise util.Abort(_('Mercurial failed to run itself, check'
941 ' hg executable is in PATH'))
941 ' hg executable is in PATH'))
942 return logstream(stdout)
942 return logstream(stdout)
943
943
# Shell hook installed into target svn repositories so the revision
# properties written during conversion (svn:log edits, hg:convert-branch,
# hg:convert-rev) are permitted while all other revprop changes stay
# prohibited.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
959
959
class svn_sink(converter_sink, commandline):
    """Sink writing converted changesets into a Subversion working copy."""
    # Matches 'svn commit' output to recover the committed revision number
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
    # Matches 'svn info' output to recover the repository UUID
    uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
963
963
964 def prerun(self):
964 def prerun(self):
965 if self.wc:
965 if self.wc:
966 os.chdir(self.wc)
966 os.chdir(self.wc)
967
967
968 def postrun(self):
968 def postrun(self):
969 if self.wc:
969 if self.wc:
970 os.chdir(self.cwd)
970 os.chdir(self.cwd)
971
971
972 def join(self, name):
972 def join(self, name):
973 return os.path.join(self.wc, '.svn', name)
973 return os.path.join(self.wc, '.svn', name)
974
974
975 def revmapfile(self):
975 def revmapfile(self):
976 return self.join('hg-shamap')
976 return self.join('hg-shamap')
977
977
978 def authorfile(self):
978 def authorfile(self):
979 return self.join('hg-authormap')
979 return self.join('hg-authormap')
980
980
981 def __init__(self, ui, path):
981 def __init__(self, ui, path):
982
982
983 converter_sink.__init__(self, ui, path)
983 converter_sink.__init__(self, ui, path)
984 commandline.__init__(self, ui, 'svn')
984 commandline.__init__(self, ui, 'svn')
985 self.delete = []
985 self.delete = []
986 self.setexec = []
986 self.setexec = []
987 self.delexec = []
987 self.delexec = []
988 self.copies = []
988 self.copies = []
989 self.wc = None
989 self.wc = None
990 self.cwd = os.getcwd()
990 self.cwd = os.getcwd()
991
991
992 path = os.path.realpath(path)
992 path = os.path.realpath(path)
993
993
994 created = False
994 created = False
995 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
995 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
996 self.wc = path
996 self.wc = path
997 self.run0('update')
997 self.run0('update')
998 else:
998 else:
999 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
999 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1000
1000
1001 if os.path.isdir(os.path.dirname(path)):
1001 if os.path.isdir(os.path.dirname(path)):
1002 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1002 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1003 ui.status(_('initializing svn repository %r\n') %
1003 ui.status(_('initializing svn repository %r\n') %
1004 os.path.basename(path))
1004 os.path.basename(path))
1005 commandline(ui, 'svnadmin').run0('create', path)
1005 commandline(ui, 'svnadmin').run0('create', path)
1006 created = path
1006 created = path
1007 path = util.normpath(path)
1007 path = util.normpath(path)
1008 if not path.startswith('/'):
1008 if not path.startswith('/'):
1009 path = '/' + path
1009 path = '/' + path
1010 path = 'file://' + path
1010 path = 'file://' + path
1011
1011
1012 ui.status(_('initializing svn working copy %r\n')
1012 ui.status(_('initializing svn working copy %r\n')
1013 % os.path.basename(wcpath))
1013 % os.path.basename(wcpath))
1014 self.run0('checkout', path, wcpath)
1014 self.run0('checkout', path, wcpath)
1015
1015
1016 self.wc = wcpath
1016 self.wc = wcpath
1017 self.opener = scmutil.opener(self.wc)
1017 self.opener = scmutil.opener(self.wc)
1018 self.wopener = scmutil.opener(self.wc)
1018 self.wopener = scmutil.opener(self.wc)
1019 self.childmap = mapfile(ui, self.join('hg-childmap'))
1019 self.childmap = mapfile(ui, self.join('hg-childmap'))
1020 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1020 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1021
1021
1022 if created:
1022 if created:
1023 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1023 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1024 fp = open(hook, 'w')
1024 fp = open(hook, 'w')
1025 fp.write(pre_revprop_change)
1025 fp.write(pre_revprop_change)
1026 fp.close()
1026 fp.close()
1027 util.setflags(hook, False, True)
1027 util.setflags(hook, False, True)
1028
1028
1029 output = self.run0('info')
1029 output = self.run0('info')
1030 self.uuid = self.uuid_re.search(output).group(1).strip()
1030 self.uuid = self.uuid_re.search(output).group(1).strip()
1031
1031
1032 def wjoin(self, *names):
1032 def wjoin(self, *names):
1033 return os.path.join(self.wc, *names)
1033 return os.path.join(self.wc, *names)
1034
1034
1035 def putfile(self, filename, flags, data):
1035 def putfile(self, filename, flags, data):
1036 if 'l' in flags:
1036 if 'l' in flags:
1037 self.wopener.symlink(data, filename)
1037 self.wopener.symlink(data, filename)
1038 else:
1038 else:
1039 try:
1039 try:
1040 if os.path.islink(self.wjoin(filename)):
1040 if os.path.islink(self.wjoin(filename)):
1041 os.unlink(filename)
1041 os.unlink(filename)
1042 except OSError:
1042 except OSError:
1043 pass
1043 pass
1044 self.wopener.write(filename, data)
1044 self.wopener.write(filename, data)
1045
1045
1046 if self.is_exec:
1046 if self.is_exec:
1047 was_exec = self.is_exec(self.wjoin(filename))
1047 was_exec = self.is_exec(self.wjoin(filename))
1048 else:
1048 else:
1049 # On filesystems not supporting execute-bit, there is no way
1049 # On filesystems not supporting execute-bit, there is no way
1050 # to know if it is set but asking subversion. Setting it
1050 # to know if it is set but asking subversion. Setting it
1051 # systematically is just as expensive and much simpler.
1051 # systematically is just as expensive and much simpler.
1052 was_exec = 'x' not in flags
1052 was_exec = 'x' not in flags
1053
1053
1054 util.setflags(self.wjoin(filename), False, 'x' in flags)
1054 util.setflags(self.wjoin(filename), False, 'x' in flags)
1055 if was_exec:
1055 if was_exec:
1056 if 'x' not in flags:
1056 if 'x' not in flags:
1057 self.delexec.append(filename)
1057 self.delexec.append(filename)
1058 else:
1058 else:
1059 if 'x' in flags:
1059 if 'x' in flags:
1060 self.setexec.append(filename)
1060 self.setexec.append(filename)
1061
1061
1062 def _copyfile(self, source, dest):
1062 def _copyfile(self, source, dest):
1063 # SVN's copy command pukes if the destination file exists, but
1063 # SVN's copy command pukes if the destination file exists, but
1064 # our copyfile method expects to record a copy that has
1064 # our copyfile method expects to record a copy that has
1065 # already occurred. Cross the semantic gap.
1065 # already occurred. Cross the semantic gap.
1066 wdest = self.wjoin(dest)
1066 wdest = self.wjoin(dest)
1067 exists = os.path.lexists(wdest)
1067 exists = os.path.lexists(wdest)
1068 if exists:
1068 if exists:
1069 fd, tempname = tempfile.mkstemp(
1069 fd, tempname = tempfile.mkstemp(
1070 prefix='hg-copy-', dir=os.path.dirname(wdest))
1070 prefix='hg-copy-', dir=os.path.dirname(wdest))
1071 os.close(fd)
1071 os.close(fd)
1072 os.unlink(tempname)
1072 os.unlink(tempname)
1073 os.rename(wdest, tempname)
1073 os.rename(wdest, tempname)
1074 try:
1074 try:
1075 self.run0('copy', source, dest)
1075 self.run0('copy', source, dest)
1076 finally:
1076 finally:
1077 if exists:
1077 if exists:
1078 try:
1078 try:
1079 os.unlink(wdest)
1079 os.unlink(wdest)
1080 except OSError:
1080 except OSError:
1081 pass
1081 pass
1082 os.rename(tempname, wdest)
1082 os.rename(tempname, wdest)
1083
1083
1084 def dirs_of(self, files):
1084 def dirs_of(self, files):
1085 dirs = set()
1085 dirs = set()
1086 for f in files:
1086 for f in files:
1087 if os.path.isdir(self.wjoin(f)):
1087 if os.path.isdir(self.wjoin(f)):
1088 dirs.add(f)
1088 dirs.add(f)
1089 for i in strutil.rfindall(f, '/'):
1089 for i in strutil.rfindall(f, '/'):
1090 dirs.add(f[:i])
1090 dirs.add(f[:i])
1091 return dirs
1091 return dirs
1092
1092
1093 def add_dirs(self, files):
1093 def add_dirs(self, files):
1094 add_dirs = [d for d in sorted(self.dirs_of(files))
1094 add_dirs = [d for d in sorted(self.dirs_of(files))
1095 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1095 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1096 if add_dirs:
1096 if add_dirs:
1097 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1097 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1098 return add_dirs
1098 return add_dirs
1099
1099
1100 def add_files(self, files):
1100 def add_files(self, files):
1101 if files:
1101 if files:
1102 self.xargs(files, 'add', quiet=True)
1102 self.xargs(files, 'add', quiet=True)
1103 return files
1103 return files
1104
1104
1105 def tidy_dirs(self, names):
1105 def tidy_dirs(self, names):
1106 deleted = []
1106 deleted = []
1107 for d in sorted(self.dirs_of(names), reverse=True):
1107 for d in sorted(self.dirs_of(names), reverse=True):
1108 wd = self.wjoin(d)
1108 wd = self.wjoin(d)
1109 if os.listdir(wd) == '.svn':
1109 if os.listdir(wd) == '.svn':
1110 self.run0('delete', d)
1110 self.run0('delete', d)
1111 deleted.append(d)
1111 deleted.append(d)
1112 return deleted
1112 return deleted
1113
1113
1114 def addchild(self, parent, child):
1114 def addchild(self, parent, child):
1115 self.childmap[parent] = child
1115 self.childmap[parent] = child
1116
1116
1117 def revid(self, rev):
1117 def revid(self, rev):
1118 return u"svn:%s@%s" % (self.uuid, rev)
1118 return u"svn:%s@%s" % (self.uuid, rev)
1119
1119
1120 def putcommit(self, files, copies, parents, commit, source, revmap):
1120 def putcommit(self, files, copies, parents, commit, source, revmap):
1121 for parent in parents:
1121 for parent in parents:
1122 try:
1122 try:
1123 return self.revid(self.childmap[parent])
1123 return self.revid(self.childmap[parent])
1124 except KeyError:
1124 except KeyError:
1125 pass
1125 pass
1126
1126
1127 # Apply changes to working copy
1127 # Apply changes to working copy
1128 for f, v in files:
1128 for f, v in files:
1129 try:
1129 try:
1130 data, mode = source.getfile(f, v)
1130 data, mode = source.getfile(f, v)
1131 except IOError:
1131 except IOError:
1132 self.delete.append(f)
1132 self.delete.append(f)
1133 else:
1133 else:
1134 self.putfile(f, mode, data)
1134 self.putfile(f, mode, data)
1135 if f in copies:
1135 if f in copies:
1136 self.copies.append([copies[f], f])
1136 self.copies.append([copies[f], f])
1137 files = [f[0] for f in files]
1137 files = [f[0] for f in files]
1138
1138
1139 entries = set(self.delete)
1139 entries = set(self.delete)
1140 files = frozenset(files)
1140 files = frozenset(files)
1141 entries.update(self.add_dirs(files.difference(entries)))
1141 entries.update(self.add_dirs(files.difference(entries)))
1142 if self.copies:
1142 if self.copies:
1143 for s, d in self.copies:
1143 for s, d in self.copies:
1144 self._copyfile(s, d)
1144 self._copyfile(s, d)
1145 self.copies = []
1145 self.copies = []
1146 if self.delete:
1146 if self.delete:
1147 self.xargs(self.delete, 'delete')
1147 self.xargs(self.delete, 'delete')
1148 self.delete = []
1148 self.delete = []
1149 entries.update(self.add_files(files.difference(entries)))
1149 entries.update(self.add_files(files.difference(entries)))
1150 entries.update(self.tidy_dirs(entries))
1150 entries.update(self.tidy_dirs(entries))
1151 if self.delexec:
1151 if self.delexec:
1152 self.xargs(self.delexec, 'propdel', 'svn:executable')
1152 self.xargs(self.delexec, 'propdel', 'svn:executable')
1153 self.delexec = []
1153 self.delexec = []
1154 if self.setexec:
1154 if self.setexec:
1155 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1155 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1156 self.setexec = []
1156 self.setexec = []
1157
1157
1158 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1158 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1159 fp = os.fdopen(fd, 'w')
1159 fp = os.fdopen(fd, 'w')
1160 fp.write(commit.desc)
1160 fp.write(commit.desc)
1161 fp.close()
1161 fp.close()
1162 try:
1162 try:
1163 output = self.run0('commit',
1163 output = self.run0('commit',
1164 username=util.shortuser(commit.author),
1164 username=util.shortuser(commit.author),
1165 file=messagefile,
1165 file=messagefile,
1166 encoding='utf-8')
1166 encoding='utf-8')
1167 try:
1167 try:
1168 rev = self.commit_re.search(output).group(1)
1168 rev = self.commit_re.search(output).group(1)
1169 except AttributeError:
1169 except AttributeError:
1170 if not files:
1170 if not files:
1171 return parents[0]
1171 return parents[0]
1172 self.ui.warn(_('unexpected svn output:\n'))
1172 self.ui.warn(_('unexpected svn output:\n'))
1173 self.ui.warn(output)
1173 self.ui.warn(output)
1174 raise util.Abort(_('unable to cope with svn output'))
1174 raise util.Abort(_('unable to cope with svn output'))
1175 if commit.rev:
1175 if commit.rev:
1176 self.run('propset', 'hg:convert-rev', commit.rev,
1176 self.run('propset', 'hg:convert-rev', commit.rev,
1177 revprop=True, revision=rev)
1177 revprop=True, revision=rev)
1178 if commit.branch and commit.branch != 'default':
1178 if commit.branch and commit.branch != 'default':
1179 self.run('propset', 'hg:convert-branch', commit.branch,
1179 self.run('propset', 'hg:convert-branch', commit.branch,
1180 revprop=True, revision=rev)
1180 revprop=True, revision=rev)
1181 for parent in parents:
1181 for parent in parents:
1182 self.addchild(parent, rev)
1182 self.addchild(parent, rev)
1183 return self.revid(rev)
1183 return self.revid(rev)
1184 finally:
1184 finally:
1185 os.unlink(messagefile)
1185 os.unlink(messagefile)
1186
1186
1187 def puttags(self, tags):
1187 def puttags(self, tags):
1188 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1188 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1189 return None, None
1189 return None, None
@@ -1,451 +1,451 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10
10
11 import os
11 import os
12 import errno
12 import errno
13 import platform
13 import platform
14 import shutil
14 import shutil
15 import stat
15 import stat
16 import tempfile
16 import tempfile
17
17
18 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
18 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
19 from mercurial.i18n import _
19 from mercurial.i18n import _
20
20
21 shortname = '.hglf'
21 shortname = '.hglf'
22 longname = 'largefiles'
22 longname = 'largefiles'
23
23
24
24
25 # -- Portability wrappers ----------------------------------------------
25 # -- Portability wrappers ----------------------------------------------
26
26
def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
    '''Portability wrapper: walk a dirstate with no explicit file list.'''
    return dirstate.walk(matcher, [], unknown, ignored)
29
29
def repo_add(repo, list):
    '''Add the given files to the working directory context.'''
    add = repo[None].add
    return add(list)
33
33
34 def repo_remove(repo, list, unlink=False):
34 def repo_remove(repo, list, unlink=False):
35 def remove(list, unlink):
35 def remove(list, unlink):
36 wlock = repo.wlock()
36 wlock = repo.wlock()
37 try:
37 try:
38 if unlink:
38 if unlink:
39 for f in list:
39 for f in list:
40 try:
40 try:
41 util.unlinkpath(repo.wjoin(f))
41 util.unlinkpath(repo.wjoin(f))
42 except OSError, inst:
42 except OSError, inst:
43 if inst.errno != errno.ENOENT:
43 if inst.errno != errno.ENOENT:
44 raise
44 raise
45 repo[None].forget(list)
45 repo[None].forget(list)
46 finally:
46 finally:
47 wlock.release()
47 wlock.release()
48 return remove(list, unlink=unlink)
48 return remove(list, unlink=unlink)
49
49
def repo_forget(repo, list):
    '''Forget the given files without removing them from disk.'''
    forget = repo[None].forget
    return forget(list)
53
53
def findoutgoing(repo, remote, force):
    '''Return the changelog nodes missing from remote.'''
    # Imported locally to avoid a cycle at module load time.
    from mercurial import discovery
    common, _anyinc, _heads = discovery.findcommonincoming(repo,
        remote, force=force)
    return repo.changelog.findmissing(common)
59
59
60 # -- Private worker functions ------------------------------------------
60 # -- Private worker functions ------------------------------------------
61
61
def getminsize(ui, assumelfiles, opt, default=10):
    '''Resolve the minimum largefile size (in MB) as a float.

    Priority: explicit command-line opt, then the [largefiles] minsize
    config (only when assumelfiles), then nothing. Aborts when the value
    is not a number or when no value could be determined.'''
    lfsize = opt
    if not lfsize and assumelfiles:
        lfsize = ui.config(longname, 'minsize', default=default)
    if lfsize:
        try:
            lfsize = float(lfsize)
        except ValueError:
            # BUGFIX: dropped trailing '\n' — Abort messages must not end
            # with a newline.
            raise util.Abort(_('largefiles: size must be number (not %s)')
                             % lfsize)
    if lfsize is None:
        raise util.Abort(_('minimum size for largefiles must be specified'))
    return lfsize
75
75
def link(src, dest):
    '''Hardlink src to dest, falling back to an atomic copy that
    preserves the source's permission bits.'''
    try:
        util.oslink(src, dest)
    except OSError:
        # if hardlinks fail, fallback on atomic copy
        dst = util.atomictempfile(dest)
        for chunk in util.filechunkiter(open(src, 'rb')):
            dst.write(chunk)
        dst.close()
        os.chmod(dest, os.stat(src).st_mode)
86
86
87 def usercachepath(ui, hash):
87 def usercachepath(ui, hash):
88 path = ui.configpath(longname, 'usercache', None)
88 path = ui.configpath(longname, 'usercache', None)
89 if path:
89 if path:
90 path = os.path.join(path, hash)
90 path = os.path.join(path, hash)
91 else:
91 else:
92 if os.name == 'nt':
92 if os.name == 'nt':
93 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
93 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
94 if appdata:
94 if appdata:
95 path = os.path.join(appdata, longname, hash)
95 path = os.path.join(appdata, longname, hash)
96 elif platform.system() == 'Darwin':
96 elif platform.system() == 'Darwin':
97 home = os.getenv('HOME')
97 home = os.getenv('HOME')
98 if home:
98 if home:
99 path = os.path.join(home, 'Library', 'Caches',
99 path = os.path.join(home, 'Library', 'Caches',
100 longname, hash)
100 longname, hash)
101 elif os.name == 'posix':
101 elif os.name == 'posix':
102 path = os.getenv('XDG_CACHE_HOME')
102 path = os.getenv('XDG_CACHE_HOME')
103 if path:
103 if path:
104 path = os.path.join(path, longname, hash)
104 path = os.path.join(path, longname, hash)
105 else:
105 else:
106 home = os.getenv('HOME')
106 home = os.getenv('HOME')
107 if home:
107 if home:
108 path = os.path.join(home, '.cache', longname, hash)
108 path = os.path.join(home, '.cache', longname, hash)
109 else:
109 else:
110 raise util.Abort(_('unknown operating system: %s\n') % os.name)
110 raise util.Abort(_('unknown operating system: %s\n') % os.name)
111 return path
111 return path
112
112
def inusercache(ui, hash):
    '''Return True if the hash exists in the per-user cache.'''
    path = usercachepath(ui, hash)
    return path and os.path.exists(path)
116
116
def findfile(repo, hash):
    '''Locate a largefile by hash: prefer the repo store, then the user
    cache (copying it into the store on a hit). Returns None if absent.'''
    if instore(repo, hash):
        # message lowercased per hg convention for status output
        repo.ui.note(_('found %s in store\n') % hash)
        return storepath(repo, hash)
    elif inusercache(repo.ui, hash):
        repo.ui.note(_('found %s in system cache\n') % hash)
        path = storepath(repo, hash)
        util.makedirs(os.path.dirname(path))
        link(usercachepath(repo.ui, hash), path)
        return path
    return None
128
128
class largefiles_dirstate(dirstate.dirstate):
    '''A dirstate that normalizes every incoming path to '/'-separated
    form before delegating, since dirstate keys always use '/'.'''
    def __getitem__(self, key):
        return super(largefiles_dirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefiles_dirstate, self).normal(unixpath(f))
    def remove(self, f):
        return super(largefiles_dirstate, self).remove(unixpath(f))
    def add(self, f):
        return super(largefiles_dirstate, self).add(unixpath(f))
    def drop(self, f):
        return super(largefiles_dirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefiles_dirstate, self).forget(unixpath(f))
    def normallookup(self, f):
        return super(largefiles_dirstate, self).normallookup(unixpath(f))
144
144
145 def openlfdirstate(ui, repo):
145 def openlfdirstate(ui, repo):
146 '''
146 '''
147 Return a dirstate object that tracks largefiles: i.e. its root is
147 Return a dirstate object that tracks largefiles: i.e. its root is
148 the repo root, but it is saved in .hg/largefiles/dirstate.
148 the repo root, but it is saved in .hg/largefiles/dirstate.
149 '''
149 '''
150 admin = repo.join(longname)
150 admin = repo.join(longname)
151 opener = scmutil.opener(admin)
151 opener = scmutil.opener(admin)
152 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
152 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
153 repo.dirstate._validate)
153 repo.dirstate._validate)
154
154
155 # If the largefiles dirstate does not exist, populate and create
155 # If the largefiles dirstate does not exist, populate and create
156 # it. This ensures that we create it on the first meaningful
156 # it. This ensures that we create it on the first meaningful
157 # largefiles operation in a new clone.
157 # largefiles operation in a new clone.
158 if not os.path.exists(os.path.join(admin, 'dirstate')):
158 if not os.path.exists(os.path.join(admin, 'dirstate')):
159 util.makedirs(admin)
159 util.makedirs(admin)
160 matcher = getstandinmatcher(repo)
160 matcher = getstandinmatcher(repo)
161 for standin in dirstate_walk(repo.dirstate, matcher):
161 for standin in dirstate_walk(repo.dirstate, matcher):
162 lfile = splitstandin(standin)
162 lfile = splitstandin(standin)
163 hash = readstandin(repo, lfile)
163 hash = readstandin(repo, lfile)
164 lfdirstate.normallookup(lfile)
164 lfdirstate.normallookup(lfile)
165 try:
165 try:
166 if hash == hashfile(repo.wjoin(lfile)):
166 if hash == hashfile(repo.wjoin(lfile)):
167 lfdirstate.normal(lfile)
167 lfdirstate.normal(lfile)
168 except OSError, err:
168 except OSError, err:
169 if err.errno != errno.ENOENT:
169 if err.errno != errno.ENOENT:
170 raise
170 raise
171 return lfdirstate
171 return lfdirstate
172
172
def lfdirstate_status(lfdirstate, repo, rev):
    '''Run a status on the largefiles dirstate, resolving 'unsure'
    entries by comparing working-copy hashes against the standins at
    rev. Returns (modified, added, removed, missing, unknown, ignored,
    clean).'''
    match = match_.always(repo.root, repo.getcwd())
    s = lfdirstate.status(match, [], False, False, False)
    unsure, modified, added, removed, missing, unknown, ignored, clean = s
    for lfile in unsure:
        if repo[rev][standin(lfile)].data().strip() != \
                hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            # content verified unchanged: record it as clean
            lfdirstate.normal(lfile)
    return (modified, added, removed, missing, unknown, ignored, clean)
185
185
def listlfiles(repo, rev=None, matcher=None):
    '''return a list of largefiles in the working copy or the
    specified changeset'''
    if matcher is None:
        matcher = getstandinmatcher(repo)

    # ignore unknown files in working directory
    return [splitstandin(f)
            for f in repo[rev].walk(matcher)
            if rev is not None or repo.dirstate[f] != '?']
197
197
def instore(repo, hash):
    '''Return True if the hash is present in the repo-local store.'''
    return os.path.exists(storepath(repo, hash))
200
200
def storepath(repo, hash):
    '''Return the repo-local store path (.hg/largefiles/<hash>).'''
    return repo.join(os.path.join(longname, hash))
203
203
def copyfromcache(repo, hash, filename):
    '''Copy the specified largefile from the repo or system cache to
    filename in the repository. Return true on success or false if the
    file was not found in either cache (which should not happened:
    this is meant to be called only after ensuring that the needed
    largefile exists in the cache).'''
    path = findfile(repo, hash)
    if path is None:
        return False
    util.makedirs(os.path.dirname(repo.wjoin(filename)))
    # The write may fail before the file is fully written, but we
    # don't use atomic writes in the working copy.
    shutil.copy(path, repo.wjoin(filename))
    return True
218
218
def copytostore(repo, rev, file, uploaded=False):
    '''Copy the working-copy version of file into the store under the
    hash recorded in its standin, unless already present.
    NOTE(review): rev and uploaded are unused here; kept for interface
    compatibility with callers.'''
    hash = readstandin(repo, file)
    if instore(repo, hash):
        return
    copytostoreabsolute(repo, repo.wjoin(file), hash)
224
224
def copyalltostore(repo, node):
    '''Copy all largefiles in a given revision to the store'''
    ctx = repo[node]
    for filename in ctx.files():
        # only standins that still exist in the manifest at node
        if isstandin(filename) and filename in ctx.manifest():
            realfile = splitstandin(filename)
            copytostore(repo, ctx.node(), realfile)
233
233
234
234
def copytostoreabsolute(repo, file, hash):
    '''Place the file at the given absolute path into the store under
    hash (hardlinking from the user cache when possible) and mirror it
    back into the user cache.'''
    util.makedirs(os.path.dirname(storepath(repo, hash)))
    if inusercache(repo.ui, hash):
        link(usercachepath(repo.ui, hash), storepath(repo, hash))
    else:
        # atomic copy so a partial write never ends up in the store
        dst = util.atomictempfile(storepath(repo, hash))
        for chunk in util.filechunkiter(open(file, 'rb')):
            dst.write(chunk)
        dst.close()
        util.copymode(file, storepath(repo, hash))
    linktousercache(repo, hash)
246
246
def linktousercache(repo, hash):
    '''Hardlink a store entry into the per-user cache, if one exists.'''
    path = usercachepath(repo.ui, hash)
    if path:
        util.makedirs(os.path.dirname(path))
        link(storepath(repo, hash), path)
252
252
def getstandinmatcher(repo, pats=None, opts=None):
    '''Return a match object that applies pats to the standin directory'''
    # None sentinels instead of mutable default arguments ([], {})
    pats = pats or []
    opts = opts or {}
    standindir = repo.pathto(shortname)
    if pats:
        # patterns supplied: search standin directory relative to current dir
        cwd = repo.getcwd()
        if os.path.isabs(cwd):
            # cwd is an absolute path for hg -R <reponame>
            # work relative to the repository root in this case
            cwd = ''
        pats = [os.path.join(standindir, cwd, pat) for pat in pats]
    elif os.path.isdir(standindir):
        # no patterns: relative to repo root
        pats = [standindir]
    else:
        # no patterns and no standin dir: return matcher that matches nothing
        match = match_.match(repo.root, None, [], exact=True)
        match.matchfn = lambda f: False
        return match
    return getmatcher(repo, pats, opts, showbad=False)
273
273
def getmatcher(repo, pats=None, opts=None, showbad=True):
    '''Wrapper around scmutil.match() that adds showbad: if false,
    neuter the match object's bad() method so it does not print any
    warnings about missing files or directories.'''
    # None sentinels instead of mutable default arguments ([], {})
    match = scmutil.match(repo[None], pats or [], opts or {})

    if not showbad:
        match.bad = lambda f, msg: None
    return match
283
283
284 def composestandinmatcher(repo, rmatcher):
284 def composestandinmatcher(repo, rmatcher):
285 '''Return a matcher that accepts standins corresponding to the
285 '''Return a matcher that accepts standins corresponding to the
286 files accepted by rmatcher. Pass the list of files in the matcher
286 files accepted by rmatcher. Pass the list of files in the matcher
287 as the paths specified by the user.'''
287 as the paths specified by the user.'''
288 smatcher = getstandinmatcher(repo, rmatcher.files())
288 smatcher = getstandinmatcher(repo, rmatcher.files())
289 isstandin = smatcher.matchfn
289 isstandin = smatcher.matchfn
290 def composed_matchfn(f):
290 def composed_matchfn(f):
291 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
291 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
292 smatcher.matchfn = composed_matchfn
292 smatcher.matchfn = composed_matchfn
293
293
294 return smatcher
294 return smatcher
295
295
296 def standin(filename):
296 def standin(filename):
297 '''Return the repo-relative path to the standin for the specified big
297 '''Return the repo-relative path to the standin for the specified big
298 file.'''
298 file.'''
299 # Notes:
299 # Notes:
300 # 1) Most callers want an absolute path, but _create_standin() needs
300 # 1) Most callers want an absolute path, but _create_standin() needs
301 # it repo-relative so lfadd() can pass it to repo_add(). So leave
301 # it repo-relative so lfadd() can pass it to repo_add(). So leave
302 # it up to the caller to use repo.wjoin() to get an absolute path.
302 # it up to the caller to use repo.wjoin() to get an absolute path.
303 # 2) Join with '/' because that's what dirstate always uses, even on
303 # 2) Join with '/' because that's what dirstate always uses, even on
304 # Windows. Change existing separator to '/' first in case we are
304 # Windows. Change existing separator to '/' first in case we are
305 # passed filenames from an external source (like the command line).
305 # passed filenames from an external source (like the command line).
306 return shortname + '/' + filename.replace(os.sep, '/')
306 return shortname + '/' + util.pconvert(filename)
307
307
308 def isstandin(filename):
308 def isstandin(filename):
309 '''Return true if filename is a big file standin. filename must be
309 '''Return true if filename is a big file standin. filename must be
310 in Mercurial's internal form (slash-separated).'''
310 in Mercurial's internal form (slash-separated).'''
311 return filename.startswith(shortname + '/')
311 return filename.startswith(shortname + '/')
312
312
313 def splitstandin(filename):
313 def splitstandin(filename):
314 # Split on / because that's what dirstate always uses, even on Windows.
314 # Split on / because that's what dirstate always uses, even on Windows.
315 # Change local separator to / first just in case we are passed filenames
315 # Change local separator to / first just in case we are passed filenames
316 # from an external source (like the command line).
316 # from an external source (like the command line).
317 bits = filename.replace(os.sep, '/').split('/', 1)
317 bits = util.pconvert(filename).split('/', 1)
318 if len(bits) == 2 and bits[0] == shortname:
318 if len(bits) == 2 and bits[0] == shortname:
319 return bits[1]
319 return bits[1]
320 else:
320 else:
321 return None
321 return None
322
322
323 def updatestandin(repo, standin):
323 def updatestandin(repo, standin):
324 file = repo.wjoin(splitstandin(standin))
324 file = repo.wjoin(splitstandin(standin))
325 if os.path.exists(file):
325 if os.path.exists(file):
326 hash = hashfile(file)
326 hash = hashfile(file)
327 executable = getexecutable(file)
327 executable = getexecutable(file)
328 writestandin(repo, standin, hash, executable)
328 writestandin(repo, standin, hash, executable)
329
329
330 def readstandin(repo, filename, node=None):
330 def readstandin(repo, filename, node=None):
331 '''read hex hash from standin for filename at given node, or working
331 '''read hex hash from standin for filename at given node, or working
332 directory if no node is given'''
332 directory if no node is given'''
333 return repo[node][standin(filename)].data().strip()
333 return repo[node][standin(filename)].data().strip()
334
334
335 def writestandin(repo, standin, hash, executable):
335 def writestandin(repo, standin, hash, executable):
336 '''write hash to <repo.root>/<standin>'''
336 '''write hash to <repo.root>/<standin>'''
337 writehash(hash, repo.wjoin(standin), executable)
337 writehash(hash, repo.wjoin(standin), executable)
338
338
339 def copyandhash(instream, outfile):
339 def copyandhash(instream, outfile):
340 '''Read bytes from instream (iterable) and write them to outfile,
340 '''Read bytes from instream (iterable) and write them to outfile,
341 computing the SHA-1 hash of the data along the way. Close outfile
341 computing the SHA-1 hash of the data along the way. Close outfile
342 when done and return the binary hash.'''
342 when done and return the binary hash.'''
343 hasher = util.sha1('')
343 hasher = util.sha1('')
344 for data in instream:
344 for data in instream:
345 hasher.update(data)
345 hasher.update(data)
346 outfile.write(data)
346 outfile.write(data)
347
347
348 # Blecch: closing a file that somebody else opened is rude and
348 # Blecch: closing a file that somebody else opened is rude and
349 # wrong. But it's so darn convenient and practical! After all,
349 # wrong. But it's so darn convenient and practical! After all,
350 # outfile was opened just to copy and hash.
350 # outfile was opened just to copy and hash.
351 outfile.close()
351 outfile.close()
352
352
353 return hasher.digest()
353 return hasher.digest()
354
354
355 def hashrepofile(repo, file):
355 def hashrepofile(repo, file):
356 return hashfile(repo.wjoin(file))
356 return hashfile(repo.wjoin(file))
357
357
358 def hashfile(file):
358 def hashfile(file):
359 if not os.path.exists(file):
359 if not os.path.exists(file):
360 return ''
360 return ''
361 hasher = util.sha1('')
361 hasher = util.sha1('')
362 fd = open(file, 'rb')
362 fd = open(file, 'rb')
363 for data in blockstream(fd):
363 for data in blockstream(fd):
364 hasher.update(data)
364 hasher.update(data)
365 fd.close()
365 fd.close()
366 return hasher.hexdigest()
366 return hasher.hexdigest()
367
367
368 class limitreader(object):
368 class limitreader(object):
369 def __init__(self, f, limit):
369 def __init__(self, f, limit):
370 self.f = f
370 self.f = f
371 self.limit = limit
371 self.limit = limit
372
372
373 def read(self, length):
373 def read(self, length):
374 if self.limit == 0:
374 if self.limit == 0:
375 return ''
375 return ''
376 length = length > self.limit and self.limit or length
376 length = length > self.limit and self.limit or length
377 self.limit -= length
377 self.limit -= length
378 return self.f.read(length)
378 return self.f.read(length)
379
379
380 def close(self):
380 def close(self):
381 pass
381 pass
382
382
383 def blockstream(infile, blocksize=128 * 1024):
383 def blockstream(infile, blocksize=128 * 1024):
384 """Generator that yields blocks of data from infile and closes infile."""
384 """Generator that yields blocks of data from infile and closes infile."""
385 while True:
385 while True:
386 data = infile.read(blocksize)
386 data = infile.read(blocksize)
387 if not data:
387 if not data:
388 break
388 break
389 yield data
389 yield data
390 # same blecch as copyandhash() above
390 # same blecch as copyandhash() above
391 infile.close()
391 infile.close()
392
392
393 def writehash(hash, filename, executable):
393 def writehash(hash, filename, executable):
394 util.makedirs(os.path.dirname(filename))
394 util.makedirs(os.path.dirname(filename))
395 util.writefile(filename, hash + '\n')
395 util.writefile(filename, hash + '\n')
396 os.chmod(filename, getmode(executable))
396 os.chmod(filename, getmode(executable))
397
397
398 def getexecutable(filename):
398 def getexecutable(filename):
399 mode = os.stat(filename).st_mode
399 mode = os.stat(filename).st_mode
400 return ((mode & stat.S_IXUSR) and
400 return ((mode & stat.S_IXUSR) and
401 (mode & stat.S_IXGRP) and
401 (mode & stat.S_IXGRP) and
402 (mode & stat.S_IXOTH))
402 (mode & stat.S_IXOTH))
403
403
404 def getmode(executable):
404 def getmode(executable):
405 if executable:
405 if executable:
406 return 0755
406 return 0755
407 else:
407 else:
408 return 0644
408 return 0644
409
409
410 def urljoin(first, second, *arg):
410 def urljoin(first, second, *arg):
411 def join(left, right):
411 def join(left, right):
412 if not left.endswith('/'):
412 if not left.endswith('/'):
413 left += '/'
413 left += '/'
414 if right.startswith('/'):
414 if right.startswith('/'):
415 right = right[1:]
415 right = right[1:]
416 return left + right
416 return left + right
417
417
418 url = join(first, second)
418 url = join(first, second)
419 for a in arg:
419 for a in arg:
420 url = join(url, a)
420 url = join(url, a)
421 return url
421 return url
422
422
423 def hexsha1(data):
423 def hexsha1(data):
424 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
424 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
425 object data"""
425 object data"""
426 h = util.sha1()
426 h = util.sha1()
427 for chunk in util.filechunkiter(data):
427 for chunk in util.filechunkiter(data):
428 h.update(chunk)
428 h.update(chunk)
429 return h.hexdigest()
429 return h.hexdigest()
430
430
431 def httpsendfile(ui, filename):
431 def httpsendfile(ui, filename):
432 return httpconnection.httpsendfile(ui, filename, 'rb')
432 return httpconnection.httpsendfile(ui, filename, 'rb')
433
433
434 def unixpath(path):
434 def unixpath(path):
435 '''Return a version of path normalized for use with the lfdirstate.'''
435 '''Return a version of path normalized for use with the lfdirstate.'''
436 return os.path.normpath(path).replace(os.sep, '/')
436 return util.pconvert(os.path.normpath(path))
437
437
438 def islfilesrepo(repo):
438 def islfilesrepo(repo):
439 return ('largefiles' in repo.requirements and
439 return ('largefiles' in repo.requirements and
440 util.any(shortname + '/' in f[0] for f in repo.store.datafiles()))
440 util.any(shortname + '/' in f[0] for f in repo.store.datafiles()))
441
441
442 def mkstemp(repo, prefix):
442 def mkstemp(repo, prefix):
443 '''Returns a file descriptor and a filename corresponding to a temporary
443 '''Returns a file descriptor and a filename corresponding to a temporary
444 file in the repo's largefiles store.'''
444 file in the repo's largefiles store.'''
445 path = repo.join(longname)
445 path = repo.join(longname)
446 util.makedirs(path)
446 util.makedirs(path)
447 return tempfile.mkstemp(prefix=prefix, dir=path)
447 return tempfile.mkstemp(prefix=prefix, dir=path)
448
448
449 class storeprotonotcapable(Exception):
449 class storeprotonotcapable(Exception):
450 def __init__(self, storetypes):
450 def __init__(self, storetypes):
451 self.storetypes = storetypes
451 self.storetypes = storetypes
@@ -1,499 +1,499 b''
1 /*
1 /*
2 bdiff.c - efficient binary diff extension for Mercurial
2 bdiff.c - efficient binary diff extension for Mercurial
3
3
4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms of
6 This software may be used and distributed according to the terms of
7 the GNU General Public License, incorporated herein by reference.
7 the GNU General Public License, incorporated herein by reference.
8
8
9 Based roughly on Python difflib
9 Based roughly on Python difflib
10 */
10 */
11
11
12 #include <Python.h>
12 #include <Python.h>
13 #include <stdlib.h>
13 #include <stdlib.h>
14 #include <string.h>
14 #include <string.h>
15 #include <limits.h>
15 #include <limits.h>
16
16
17 #if defined __hpux || defined __SUNPRO_C || defined _AIX
17 #if defined __hpux || defined __SUNPRO_C || defined _AIX
18 #define inline
18 #define inline
19 #endif
19 #endif
20
20
21 #ifdef __linux
21 #ifdef __linux
22 #define inline __inline
22 #define inline __inline
23 #endif
23 #endif
24
24
25 #ifdef _WIN32
25 #ifdef _WIN32
26 #ifdef _MSC_VER
26 #ifdef _MSC_VER
27 #define inline __inline
27 #define inline __inline
28 typedef unsigned long uint32_t;
28 typedef unsigned long uint32_t;
29 #else
29 #else
30 #include <stdint.h>
30 #include <stdint.h>
31 #endif
31 #endif
32 static uint32_t htonl(uint32_t x)
32 static uint32_t htonl(uint32_t x)
33 {
33 {
34 return ((x & 0x000000ffUL) << 24) |
34 return ((x & 0x000000ffUL) << 24) |
35 ((x & 0x0000ff00UL) << 8) |
35 ((x & 0x0000ff00UL) << 8) |
36 ((x & 0x00ff0000UL) >> 8) |
36 ((x & 0x00ff0000UL) >> 8) |
37 ((x & 0xff000000UL) >> 24);
37 ((x & 0xff000000UL) >> 24);
38 }
38 }
39 #else
39 #else
40 #include <sys/types.h>
40 #include <sys/types.h>
41 #if defined __BEOS__ && !defined __HAIKU__
41 #if defined __BEOS__ && !defined __HAIKU__
42 #include <ByteOrder.h>
42 #include <ByteOrder.h>
43 #else
43 #else
44 #include <arpa/inet.h>
44 #include <arpa/inet.h>
45 #endif
45 #endif
46 #include <inttypes.h>
46 #include <inttypes.h>
47 #endif
47 #endif
48
48
49 #include "util.h"
49 #include "util.h"
50
50
51 struct line {
51 struct line {
52 int hash, len, n, e;
52 int hash, len, n, e;
53 const char *l;
53 const char *l;
54 };
54 };
55
55
56 struct pos {
56 struct pos {
57 int pos, len;
57 int pos, len;
58 };
58 };
59
59
60 struct hunk;
60 struct hunk;
61 struct hunk {
61 struct hunk {
62 int a1, a2, b1, b2;
62 int a1, a2, b1, b2;
63 struct hunk *next;
63 struct hunk *next;
64 };
64 };
65
65
66 static int splitlines(const char *a, int len, struct line **lr)
66 static int splitlines(const char *a, int len, struct line **lr)
67 {
67 {
68 unsigned hash;
68 unsigned hash;
69 int i;
69 int i;
70 const char *p, *b = a;
70 const char *p, *b = a;
71 const char * const plast = a + len - 1;
71 const char * const plast = a + len - 1;
72 struct line *l;
72 struct line *l;
73
73
74 /* count the lines */
74 /* count the lines */
75 i = 1; /* extra line for sentinel */
75 i = 1; /* extra line for sentinel */
76 for (p = a; p < a + len; p++)
76 for (p = a; p < a + len; p++)
77 if (*p == '\n' || p == plast)
77 if (*p == '\n' || p == plast)
78 i++;
78 i++;
79
79
80 *lr = l = (struct line *)malloc(sizeof(struct line) * i);
80 *lr = l = (struct line *)malloc(sizeof(struct line) * i);
81 if (!l)
81 if (!l)
82 return -1;
82 return -1;
83
83
84 /* build the line array and calculate hashes */
84 /* build the line array and calculate hashes */
85 hash = 0;
85 hash = 0;
86 for (p = a; p < a + len; p++) {
86 for (p = a; p < a + len; p++) {
87 /* Leonid Yuriev's hash */
87 /* Leonid Yuriev's hash */
88 hash = (hash * 1664525) + (unsigned char)*p + 1013904223;
88 hash = (hash * 1664525) + (unsigned char)*p + 1013904223;
89
89
90 if (*p == '\n' || p == plast) {
90 if (*p == '\n' || p == plast) {
91 l->hash = hash;
91 l->hash = hash;
92 hash = 0;
92 hash = 0;
93 l->len = p - b + 1;
93 l->len = p - b + 1;
94 l->l = b;
94 l->l = b;
95 l->n = INT_MAX;
95 l->n = INT_MAX;
96 l++;
96 l++;
97 b = p + 1;
97 b = p + 1;
98 }
98 }
99 }
99 }
100
100
101 /* set up a sentinel */
101 /* set up a sentinel */
102 l->hash = 0;
102 l->hash = 0;
103 l->len = 0;
103 l->len = 0;
104 l->l = a + len;
104 l->l = a + len;
105 return i - 1;
105 return i - 1;
106 }
106 }
107
107
108 static inline int cmp(struct line *a, struct line *b)
108 static inline int cmp(struct line *a, struct line *b)
109 {
109 {
110 return a->hash != b->hash || a->len != b->len || memcmp(a->l, b->l, a->len);
110 return a->hash != b->hash || a->len != b->len || memcmp(a->l, b->l, a->len);
111 }
111 }
112
112
113 static int equatelines(struct line *a, int an, struct line *b, int bn)
113 static int equatelines(struct line *a, int an, struct line *b, int bn)
114 {
114 {
115 int i, j, buckets = 1, t, scale;
115 int i, j, buckets = 1, t, scale;
116 struct pos *h = NULL;
116 struct pos *h = NULL;
117
117
118 /* build a hash table of the next highest power of 2 */
118 /* build a hash table of the next highest power of 2 */
119 while (buckets < bn + 1)
119 while (buckets < bn + 1)
120 buckets *= 2;
120 buckets *= 2;
121
121
122 /* try to allocate a large hash table to avoid collisions */
122 /* try to allocate a large hash table to avoid collisions */
123 for (scale = 4; scale; scale /= 2) {
123 for (scale = 4; scale; scale /= 2) {
124 h = (struct pos *)malloc(scale * buckets * sizeof(struct pos));
124 h = (struct pos *)malloc(scale * buckets * sizeof(struct pos));
125 if (h)
125 if (h)
126 break;
126 break;
127 }
127 }
128
128
129 if (!h)
129 if (!h)
130 return 0;
130 return 0;
131
131
132 buckets = buckets * scale - 1;
132 buckets = buckets * scale - 1;
133
133
134 /* clear the hash table */
134 /* clear the hash table */
135 for (i = 0; i <= buckets; i++) {
135 for (i = 0; i <= buckets; i++) {
136 h[i].pos = INT_MAX;
136 h[i].pos = INT_MAX;
137 h[i].len = 0;
137 h[i].len = 0;
138 }
138 }
139
139
140 /* add lines to the hash table chains */
140 /* add lines to the hash table chains */
141 for (i = bn - 1; i >= 0; i--) {
141 for (i = bn - 1; i >= 0; i--) {
142 /* find the equivalence class */
142 /* find the equivalence class */
143 for (j = b[i].hash & buckets; h[j].pos != INT_MAX;
143 for (j = b[i].hash & buckets; h[j].pos != INT_MAX;
144 j = (j + 1) & buckets)
144 j = (j + 1) & buckets)
145 if (!cmp(b + i, b + h[j].pos))
145 if (!cmp(b + i, b + h[j].pos))
146 break;
146 break;
147
147
148 /* add to the head of the equivalence class */
148 /* add to the head of the equivalence class */
149 b[i].n = h[j].pos;
149 b[i].n = h[j].pos;
150 b[i].e = j;
150 b[i].e = j;
151 h[j].pos = i;
151 h[j].pos = i;
152 h[j].len++; /* keep track of popularity */
152 h[j].len++; /* keep track of popularity */
153 }
153 }
154
154
155 /* compute popularity threshold */
155 /* compute popularity threshold */
156 t = (bn >= 31000) ? bn / 1000 : 1000000 / (bn + 1);
156 t = (bn >= 31000) ? bn / 1000 : 1000000 / (bn + 1);
157
157
158 /* match items in a to their equivalence class in b */
158 /* match items in a to their equivalence class in b */
159 for (i = 0; i < an; i++) {
159 for (i = 0; i < an; i++) {
160 /* find the equivalence class */
160 /* find the equivalence class */
161 for (j = a[i].hash & buckets; h[j].pos != INT_MAX;
161 for (j = a[i].hash & buckets; h[j].pos != INT_MAX;
162 j = (j + 1) & buckets)
162 j = (j + 1) & buckets)
163 if (!cmp(a + i, b + h[j].pos))
163 if (!cmp(a + i, b + h[j].pos))
164 break;
164 break;
165
165
166 a[i].e = j; /* use equivalence class for quick compare */
166 a[i].e = j; /* use equivalence class for quick compare */
167 if (h[j].len <= t)
167 if (h[j].len <= t)
168 a[i].n = h[j].pos; /* point to head of match list */
168 a[i].n = h[j].pos; /* point to head of match list */
169 else
169 else
170 a[i].n = INT_MAX; /* too popular */
170 a[i].n = INT_MAX; /* too popular */
171 }
171 }
172
172
173 /* discard hash tables */
173 /* discard hash tables */
174 free(h);
174 free(h);
175 return 1;
175 return 1;
176 }
176 }
177
177
178 static int longest_match(struct line *a, struct line *b, struct pos *pos,
178 static int longest_match(struct line *a, struct line *b, struct pos *pos,
179 int a1, int a2, int b1, int b2, int *omi, int *omj)
179 int a1, int a2, int b1, int b2, int *omi, int *omj)
180 {
180 {
181 int mi = a1, mj = b1, mk = 0, mb = 0, i, j, k;
181 int mi = a1, mj = b1, mk = 0, mb = 0, i, j, k;
182
182
183 for (i = a1; i < a2; i++) {
183 for (i = a1; i < a2; i++) {
184 /* skip things before the current block */
184 /* skip things before the current block */
185 for (j = a[i].n; j < b1; j = b[j].n)
185 for (j = a[i].n; j < b1; j = b[j].n)
186 ;
186 ;
187
187
188 /* loop through all lines match a[i] in b */
188 /* loop through all lines match a[i] in b */
189 for (; j < b2; j = b[j].n) {
189 for (; j < b2; j = b[j].n) {
190 /* does this extend an earlier match? */
190 /* does this extend an earlier match? */
191 if (i > a1 && j > b1 && pos[j - 1].pos == i - 1)
191 if (i > a1 && j > b1 && pos[j - 1].pos == i - 1)
192 k = pos[j - 1].len + 1;
192 k = pos[j - 1].len + 1;
193 else
193 else
194 k = 1;
194 k = 1;
195 pos[j].pos = i;
195 pos[j].pos = i;
196 pos[j].len = k;
196 pos[j].len = k;
197
197
198 /* best match so far? */
198 /* best match so far? */
199 if (k > mk) {
199 if (k > mk) {
200 mi = i;
200 mi = i;
201 mj = j;
201 mj = j;
202 mk = k;
202 mk = k;
203 }
203 }
204 }
204 }
205 }
205 }
206
206
207 if (mk) {
207 if (mk) {
208 mi = mi - mk + 1;
208 mi = mi - mk + 1;
209 mj = mj - mk + 1;
209 mj = mj - mk + 1;
210 }
210 }
211
211
212 /* expand match to include neighboring popular lines */
212 /* expand match to include neighboring popular lines */
213 while (mi - mb > a1 && mj - mb > b1 &&
213 while (mi - mb > a1 && mj - mb > b1 &&
214 a[mi - mb - 1].e == b[mj - mb - 1].e)
214 a[mi - mb - 1].e == b[mj - mb - 1].e)
215 mb++;
215 mb++;
216 while (mi + mk < a2 && mj + mk < b2 &&
216 while (mi + mk < a2 && mj + mk < b2 &&
217 a[mi + mk].e == b[mj + mk].e)
217 a[mi + mk].e == b[mj + mk].e)
218 mk++;
218 mk++;
219
219
220 *omi = mi - mb;
220 *omi = mi - mb;
221 *omj = mj - mb;
221 *omj = mj - mb;
222
222
223 return mk + mb;
223 return mk + mb;
224 }
224 }
225
225
226 static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos,
226 static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos,
227 int a1, int a2, int b1, int b2, struct hunk *l)
227 int a1, int a2, int b1, int b2, struct hunk *l)
228 {
228 {
229 int i, j, k;
229 int i, j, k;
230
230
231 while (1) {
231 while (1) {
232 /* find the longest match in this chunk */
232 /* find the longest match in this chunk */
233 k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j);
233 k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j);
234 if (!k)
234 if (!k)
235 return l;
235 return l;
236
236
237 /* and recurse on the remaining chunks on either side */
237 /* and recurse on the remaining chunks on either side */
238 l = recurse(a, b, pos, a1, i, b1, j, l);
238 l = recurse(a, b, pos, a1, i, b1, j, l);
239 if (!l)
239 if (!l)
240 return NULL;
240 return NULL;
241
241
242 l->next = (struct hunk *)malloc(sizeof(struct hunk));
242 l->next = (struct hunk *)malloc(sizeof(struct hunk));
243 if (!l->next)
243 if (!l->next)
244 return NULL;
244 return NULL;
245
245
246 l = l->next;
246 l = l->next;
247 l->a1 = i;
247 l->a1 = i;
248 l->a2 = i + k;
248 l->a2 = i + k;
249 l->b1 = j;
249 l->b1 = j;
250 l->b2 = j + k;
250 l->b2 = j + k;
251 l->next = NULL;
251 l->next = NULL;
252
252
253 /* tail-recursion didn't happen, so do equivalent iteration */
253 /* tail-recursion didn't happen, so do equivalent iteration */
254 a1 = i + k;
254 a1 = i + k;
255 b1 = j + k;
255 b1 = j + k;
256 }
256 }
257 }
257 }
258
258
259 static int diff(struct line *a, int an, struct line *b, int bn,
259 static int diff(struct line *a, int an, struct line *b, int bn,
260 struct hunk *base)
260 struct hunk *base)
261 {
261 {
262 struct hunk *curr;
262 struct hunk *curr;
263 struct pos *pos;
263 struct pos *pos;
264 int t, count = 0;
264 int t, count = 0;
265
265
266 /* allocate and fill arrays */
266 /* allocate and fill arrays */
267 t = equatelines(a, an, b, bn);
267 t = equatelines(a, an, b, bn);
268 pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos));
268 pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos));
269
269
270 if (pos && t) {
270 if (pos && t) {
271 /* generate the matching block list */
271 /* generate the matching block list */
272
272
273 curr = recurse(a, b, pos, 0, an, 0, bn, base);
273 curr = recurse(a, b, pos, 0, an, 0, bn, base);
274 if (!curr)
274 if (!curr)
275 return -1;
275 return -1;
276
276
277 /* sentinel end hunk */
277 /* sentinel end hunk */
278 curr->next = (struct hunk *)malloc(sizeof(struct hunk));
278 curr->next = (struct hunk *)malloc(sizeof(struct hunk));
279 if (!curr->next)
279 if (!curr->next)
280 return -1;
280 return -1;
281 curr = curr->next;
281 curr = curr->next;
282 curr->a1 = curr->a2 = an;
282 curr->a1 = curr->a2 = an;
283 curr->b1 = curr->b2 = bn;
283 curr->b1 = curr->b2 = bn;
284 curr->next = NULL;
284 curr->next = NULL;
285 }
285 }
286
286
287 free(pos);
287 free(pos);
288
288
289 /* normalize the hunk list, try to push each hunk towards the end */
289 /* normalize the hunk list, try to push each hunk towards the end */
290 for (curr = base->next; curr; curr = curr->next) {
290 for (curr = base->next; curr; curr = curr->next) {
291 struct hunk *next = curr->next;
291 struct hunk *next = curr->next;
292 int shift = 0;
292 int shift = 0;
293
293
294 if (!next)
294 if (!next)
295 break;
295 break;
296
296
297 if (curr->a2 == next->a1)
297 if (curr->a2 == next->a1)
298 while (curr->a2 + shift < an && curr->b2 + shift < bn
298 while (curr->a2 + shift < an && curr->b2 + shift < bn
299 && !cmp(a + curr->a2 + shift,
299 && !cmp(a + curr->a2 + shift,
300 b + curr->b2 + shift))
300 b + curr->b2 + shift))
301 shift++;
301 shift++;
302 else if (curr->b2 == next->b1)
302 else if (curr->b2 == next->b1)
303 while (curr->b2 + shift < bn && curr->a2 + shift < an
303 while (curr->b2 + shift < bn && curr->a2 + shift < an
304 && !cmp(b + curr->b2 + shift,
304 && !cmp(b + curr->b2 + shift,
305 a + curr->a2 + shift))
305 a + curr->a2 + shift))
306 shift++;
306 shift++;
307 if (!shift)
307 if (!shift)
308 continue;
308 continue;
309 curr->b2 += shift;
309 curr->b2 += shift;
310 next->b1 += shift;
310 next->b1 += shift;
311 curr->a2 += shift;
311 curr->a2 += shift;
312 next->a1 += shift;
312 next->a1 += shift;
313 }
313 }
314
314
315 for (curr = base->next; curr; curr = curr->next)
315 for (curr = base->next; curr; curr = curr->next)
316 count++;
316 count++;
317 return count;
317 return count;
318 }
318 }
319
319
320 static void freehunks(struct hunk *l)
320 static void freehunks(struct hunk *l)
321 {
321 {
322 struct hunk *n;
322 struct hunk *n;
323 for (; l; l = n) {
323 for (; l; l = n) {
324 n = l->next;
324 n = l->next;
325 free(l);
325 free(l);
326 }
326 }
327 }
327 }
328
328
329 static PyObject *blocks(PyObject *self, PyObject *args)
329 static PyObject *blocks(PyObject *self, PyObject *args)
330 {
330 {
331 PyObject *sa, *sb, *rl = NULL, *m;
331 PyObject *sa, *sb, *rl = NULL, *m;
332 struct line *a, *b;
332 struct line *a, *b;
333 struct hunk l, *h;
333 struct hunk l, *h;
334 int an, bn, count, pos = 0;
334 int an, bn, count, pos = 0;
335
335
336 if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
336 if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
337 return NULL;
337 return NULL;
338
338
339 an = splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
339 an = splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
340 bn = splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
340 bn = splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
341
341
342 if (!a || !b)
342 if (!a || !b)
343 goto nomem;
343 goto nomem;
344
344
345 l.next = NULL;
345 l.next = NULL;
346 count = diff(a, an, b, bn, &l);
346 count = diff(a, an, b, bn, &l);
347 if (count < 0)
347 if (count < 0)
348 goto nomem;
348 goto nomem;
349
349
350 rl = PyList_New(count);
350 rl = PyList_New(count);
351 if (!rl)
351 if (!rl)
352 goto nomem;
352 goto nomem;
353
353
354 for (h = l.next; h; h = h->next) {
354 for (h = l.next; h; h = h->next) {
355 m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
355 m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
356 PyList_SetItem(rl, pos, m);
356 PyList_SetItem(rl, pos, m);
357 pos++;
357 pos++;
358 }
358 }
359
359
360 nomem:
360 nomem:
361 free(a);
361 free(a);
362 free(b);
362 free(b);
363 freehunks(l.next);
363 freehunks(l.next);
364 return rl ? rl : PyErr_NoMemory();
364 return rl ? rl : PyErr_NoMemory();
365 }
365 }
366
366
367 static PyObject *bdiff(PyObject *self, PyObject *args)
367 static PyObject *bdiff(PyObject *self, PyObject *args)
368 {
368 {
369 char *sa, *sb, *rb;
369 char *sa, *sb, *rb;
370 PyObject *result = NULL;
370 PyObject *result = NULL;
371 struct line *al, *bl;
371 struct line *al, *bl;
372 struct hunk l, *h;
372 struct hunk l, *h;
373 uint32_t encode[3];
373 uint32_t encode[3];
374 int an, bn, len = 0, la, lb, count;
374 int an, bn, len = 0, la, lb, count;
375
375
376 if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
376 if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
377 return NULL;
377 return NULL;
378
378
379 an = splitlines(sa, la, &al);
379 an = splitlines(sa, la, &al);
380 bn = splitlines(sb, lb, &bl);
380 bn = splitlines(sb, lb, &bl);
381 if (!al || !bl)
381 if (!al || !bl)
382 goto nomem;
382 goto nomem;
383
383
384 l.next = NULL;
384 l.next = NULL;
385 count = diff(al, an, bl, bn, &l);
385 count = diff(al, an, bl, bn, &l);
386 if (count < 0)
386 if (count < 0)
387 goto nomem;
387 goto nomem;
388
388
389 /* calculate length of output */
389 /* calculate length of output */
390 la = lb = 0;
390 la = lb = 0;
391 for (h = l.next; h; h = h->next) {
391 for (h = l.next; h; h = h->next) {
392 if (h->a1 != la || h->b1 != lb)
392 if (h->a1 != la || h->b1 != lb)
393 len += 12 + bl[h->b1].l - bl[lb].l;
393 len += 12 + bl[h->b1].l - bl[lb].l;
394 la = h->a2;
394 la = h->a2;
395 lb = h->b2;
395 lb = h->b2;
396 }
396 }
397
397
398 result = PyBytes_FromStringAndSize(NULL, len);
398 result = PyBytes_FromStringAndSize(NULL, len);
399
399
400 if (!result)
400 if (!result)
401 goto nomem;
401 goto nomem;
402
402
403 /* build binary patch */
403 /* build binary patch */
404 rb = PyBytes_AsString(result);
404 rb = PyBytes_AsString(result);
405 la = lb = 0;
405 la = lb = 0;
406
406
407 for (h = l.next; h; h = h->next) {
407 for (h = l.next; h; h = h->next) {
408 if (h->a1 != la || h->b1 != lb) {
408 if (h->a1 != la || h->b1 != lb) {
409 len = bl[h->b1].l - bl[lb].l;
409 len = bl[h->b1].l - bl[lb].l;
410 encode[0] = htonl(al[la].l - al->l);
410 encode[0] = htonl(al[la].l - al->l);
411 encode[1] = htonl(al[h->a1].l - al->l);
411 encode[1] = htonl(al[h->a1].l - al->l);
412 encode[2] = htonl(len);
412 encode[2] = htonl(len);
413 memcpy(rb, encode, 12);
413 memcpy(rb, encode, 12);
414 memcpy(rb + 12, bl[lb].l, len);
414 memcpy(rb + 12, bl[lb].l, len);
415 rb += 12 + len;
415 rb += 12 + len;
416 }
416 }
417 la = h->a2;
417 la = h->a2;
418 lb = h->b2;
418 lb = h->b2;
419 }
419 }
420
420
421 nomem:
421 nomem:
422 free(al);
422 free(al);
423 free(bl);
423 free(bl);
424 freehunks(l.next);
424 freehunks(l.next);
425 return result ? result : PyErr_NoMemory();
425 return result ? result : PyErr_NoMemory();
426 }
426 }
427
427
428 /*
428 /*
429 * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
429 * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
430 * reduce whitespace sequences to a single space and trim remaining whitespace
430 * reduce whitespace sequences to a single space and trim remaining whitespace
431 * from end of lines.
431 * from end of lines.
432 */
432 */
433 static PyObject *fixws(PyObject *self, PyObject *args)
433 static PyObject *fixws(PyObject *self, PyObject *args)
434 {
434 {
435 PyObject *s, *result = NULL;
435 PyObject *s, *result = NULL;
436 char allws, c;
436 char allws, c;
437 const char *r;
437 const char *r;
438 int i, rlen, wlen = 0;
438 int i, rlen, wlen = 0;
439 char *w;
439 char *w;
440
440
441 if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
441 if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
442 return NULL;
442 return NULL;
443 r = PyBytes_AsString(s);
443 r = PyBytes_AsString(s);
444 rlen = PyBytes_Size(s);
444 rlen = PyBytes_Size(s);
445
445
446 w = (char *)malloc(rlen);
446 w = (char *)malloc(rlen ? rlen : 1);
447 if (!w)
447 if (!w)
448 goto nomem;
448 goto nomem;
449
449
450 for (i = 0; i != rlen; i++) {
450 for (i = 0; i != rlen; i++) {
451 c = r[i];
451 c = r[i];
452 if (c == ' ' || c == '\t' || c == '\r') {
452 if (c == ' ' || c == '\t' || c == '\r') {
453 if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
453 if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
454 w[wlen++] = ' ';
454 w[wlen++] = ' ';
455 } else if (c == '\n' && !allws
455 } else if (c == '\n' && !allws
456 && wlen > 0 && w[wlen - 1] == ' ') {
456 && wlen > 0 && w[wlen - 1] == ' ') {
457 w[wlen - 1] = '\n';
457 w[wlen - 1] = '\n';
458 } else {
458 } else {
459 w[wlen++] = c;
459 w[wlen++] = c;
460 }
460 }
461 }
461 }
462
462
463 result = PyBytes_FromStringAndSize(w, wlen);
463 result = PyBytes_FromStringAndSize(w, wlen);
464
464
465 nomem:
465 nomem:
466 free(w);
466 free(w);
467 return result ? result : PyErr_NoMemory();
467 return result ? result : PyErr_NoMemory();
468 }
468 }
469
469
470
470
471 static char mdiff_doc[] = "Efficient binary diff.";
471 static char mdiff_doc[] = "Efficient binary diff.";
472
472
473 static PyMethodDef methods[] = {
473 static PyMethodDef methods[] = {
474 {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
474 {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
475 {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
475 {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
476 {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
476 {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
477 {NULL, NULL}
477 {NULL, NULL}
478 };
478 };
479
479
480 #ifdef IS_PY3K
480 #ifdef IS_PY3K
481 static struct PyModuleDef bdiff_module = {
481 static struct PyModuleDef bdiff_module = {
482 PyModuleDef_HEAD_INIT,
482 PyModuleDef_HEAD_INIT,
483 "bdiff",
483 "bdiff",
484 mdiff_doc,
484 mdiff_doc,
485 -1,
485 -1,
486 methods
486 methods
487 };
487 };
488
488
489 PyMODINIT_FUNC PyInit_bdiff(void)
489 PyMODINIT_FUNC PyInit_bdiff(void)
490 {
490 {
491 return PyModule_Create(&bdiff_module);
491 return PyModule_Create(&bdiff_module);
492 }
492 }
493 #else
493 #else
494 PyMODINIT_FUNC initbdiff(void)
494 PyMODINIT_FUNC initbdiff(void)
495 {
495 {
496 Py_InitModule3("bdiff", methods, mdiff_doc);
496 Py_InitModule3("bdiff", methods, mdiff_doc);
497 }
497 }
498 #endif
498 #endif
499
499
@@ -1,1319 +1,1319 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile
10 import os, sys, errno, re, tempfile
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import subrepo
13 import subrepo
14
14
def parsealiases(cmd):
    """Return the list of alias names encoded in a command table key.

    Keys look like ``"^log|history"``: a leading ``^`` marker (if any)
    is dropped and the remainder is split on ``|``.
    """
    names = cmd.lstrip("^")
    return names.split("|")
17
17
def findpossible(cmd, table, strict=False):
    """Map each matching command name to ``(aliases, table entry)``.

    With ``strict`` only exact alias matches count; otherwise any alias
    prefixed by ``cmd`` matches too.  Debug commands (or their aliases)
    are returned only when no normal command matched at all.
    """
    normal = {}
    debug = {}

    # An exact key hit short-circuits the scan: the "log" alias beats a
    # prefix match against "^log|history".
    candidates = [cmd] if cmd in table else table.keys()

    for key in candidates:
        aliases = parsealiases(key)
        match = None
        if cmd in aliases:
            match = cmd
        elif not strict:
            # first alias (in declaration order) that cmd is a prefix of
            prefixed = [a for a in aliases if a.startswith(cmd)]
            if prefixed:
                match = prefixed[0]
        if match is None:
            continue
        entry = (aliases, table[key])
        if aliases[0].startswith("debug") or match.startswith("debug"):
            debug[match] = entry
        else:
            normal[match] = entry

    # fall back to debug commands only when nothing normal matched
    return normal or debug
53
53
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises error.UnknownCommand when nothing matches and
    error.AmbiguousCommand when several distinct commands do.
    """
    matches = findpossible(cmd, table, strict)

    # an exact name always wins over prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches))

    if matches:
        return list(matches.values())[0]

    raise error.UnknownCommand(cmd)
70
70
def findrepo(p):
    """Walk upward from path ``p`` to the nearest directory that
    contains a ``.hg`` control directory and return it, or None if the
    filesystem root is reached without finding one."""
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point only at the root: no repo found
            return None
        p = parent
    return p
78
78
def bailifchanged(repo):
    """Abort if the working directory has any uncommitted state.

    Raises util.Abort for an uncommitted merge (second parent set), for
    any modified/added/removed/deleted file, or for a dirty subrepo.
    """
    if repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # only the first four status fields matter; unknown/ignored are fine
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))
    ctx = repo[None]
    for s in ctx.substate:
        if ctx.sub(s).dirty():
            raise util.Abort(_("uncommitted changes in subrepo %s") % s)
89
89
def logmessage(ui, opts):
    """Return the commit message from the -m/--message or -l/--logfile
    option (or None if neither is set).

    The two options are mutually exclusive; a logfile of '-' reads the
    message from ui.fin (stdin).  Raises util.Abort on conflicting
    options or on an unreadable logfile.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = ui.fin.read()
            else:
                # splitlines/join normalizes line endings to '\n'
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
108
108
def loglimit(opts):
    """get the log limit according to option -l/--limit

    Returns the limit as a positive int, or None when no limit was
    given.  Raises util.Abort for non-integer or non-positive values.
    """
    limit = opts.get('limit')
    if not limit:
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
122
122
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in an output filename pattern.

    Always available: %% (literal '%') and %b (basename of repo root).
    When ``node`` is given: %H/%h (full/short hex node), %R (rev number),
    %r (rev number zero-padded to ``revwidth``), %m (first line of
    ``desc`` with non-word characters replaced by '_').  %N is ``total``,
    %n is ``seqno`` (zero-padded to the width of ``total`` when both are
    given).  With ``pathname``: %s/%d/%p (basename/dirname/full path).

    Raises util.Abort for an escape that is not available in this call.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # raw string: '\w' is an invalid escape in a plain literal
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update(node_expander)
            # %r needs the padding width, so it cannot live in the
            # static node_expander table above
            expander['r'] = (lambda:
                str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
168
168
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open (or pass through) a file object for an output pattern.

    An empty pattern or '-' maps to the ui's stdout (writable modes) or
    stdin, duplicated so the caller may safely close it.  A pattern that
    is already a suitable file-like object is returned unchanged.
    Otherwise the pattern is expanded with makefilename() and opened
    with ``mode``.
    """

    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        fp = writable and repo.ui.fout or repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            # dup the fd so closing the returned file leaves ui streams open
            return os.fdopen(os.dup(fp.fileno()), mode)
        else:
            # if this fp can't be duped properly, return
            # a dummy object that can be closed
            class wrappedfileobj(object):
                noop = lambda x: None
                def __init__(self, f):
                    self.f = f
                def __getattr__(self, attr):
                    # swallow close(); delegate everything else
                    if attr == 'close':
                        return self.noop
                    else:
                        return getattr(self.f, attr)

            return wrappedfileobj(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
                             pathname),
                mode)
199
199
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog

    --changelog and --manifest are mutually exclusive and take no
    filename; without them ``file_`` selects a filelog from the repo,
    falling back to opening the revlog file directly from disk.
    Raises error.CommandError or util.Abort on invalid combinations.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.changelog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            # an empty filelog means the file is not tracked here
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise util.Abort(_("revlog '%s' not found") % file_)
        # replace the 2-char suffix (presumably '.i'/'.d') with '.i'
        # and open the revlog without path auditing
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
234
234
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) working directory files.

    ``pats`` is a list of source patterns with the destination as its
    last element.  Returns True when at least one copy failed (so the
    exit code is non-zero).  Must be called with the repo lock held.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    # expand one source pattern into (abs, rel, exact) source triples,
    # warning about unmanaged or removed files that were named exactly
    def walkpat(pat):
        srcs = []
        # with --after, removed ('r') sources are acceptable
        badstates = after and '?' or '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # perform one copy and record it in the dirstate; returns True on
    # failure (callers count errors from that)
    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = scmutil.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    # source vanished: warn but keep the dirstate record
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # compute the target path for plain (non --after) copies
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = scmutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # compute the target path for --after copies, guessing how much of
    # the source prefix to strip by scoring existing target files
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = scmutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
440
440
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.

    With --daemon the process re-executes itself detached (passing the
    child a lock-file path via --daemon-pipefds) and the parent returns
    parentfn(pid) or None.  The child (or a foreground run) calls
    initfn(), writes the pid file if requested, detaches its stdio to
    logfile/devnull when daemonized, and finally returns runfn().
    '''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side of daemonization
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    # separate-argument form: drop the flag and its value
                    del runargs[i:i + 2]
                    break
            def condfn():
                # child signals readiness by removing the lock file
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if opts['pid_file']:
        mode = appendpid and 'a' or 'w'
        fp = open(opts['pid_file'], mode)
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side of daemonization: detach from the terminal and
        # tell the parent we are up by removing the lock file
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            # setsid is unavailable on this platform (e.g. Windows)
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # redirect stdio: stdin from devnull, stdout/stderr to the
        # logfile (or devnull when none was given)
        nullfd = os.open(util.nulldev, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
512
512
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    One patch is written per revision in ``revs``, either to ``fp`` or
    to a file named by expanding ``template`` (see makefileobj).  Each
    patch carries the "# HG changeset patch" header block followed by
    the description and the diff against the (possibly switched) first
    parent.  Note: ``revs`` must be non-empty (max() below).
    '''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    # write a single revision; seqno is the 1-based position in revs
    def single(rev, seqno, fp):
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            # diff against the second parent instead of the first
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        shouldclose = False
        if not fp:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='ab')
            if fp != template:
                shouldclose = True
        if fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and branch != 'default':
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        for chunk in patch.diff(repo, prev, node, opts=opts):
            fp.write(chunk)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
560
560
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   listsubrepos=False):
    '''show diff or diffstat.

    Writes either a full diff or a diffstat summary for the range
    node1..node2 (restricted by ``match``) to ``fp``, or to the ui when
    ``fp`` is None so output gets labels/colors.  When ``listsubrepos``
    is true, subrepo diffs are appended after the main repo's output.
    '''
    if fp is None:
        write = ui.write
    else:
        # Raw file output: drop label keywords the patch helpers pass.
        def write(s, **kw):
            fp.write(s)

    if stat:
        # diffstat does not need context lines; trimming them makes the
        # underlying diff cheaper to produce.
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
603
603
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        # ``patch`` is a matcher (or False) selecting files to diff.
        self.patch = patch
        self.diffopts = diffopts
        # Per-rev buffers used in buffered mode; flushed by flush().
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        '''Emit any buffered header/hunk for ``rev``.

        Returns 1 if a hunk was written, 0 otherwise.
        '''
        if rev in self.header:
            h = self.header[rev]
            # Only repeat the header when it actually changed.
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # Emit the (template-mode) footer, if a subclass recorded one.
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        '''Render one changeset, buffering the output when requested.'''
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            # Quiet mode: just "rev:shortnode".
            self.ui.write("%d:%s\n" % (rev, short(changenode)),
                          label='log.node')
            return

        log = self.repo.changelog
        date = util.datestr(ctx.date())

        # Full 40-char hashes in debug mode, 12-char otherwise.
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)),
                      label='log.changeset')

        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')
        for bookmark in self.repo.nodebookmarks(changenode):
            self.ui.write(_("bookmark:    %s\n") % bookmark,
                          label='log.bookmark')
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag:         %s\n") % tag,
                          label='log.tag')
        if self.ui.debugflag and ctx.phase():
            self.ui.write(_("phase:       %s\n") % _(ctx.phasestr()),
                          label='log.phase')
        for parent in parents:
            self.ui.write(_("parent:      %d:%s\n") % parent,
                          label='log.parent')

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # modified, added, removed relative to the first parent
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # Non-verbose: first line of the description only.
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        '''Emit diffstat and/or patch for ``node`` when requested.'''
        if not matchfn:
            matchfn = self.patch
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
762
762
763
763
764 class changeset_templater(changeset_printer):
764 class changeset_templater(changeset_printer):
765 '''format changeset information.'''
765 '''format changeset information.'''
766
766
767 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
767 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
768 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
768 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
769 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
769 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
770 defaulttempl = {
770 defaulttempl = {
771 'parent': '{rev}:{node|formatnode} ',
771 'parent': '{rev}:{node|formatnode} ',
772 'manifest': '{rev}:{node|formatnode}',
772 'manifest': '{rev}:{node|formatnode}',
773 'file_copy': '{name} ({source})',
773 'file_copy': '{name} ({source})',
774 'extra': '{key}={value|stringescape}'
774 'extra': '{key}={value|stringescape}'
775 }
775 }
776 # filecopy is preserved for compatibility reasons
776 # filecopy is preserved for compatibility reasons
777 defaulttempl['filecopy'] = defaulttempl['file_copy']
777 defaulttempl['filecopy'] = defaulttempl['file_copy']
778 self.t = templater.templater(mapfile, {'formatnode': formatnode},
778 self.t = templater.templater(mapfile, {'formatnode': formatnode},
779 cache=defaulttempl)
779 cache=defaulttempl)
780 self.cache = {}
780 self.cache = {}
781
781
782 def use_template(self, t):
782 def use_template(self, t):
783 '''set template string to use'''
783 '''set template string to use'''
784 self.t.cache['changeset'] = t
784 self.t.cache['changeset'] = t
785
785
786 def _meaningful_parentrevs(self, ctx):
786 def _meaningful_parentrevs(self, ctx):
787 """Return list of meaningful (or all if debug) parentrevs for rev.
787 """Return list of meaningful (or all if debug) parentrevs for rev.
788 """
788 """
789 parents = ctx.parents()
789 parents = ctx.parents()
790 if len(parents) > 1:
790 if len(parents) > 1:
791 return parents
791 return parents
792 if self.ui.debugflag:
792 if self.ui.debugflag:
793 return [parents[0], self.repo['null']]
793 return [parents[0], self.repo['null']]
794 if parents[0].rev() >= ctx.rev() - 1:
794 if parents[0].rev() >= ctx.rev() - 1:
795 return []
795 return []
796 return parents
796 return parents
797
797
798 def _show(self, ctx, copies, matchfn, props):
798 def _show(self, ctx, copies, matchfn, props):
799 '''show a single changeset or file revision'''
799 '''show a single changeset or file revision'''
800
800
801 showlist = templatekw.showlist
801 showlist = templatekw.showlist
802
802
803 # showparents() behaviour depends on ui trace level which
803 # showparents() behaviour depends on ui trace level which
804 # causes unexpected behaviours at templating level and makes
804 # causes unexpected behaviours at templating level and makes
805 # it harder to extract it in a standalone function. Its
805 # it harder to extract it in a standalone function. Its
806 # behaviour cannot be changed so leave it here for now.
806 # behaviour cannot be changed so leave it here for now.
807 def showparents(**args):
807 def showparents(**args):
808 ctx = args['ctx']
808 ctx = args['ctx']
809 parents = [[('rev', p.rev()), ('node', p.hex())]
809 parents = [[('rev', p.rev()), ('node', p.hex())]
810 for p in self._meaningful_parentrevs(ctx)]
810 for p in self._meaningful_parentrevs(ctx)]
811 return showlist('parent', parents, **args)
811 return showlist('parent', parents, **args)
812
812
813 props = props.copy()
813 props = props.copy()
814 props.update(templatekw.keywords)
814 props.update(templatekw.keywords)
815 props['parents'] = showparents
815 props['parents'] = showparents
816 props['templ'] = self.t
816 props['templ'] = self.t
817 props['ctx'] = ctx
817 props['ctx'] = ctx
818 props['repo'] = self.repo
818 props['repo'] = self.repo
819 props['revcache'] = {'copies': copies}
819 props['revcache'] = {'copies': copies}
820 props['cache'] = self.cache
820 props['cache'] = self.cache
821
821
822 # find correct templates for current mode
822 # find correct templates for current mode
823
823
824 tmplmodes = [
824 tmplmodes = [
825 (True, None),
825 (True, None),
826 (self.ui.verbose, 'verbose'),
826 (self.ui.verbose, 'verbose'),
827 (self.ui.quiet, 'quiet'),
827 (self.ui.quiet, 'quiet'),
828 (self.ui.debugflag, 'debug'),
828 (self.ui.debugflag, 'debug'),
829 ]
829 ]
830
830
831 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
831 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
832 for mode, postfix in tmplmodes:
832 for mode, postfix in tmplmodes:
833 for type in types:
833 for type in types:
834 cur = postfix and ('%s_%s' % (type, postfix)) or type
834 cur = postfix and ('%s_%s' % (type, postfix)) or type
835 if mode and cur in self.t:
835 if mode and cur in self.t:
836 types[type] = cur
836 types[type] = cur
837
837
838 try:
838 try:
839
839
840 # write header
840 # write header
841 if types['header']:
841 if types['header']:
842 h = templater.stringify(self.t(types['header'], **props))
842 h = templater.stringify(self.t(types['header'], **props))
843 if self.buffered:
843 if self.buffered:
844 self.header[ctx.rev()] = h
844 self.header[ctx.rev()] = h
845 else:
845 else:
846 if self.lastheader != h:
846 if self.lastheader != h:
847 self.lastheader = h
847 self.lastheader = h
848 self.ui.write(h)
848 self.ui.write(h)
849
849
850 # write changeset metadata, then patch if requested
850 # write changeset metadata, then patch if requested
851 key = types['changeset']
851 key = types['changeset']
852 self.ui.write(templater.stringify(self.t(key, **props)))
852 self.ui.write(templater.stringify(self.t(key, **props)))
853 self.showpatch(ctx.node(), matchfn)
853 self.showpatch(ctx.node(), matchfn)
854
854
855 if types['footer']:
855 if types['footer']:
856 if not self.footer:
856 if not self.footer:
857 self.footer = templater.stringify(self.t(types['footer'],
857 self.footer = templater.stringify(self.t(types['footer'],
858 **props))
858 **props))
859
859
860 except KeyError, inst:
860 except KeyError, inst:
861 msg = _("%s: no key named '%s'")
861 msg = _("%s: no key named '%s'")
862 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
862 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
863 except SyntaxError, inst:
863 except SyntaxError, inst:
864 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
864 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
865
865
866 def show_changeset(ui, repo, opts, buffered=False):
866 def show_changeset(ui, repo, opts, buffered=False):
867 """show one changeset using template or regular display.
867 """show one changeset using template or regular display.
868
868
869 Display format will be the first non-empty hit of:
869 Display format will be the first non-empty hit of:
870 1. option 'template'
870 1. option 'template'
871 2. option 'style'
871 2. option 'style'
872 3. [ui] setting 'logtemplate'
872 3. [ui] setting 'logtemplate'
873 4. [ui] setting 'style'
873 4. [ui] setting 'style'
874 If all of these values are either the unset or the empty string,
874 If all of these values are either the unset or the empty string,
875 regular display via changeset_printer() is done.
875 regular display via changeset_printer() is done.
876 """
876 """
877 # options
877 # options
878 patch = False
878 patch = False
879 if opts.get('patch') or opts.get('stat'):
879 if opts.get('patch') or opts.get('stat'):
880 patch = scmutil.matchall(repo)
880 patch = scmutil.matchall(repo)
881
881
882 tmpl = opts.get('template')
882 tmpl = opts.get('template')
883 style = None
883 style = None
884 if tmpl:
884 if tmpl:
885 tmpl = templater.parsestring(tmpl, quoted=False)
885 tmpl = templater.parsestring(tmpl, quoted=False)
886 else:
886 else:
887 style = opts.get('style')
887 style = opts.get('style')
888
888
889 # ui settings
889 # ui settings
890 if not (tmpl or style):
890 if not (tmpl or style):
891 tmpl = ui.config('ui', 'logtemplate')
891 tmpl = ui.config('ui', 'logtemplate')
892 if tmpl:
892 if tmpl:
893 tmpl = templater.parsestring(tmpl)
893 tmpl = templater.parsestring(tmpl)
894 else:
894 else:
895 style = util.expandpath(ui.config('ui', 'style', ''))
895 style = util.expandpath(ui.config('ui', 'style', ''))
896
896
897 if not (tmpl or style):
897 if not (tmpl or style):
898 return changeset_printer(ui, repo, patch, opts, buffered)
898 return changeset_printer(ui, repo, patch, opts, buffered)
899
899
900 mapfile = None
900 mapfile = None
901 if style and not tmpl:
901 if style and not tmpl:
902 mapfile = style
902 mapfile = style
903 if not os.path.split(mapfile)[0]:
903 if not os.path.split(mapfile)[0]:
904 mapname = (templater.templatepath('map-cmdline.' + mapfile)
904 mapname = (templater.templatepath('map-cmdline.' + mapfile)
905 or templater.templatepath(mapfile))
905 or templater.templatepath(mapfile))
906 if mapname:
906 if mapname:
907 mapfile = mapname
907 mapfile = mapname
908
908
909 try:
909 try:
910 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
910 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
911 except SyntaxError, inst:
911 except SyntaxError, inst:
912 raise util.Abort(inst.args[0])
912 raise util.Abort(inst.args[0])
913 if tmpl:
913 if tmpl:
914 t.use_template(tmpl)
914 t.use_template(tmpl)
915 return t
915 return t
916
916
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    # Record the date of every revision matching the spec; the walk
    # below then reports the first (tipmost) recorded hit.
    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("Found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
937
937
938 def walkchangerevs(repo, match, opts, prepare):
938 def walkchangerevs(repo, match, opts, prepare):
939 '''Iterate over files and the revs in which they changed.
939 '''Iterate over files and the revs in which they changed.
940
940
941 Callers most commonly need to iterate backwards over the history
941 Callers most commonly need to iterate backwards over the history
942 in which they are interested. Doing so has awful (quadratic-looking)
942 in which they are interested. Doing so has awful (quadratic-looking)
943 performance, so we use iterators in a "windowed" way.
943 performance, so we use iterators in a "windowed" way.
944
944
945 We walk a window of revisions in the desired order. Within the
945 We walk a window of revisions in the desired order. Within the
946 window, we first walk forwards to gather data, then in the desired
946 window, we first walk forwards to gather data, then in the desired
947 order (usually backwards) to display it.
947 order (usually backwards) to display it.
948
948
949 This function returns an iterator yielding contexts. Before
949 This function returns an iterator yielding contexts. Before
950 yielding each context, the iterator will first call the prepare
950 yielding each context, the iterator will first call the prepare
951 function on each context in the window in forward order.'''
951 function on each context in the window in forward order.'''
952
952
953 def increasing_windows(start, end, windowsize=8, sizelimit=512):
953 def increasing_windows(start, end, windowsize=8, sizelimit=512):
954 if start < end:
954 if start < end:
955 while start < end:
955 while start < end:
956 yield start, min(windowsize, end - start)
956 yield start, min(windowsize, end - start)
957 start += windowsize
957 start += windowsize
958 if windowsize < sizelimit:
958 if windowsize < sizelimit:
959 windowsize *= 2
959 windowsize *= 2
960 else:
960 else:
961 while start > end:
961 while start > end:
962 yield start, min(windowsize, start - end - 1)
962 yield start, min(windowsize, start - end - 1)
963 start -= windowsize
963 start -= windowsize
964 if windowsize < sizelimit:
964 if windowsize < sizelimit:
965 windowsize *= 2
965 windowsize *= 2
966
966
967 follow = opts.get('follow') or opts.get('follow_first')
967 follow = opts.get('follow') or opts.get('follow_first')
968
968
969 if not len(repo):
969 if not len(repo):
970 return []
970 return []
971
971
972 if follow:
972 if follow:
973 defrange = '%s:0' % repo['.'].rev()
973 defrange = '%s:0' % repo['.'].rev()
974 else:
974 else:
975 defrange = '-1:0'
975 defrange = '-1:0'
976 revs = scmutil.revrange(repo, opts['rev'] or [defrange])
976 revs = scmutil.revrange(repo, opts['rev'] or [defrange])
977 if not revs:
977 if not revs:
978 return []
978 return []
979 wanted = set()
979 wanted = set()
980 slowpath = match.anypats() or (match.files() and opts.get('removed'))
980 slowpath = match.anypats() or (match.files() and opts.get('removed'))
981 fncache = {}
981 fncache = {}
982 change = util.cachefunc(repo.changectx)
982 change = util.cachefunc(repo.changectx)
983
983
984 # First step is to fill wanted, the set of revisions that we want to yield.
984 # First step is to fill wanted, the set of revisions that we want to yield.
985 # When it does not induce extra cost, we also fill fncache for revisions in
985 # When it does not induce extra cost, we also fill fncache for revisions in
986 # wanted: a cache of filenames that were changed (ctx.files()) and that
986 # wanted: a cache of filenames that were changed (ctx.files()) and that
987 # match the file filtering conditions.
987 # match the file filtering conditions.
988
988
989 if not slowpath and not match.files():
989 if not slowpath and not match.files():
990 # No files, no patterns. Display all revs.
990 # No files, no patterns. Display all revs.
991 wanted = set(revs)
991 wanted = set(revs)
992 copies = []
992 copies = []
993
993
994 if not slowpath:
994 if not slowpath:
995 # We only have to read through the filelog to find wanted revisions
995 # We only have to read through the filelog to find wanted revisions
996
996
997 minrev, maxrev = min(revs), max(revs)
997 minrev, maxrev = min(revs), max(revs)
998 def filerevgen(filelog, last):
998 def filerevgen(filelog, last):
999 """
999 """
1000 Only files, no patterns. Check the history of each file.
1000 Only files, no patterns. Check the history of each file.
1001
1001
1002 Examines filelog entries within minrev, maxrev linkrev range
1002 Examines filelog entries within minrev, maxrev linkrev range
1003 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1003 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1004 tuples in backwards order
1004 tuples in backwards order
1005 """
1005 """
1006 cl_count = len(repo)
1006 cl_count = len(repo)
1007 revs = []
1007 revs = []
1008 for j in xrange(0, last + 1):
1008 for j in xrange(0, last + 1):
1009 linkrev = filelog.linkrev(j)
1009 linkrev = filelog.linkrev(j)
1010 if linkrev < minrev:
1010 if linkrev < minrev:
1011 continue
1011 continue
1012 # only yield rev for which we have the changelog, it can
1012 # only yield rev for which we have the changelog, it can
1013 # happen while doing "hg log" during a pull or commit
1013 # happen while doing "hg log" during a pull or commit
1014 if linkrev >= cl_count:
1014 if linkrev >= cl_count:
1015 break
1015 break
1016
1016
1017 parentlinkrevs = []
1017 parentlinkrevs = []
1018 for p in filelog.parentrevs(j):
1018 for p in filelog.parentrevs(j):
1019 if p != nullrev:
1019 if p != nullrev:
1020 parentlinkrevs.append(filelog.linkrev(p))
1020 parentlinkrevs.append(filelog.linkrev(p))
1021 n = filelog.node(j)
1021 n = filelog.node(j)
1022 revs.append((linkrev, parentlinkrevs,
1022 revs.append((linkrev, parentlinkrevs,
1023 follow and filelog.renamed(n)))
1023 follow and filelog.renamed(n)))
1024
1024
1025 return reversed(revs)
1025 return reversed(revs)
1026 def iterfiles():
1026 def iterfiles():
1027 for filename in match.files():
1027 for filename in match.files():
1028 yield filename, None
1028 yield filename, None
1029 for filename_node in copies:
1029 for filename_node in copies:
1030 yield filename_node
1030 yield filename_node
1031 for file_, node in iterfiles():
1031 for file_, node in iterfiles():
1032 filelog = repo.file(file_)
1032 filelog = repo.file(file_)
1033 if not len(filelog):
1033 if not len(filelog):
1034 if node is None:
1034 if node is None:
1035 # A zero count may be a directory or deleted file, so
1035 # A zero count may be a directory or deleted file, so
1036 # try to find matching entries on the slow path.
1036 # try to find matching entries on the slow path.
1037 if follow:
1037 if follow:
1038 raise util.Abort(
1038 raise util.Abort(
1039 _('cannot follow nonexistent file: "%s"') % file_)
1039 _('cannot follow nonexistent file: "%s"') % file_)
1040 slowpath = True
1040 slowpath = True
1041 break
1041 break
1042 else:
1042 else:
1043 continue
1043 continue
1044
1044
1045 if node is None:
1045 if node is None:
1046 last = len(filelog) - 1
1046 last = len(filelog) - 1
1047 else:
1047 else:
1048 last = filelog.rev(node)
1048 last = filelog.rev(node)
1049
1049
1050
1050
1051 # keep track of all ancestors of the file
1051 # keep track of all ancestors of the file
1052 ancestors = set([filelog.linkrev(last)])
1052 ancestors = set([filelog.linkrev(last)])
1053
1053
1054 # iterate from latest to oldest revision
1054 # iterate from latest to oldest revision
1055 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1055 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1056 if not follow:
1056 if not follow:
1057 if rev > maxrev:
1057 if rev > maxrev:
1058 continue
1058 continue
1059 else:
1059 else:
1060 # Note that last might not be the first interesting
1060 # Note that last might not be the first interesting
1061 # rev to us:
1061 # rev to us:
1062 # if the file has been changed after maxrev, we'll
1062 # if the file has been changed after maxrev, we'll
1063 # have linkrev(last) > maxrev, and we still need
1063 # have linkrev(last) > maxrev, and we still need
1064 # to explore the file graph
1064 # to explore the file graph
1065 if rev not in ancestors:
1065 if rev not in ancestors:
1066 continue
1066 continue
1067 # XXX insert 1327 fix here
1067 # XXX insert 1327 fix here
1068 if flparentlinkrevs:
1068 if flparentlinkrevs:
1069 ancestors.update(flparentlinkrevs)
1069 ancestors.update(flparentlinkrevs)
1070
1070
1071 fncache.setdefault(rev, []).append(file_)
1071 fncache.setdefault(rev, []).append(file_)
1072 wanted.add(rev)
1072 wanted.add(rev)
1073 if copied:
1073 if copied:
1074 copies.append(copied)
1074 copies.append(copied)
1075 if slowpath:
1075 if slowpath:
1076 # We have to read the changelog to match filenames against
1076 # We have to read the changelog to match filenames against
1077 # changed files
1077 # changed files
1078
1078
1079 if follow:
1079 if follow:
1080 raise util.Abort(_('can only follow copies/renames for explicit '
1080 raise util.Abort(_('can only follow copies/renames for explicit '
1081 'filenames'))
1081 'filenames'))
1082
1082
1083 # The slow path checks files modified in every changeset.
1083 # The slow path checks files modified in every changeset.
1084 for i in sorted(revs):
1084 for i in sorted(revs):
1085 ctx = change(i)
1085 ctx = change(i)
1086 matches = filter(match, ctx.files())
1086 matches = filter(match, ctx.files())
1087 if matches:
1087 if matches:
1088 fncache[i] = matches
1088 fncache[i] = matches
1089 wanted.add(i)
1089 wanted.add(i)
1090
1090
1091 class followfilter(object):
1091 class followfilter(object):
1092 def __init__(self, onlyfirst=False):
1092 def __init__(self, onlyfirst=False):
1093 self.startrev = nullrev
1093 self.startrev = nullrev
1094 self.roots = set()
1094 self.roots = set()
1095 self.onlyfirst = onlyfirst
1095 self.onlyfirst = onlyfirst
1096
1096
1097 def match(self, rev):
1097 def match(self, rev):
1098 def realparents(rev):
1098 def realparents(rev):
1099 if self.onlyfirst:
1099 if self.onlyfirst:
1100 return repo.changelog.parentrevs(rev)[0:1]
1100 return repo.changelog.parentrevs(rev)[0:1]
1101 else:
1101 else:
1102 return filter(lambda x: x != nullrev,
1102 return filter(lambda x: x != nullrev,
1103 repo.changelog.parentrevs(rev))
1103 repo.changelog.parentrevs(rev))
1104
1104
1105 if self.startrev == nullrev:
1105 if self.startrev == nullrev:
1106 self.startrev = rev
1106 self.startrev = rev
1107 return True
1107 return True
1108
1108
1109 if rev > self.startrev:
1109 if rev > self.startrev:
1110 # forward: all descendants
1110 # forward: all descendants
1111 if not self.roots:
1111 if not self.roots:
1112 self.roots.add(self.startrev)
1112 self.roots.add(self.startrev)
1113 for parent in realparents(rev):
1113 for parent in realparents(rev):
1114 if parent in self.roots:
1114 if parent in self.roots:
1115 self.roots.add(rev)
1115 self.roots.add(rev)
1116 return True
1116 return True
1117 else:
1117 else:
1118 # backwards: all parents
1118 # backwards: all parents
1119 if not self.roots:
1119 if not self.roots:
1120 self.roots.update(realparents(self.startrev))
1120 self.roots.update(realparents(self.startrev))
1121 if rev in self.roots:
1121 if rev in self.roots:
1122 self.roots.remove(rev)
1122 self.roots.remove(rev)
1123 self.roots.update(realparents(rev))
1123 self.roots.update(realparents(rev))
1124 return True
1124 return True
1125
1125
1126 return False
1126 return False
1127
1127
1128 # it might be worthwhile to do this in the iterator if the rev range
1128 # it might be worthwhile to do this in the iterator if the rev range
1129 # is descending and the prune args are all within that range
1129 # is descending and the prune args are all within that range
1130 for rev in opts.get('prune', ()):
1130 for rev in opts.get('prune', ()):
1131 rev = repo.changelog.rev(repo.lookup(rev))
1131 rev = repo.changelog.rev(repo.lookup(rev))
1132 ff = followfilter()
1132 ff = followfilter()
1133 stop = min(revs[0], revs[-1])
1133 stop = min(revs[0], revs[-1])
1134 for x in xrange(rev, stop - 1, -1):
1134 for x in xrange(rev, stop - 1, -1):
1135 if ff.match(x):
1135 if ff.match(x):
1136 wanted.discard(x)
1136 wanted.discard(x)
1137
1137
1138 # Now that wanted is correctly initialized, we can iterate over the
1138 # Now that wanted is correctly initialized, we can iterate over the
1139 # revision range, yielding only revisions in wanted.
1139 # revision range, yielding only revisions in wanted.
1140 def iterate():
1140 def iterate():
1141 if follow and not match.files():
1141 if follow and not match.files():
1142 ff = followfilter(onlyfirst=opts.get('follow_first'))
1142 ff = followfilter(onlyfirst=opts.get('follow_first'))
1143 def want(rev):
1143 def want(rev):
1144 return ff.match(rev) and rev in wanted
1144 return ff.match(rev) and rev in wanted
1145 else:
1145 else:
1146 def want(rev):
1146 def want(rev):
1147 return rev in wanted
1147 return rev in wanted
1148
1148
1149 for i, window in increasing_windows(0, len(revs)):
1149 for i, window in increasing_windows(0, len(revs)):
1150 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1150 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1151 for rev in sorted(nrevs):
1151 for rev in sorted(nrevs):
1152 fns = fncache.get(rev)
1152 fns = fncache.get(rev)
1153 ctx = change(rev)
1153 ctx = change(rev)
1154 if not fns:
1154 if not fns:
1155 def fns_generator():
1155 def fns_generator():
1156 for f in ctx.files():
1156 for f in ctx.files():
1157 if match(f):
1157 if match(f):
1158 yield f
1158 yield f
1159 fns = fns_generator()
1159 fns = fns_generator()
1160 prepare(ctx, fns)
1160 prepare(ctx, fns)
1161 for rev in nrevs:
1161 for rev in nrevs:
1162 yield change(rev)
1162 yield change(rev)
1163 return iterate()
1163 return iterate()
1164
1164
def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
    """Schedule files matched by `match` for addition to the dirstate.

    Walks the working copy, records matching untracked files, recurses
    into subrepositories, and (unless dryrun) asks the working context
    to add the collected names.  Returns the list of names that could
    not be added (including names the matcher rejected).
    """
    def join(f):
        return os.path.join(prefix, f)

    # Collect names the matcher rejects, while still delegating to the
    # matcher's original bad-file handler.
    bad = []
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)

    names = []
    wctx = repo[None]
    # Only build the case-collision auditor when the config asks for a
    # warning or an abort on non-portable names.
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, wctx)
    else:
        cca = None

    for f in repo.walk(match):
        exact = match.exact(f)
        # Exact names are always added; pattern matches only when they
        # are untracked and we are not restricted to explicit names.
        if exact or not explicitonly and f not in repo.dirstate:
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(join(f)))

    for subpath in wctx.substate:
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            # With --subrepos the subrepo adds everything it matches
            # (explicitonly=False); otherwise only explicit names.
            bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
                               not listsubrepos))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not dryrun:
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
1203
1203
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking the files matched by `match` without deleting them.

    Collects forgettable files from the working-copy status, recurses into
    subrepositories, warns about explicitly named files that are not
    tracked, and asks the working context to forget the rest.

    Returns a (bad, forgot) pair: names that could not be forgotten and
    names that were.

    Note: this span of the diff view interleaves the pre- and post-change
    sides; this is the post-change (new-side) logic, where the
    "already untracked" warning loop is skipped entirely when
    explicitonly is set.
    """
    def join(f):
        return os.path.join(prefix, f)

    # Collect names the matcher rejects, while still delegating to the
    # matcher's original bad-file handler.
    bad = []
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)

    wctx = repo[None]
    forgot = []
    s = repo.status(match=match, clean=True)
    # modified + added + deleted + clean are all candidates for forgetting.
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in wctx.substate:
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(ui, submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # Warn about named files that were never tracked (and were not
        # just forgotten above) -- unless the name is a directory.
        for f in match.files():
            if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
                if f not in forgot:
                    if os.path.exists(match.rel(join(f))):
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(join(f)))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(join(f)))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(forget)
    return bad, forgot
1245
1245
def duplicatecopies(repo, rev, p1):
    """Reproduce copies found in the source revision in the dirstate for grafts.

    Every dst<-src copy recorded between p1 and rev is re-recorded in the
    current dirstate so a grafted changeset keeps its copy information.
    """
    copymap = copies.pathcopies(repo[p1], repo[rev])
    for dst, src in copymap.iteritems():
        repo.dirstate.copy(src, dst)
1250
1250
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        scmutil.addremove(repo, pats, opts)

    matcher = scmutil.match(repo[None], pats, opts)
    return commitfunc(ui, repo, message, matcher, opts)
1265
1265
def commiteditor(repo, ctx, subs):
    """Return ctx's existing description, or run the editor to get one.

    Falls back to commitforceeditor only when the context carries no
    description of its own.
    """
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(repo, ctx, subs)
1270
1270
def commitforceeditor(repo, ctx, subs):
    """Run the user's editor to obtain a commit message for ctx.

    Builds an edit template (existing description, then 'HG:' helper
    lines describing user, branch, subrepos and file changes), runs the
    editor from the repository root, strips the 'HG:' lines from the
    result, and returns the message.

    Raises util.Abort when the resulting message is empty.

    Fix: the original chdir to repo.root was not undone if ui.edit
    raised (e.g. the editor exits non-zero), leaving the process in the
    wrong working directory; the chdir pair is now in try/finally.
    """
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(_("HG: Enter commit message."
                      " Lines beginning with 'HG:' are removed."))
    edittext.append(_("HG: Leave message empty to abort commit."))
    edittext.append("HG: --")
    edittext.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        edittext.append(_("HG: branch merge"))
    if ctx.branch():
        edittext.append(_("HG: branch '%s'") % ctx.branch())
    edittext.extend([_("HG: subrepo %s") % s for s in subs])
    edittext.extend([_("HG: added %s") % f for f in added])
    edittext.extend([_("HG: changed %s") % f for f in modified])
    edittext.extend([_("HG: removed %s") % f for f in removed])
    if not added and not modified and not removed:
        edittext.append(_("HG: no files changed"))
    edittext.append("")
    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    try:
        text = repo.ui.edit("\n".join(edittext), ctx.user())
    finally:
        # restore the caller's cwd even if the editor invocation fails
        os.chdir(olddir)
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)

    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
1305
1305
def command(table):
    '''returns a function object bound to table which can be used as
    a decorator for populating table as a command table'''

    def cmd(name, options, synopsis=None):
        def decorator(func):
            # Store a copy of the options list so later mutation of the
            # caller's list does not affect the table entry.
            entry = (func, options[:])
            if synopsis:
                entry = entry + (synopsis,)
            table[name] = entry
            return func
        return decorator

    return cmd
@@ -1,822 +1,822 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding
9 import util, error, osutil, revset, similar, encoding
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
def nochangesfound(ui, secretlist=None):
    '''report no changes for push/pull'''
    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        # Mention how many changesets were skipped for being secret.
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
20
20
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # CR and LF would corrupt the dirstate/manifest line format.
    for forbidden in ('\r', '\n'):
        if forbidden in f:
            raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
25
25
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
37
37
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames', 'warn')
    lowered = raw.lower()
    asbool = util.parsebool(raw)
    # Windows always aborts on non-portable names.
    abort = os.name == 'nt' or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    # Anything that is neither a boolean nor one of the keywords is a
    # configuration error.
    if asbool is None and not abort and lowered not in ('warn', 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
50
50
class casecollisionauditor(object):
    """Detect case-folding collisions among tracked filenames.

    Seeded with an iterable of existing names; each call with a new name
    warns (or aborts, per the `abort` flag) when its case-folded form
    already maps to a different spelling.
    """
    def __init__(self, ui, abort, existingiter):
        self._ui = ui
        self._abort = abort
        # case-folded name -> original spelling
        self._map = dict((encoding.lower(f), f) for f in existingiter)

    def __call__(self, f):
        folded = encoding.lower(f)
        known = self._map
        # A different spelling under the same folded key is a collision.
        if known.get(folded, f) != f:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        known[folded] = f
68
68
class pathauditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - ends with a directory separator
    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository (a callback can be used to approve
      some nested repositories, e.g., subrepositories)
    '''

    def __init__(self, root, callback=None):
        self.audited = set()
        self.auditeddir = set()
        self.root = root
        self.callback = callback
        # On case-insensitive filesystems, cache case-folded names so
        # different spellings of the same file hit the same cache entry.
        if os.path.lexists(root) and not util.checkcase(root):
            self.normcase = util.normcase
        else:
            self.normcase = lambda x: x

    def __call__(self, path):
        '''Check the relative path.
        path may contain a pattern (e.g. foodir/**.txt)'''

        path = util.localpath(path)
        normpath = self.normcase(path)
        if normpath in self.audited:
            return
        # AIX ignores "/" at end of path, others raise EISDIR.
        if util.endswithsep(path):
            raise util.Abort(_("path ends in directory separator: %s") % path)
        components = util.splitpath(path)
        # Reject absolute/drive-rooted paths, top-level .hg and "..".
        if (os.path.splitdrive(path)[0]
            or components[0].lower() in ('.hg', '.hg.', '')
            or os.pardir in components):
            raise util.Abort(_("path contains illegal component: %s") % path)
        if '.hg' in path.lower():
            # A .hg component below the top level means a nested repo.
            folded = [c.lower() for c in components]
            for marker in ('.hg', '.hg.'):
                if marker in folded[1:]:
                    pos = folded.index(marker)
                    base = os.path.join(*components[:pos])
                    raise util.Abort(_("path '%s' is inside nested repo %r")
                                     % (path, base))

        normparts = util.splitpath(normpath)
        assert len(components) == len(normparts)

        # Walk each ancestor directory (deepest first) checking for
        # symlinks and nested repositories.
        components.pop()
        normparts.pop()
        checkeddirs = []
        while components:
            prefix = os.sep.join(components)
            normprefix = os.sep.join(normparts)
            if normprefix in self.auditeddir:
                break
            fspath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(fspath)
            except OSError as err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored for patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise util.Abort(
                        _('path %r traverses symbolic link %r')
                        % (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(fspath, '.hg'))):
                    if not self.callback or not self.callback(fspath):
                        raise util.Abort(_("path '%s' is inside nested repo %r") %
                                         (path, prefix))
            checkeddirs.append(normprefix)
            components.pop()
            normparts.pop()

        self.audited.add(normpath)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(checkeddirs)
154
154
class abstractopener(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def read(self, path):
        """Return the full contents of path, opened in binary mode."""
        fh = self(path, 'rb')
        try:
            return fh.read()
        finally:
            fh.close()

    def write(self, path, data):
        """Replace the contents of path with data (binary mode)."""
        fh = self(path, 'wb')
        try:
            return fh.write(data)
        finally:
            fh.close()

    def append(self, path, data):
        """Append data to the end of path (binary mode)."""
        fh = self(path, 'ab')
        try:
            return fh.write(data)
        finally:
            fh.close()
182
182
class opener(abstractopener):
    '''Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True):
        self.base = base
        self._audit = audit
        if audit:
            self.auditor = pathauditor(base)
        else:
            self.auditor = util.always
        self.createmode = None
        self._trustnlink = None

    @util.propertycache
    def _cansymlink(self):
        # Computed once per instance: can this filesystem hold symlinks?
        return util.checklink(self.base)

    def _fixfilemode(self, name):
        # Apply the configured create mode, minus any execute-by-default.
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        if self._audit:
            problem = util.checkosfilename(path)
            if problem:
                raise util.Abort("%s: %r" % (problem, path))
        self.auditor(path)
        abspath = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        dirname, basename = os.path.split(abspath)
        # If basename is empty, then the path is malformed because it points
        # to a directory. Let the posixfile() call below raise IOError.
        if basename and mode not in ('r', 'rb'):
            if atomictemp:
                if not os.path.isdir(dirname):
                    util.makedirs(dirname, self.createmode)
                return util.atomictempfile(abspath, mode, self.createmode)
            try:
                if 'w' in mode:
                    util.unlink(abspath)
                    nlink = 0
                else:
                    # nlinks() may behave differently for files on Windows
                    # shares if the file is open.
                    fd = util.posixfile(abspath)
                    nlink = util.nlinks(abspath)
                    if nlink < 1:
                        nlink = 2 # force mktempcopy (issue1922)
                    fd.close()
            except (OSError, IOError) as inst:
                if inst.errno != errno.ENOENT:
                    raise
                nlink = 0
            if not os.path.isdir(dirname):
                util.makedirs(dirname, self.createmode)
            if nlink > 0:
                # Break hardlinks (copy-on-write) before modifying a file
                # that has, or may have, other links.
                if self._trustnlink is None:
                    self._trustnlink = nlink > 1 or util.checknlink(abspath)
                if nlink > 1 or not self._trustnlink:
                    util.rename(util.mktempcopy(abspath), abspath)
        fh = util.posixfile(abspath, mode)
        if nlink == 0:
            self._fixfilemode(abspath)
        return fh

    def symlink(self, src, dst):
        self.auditor(dst)
        linkname = os.path.join(self.base, dst)
        # Replace any existing file/link at the destination.
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            util.makedirs(dirname, self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # No symlink support: store the link target as file content.
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

    def audit(self, path):
        self.auditor(path)
282
282
283 class filteropener(abstractopener):
283 class filteropener(abstractopener):
284 '''Wrapper opener for filtering filenames with a function.'''
284 '''Wrapper opener for filtering filenames with a function.'''
285
285
286 def __init__(self, opener, filter):
286 def __init__(self, opener, filter):
287 self._filter = filter
287 self._filter = filter
288 self._orig = opener
288 self._orig = opener
289
289
290 def __call__(self, path, *args, **kwargs):
290 def __call__(self, path, *args, **kwargs):
291 return self._orig(self._filter(path), *args, **kwargs)
291 return self._orig(self._filter(path), *args, **kwargs)
292
292
293 def canonpath(root, cwd, myname, auditor=None):
293 def canonpath(root, cwd, myname, auditor=None):
294 '''return the canonical path of myname, given cwd and root'''
294 '''return the canonical path of myname, given cwd and root'''
295 if util.endswithsep(root):
295 if util.endswithsep(root):
296 rootsep = root
296 rootsep = root
297 else:
297 else:
298 rootsep = root + os.sep
298 rootsep = root + os.sep
299 name = myname
299 name = myname
300 if not os.path.isabs(name):
300 if not os.path.isabs(name):
301 name = os.path.join(root, cwd, name)
301 name = os.path.join(root, cwd, name)
302 name = os.path.normpath(name)
302 name = os.path.normpath(name)
303 if auditor is None:
303 if auditor is None:
304 auditor = pathauditor(root)
304 auditor = pathauditor(root)
305 if name != rootsep and name.startswith(rootsep):
305 if name != rootsep and name.startswith(rootsep):
306 name = name[len(rootsep):]
306 name = name[len(rootsep):]
307 auditor(name)
307 auditor(name)
308 return util.pconvert(name)
308 return util.pconvert(name)
309 elif name == root:
309 elif name == root:
310 return ''
310 return ''
311 else:
311 else:
312 # Determine whether `name' is in the hierarchy at or beneath `root',
312 # Determine whether `name' is in the hierarchy at or beneath `root',
313 # by iterating name=dirname(name) until that causes no change (can't
313 # by iterating name=dirname(name) until that causes no change (can't
314 # check name == '/', because that doesn't work on windows). For each
314 # check name == '/', because that doesn't work on windows). For each
315 # `name', compare dev/inode numbers. If they match, the list `rel'
315 # `name', compare dev/inode numbers. If they match, the list `rel'
316 # holds the reversed list of components making up the relative file
316 # holds the reversed list of components making up the relative file
317 # name we want.
317 # name we want.
318 root_st = os.stat(root)
318 root_st = os.stat(root)
319 rel = []
319 rel = []
320 while True:
320 while True:
321 try:
321 try:
322 name_st = os.stat(name)
322 name_st = os.stat(name)
323 except OSError:
323 except OSError:
324 name_st = None
324 name_st = None
325 if name_st and util.samestat(name_st, root_st):
325 if name_st and util.samestat(name_st, root_st):
326 if not rel:
326 if not rel:
327 # name was actually the same as root (maybe a symlink)
327 # name was actually the same as root (maybe a symlink)
328 return ''
328 return ''
329 rel.reverse()
329 rel.reverse()
330 name = os.path.join(*rel)
330 name = os.path.join(*rel)
331 auditor(name)
331 auditor(name)
332 return util.pconvert(name)
332 return util.pconvert(name)
333 dirname, basename = os.path.split(name)
333 dirname, basename = os.path.split(name)
334 rel.append(basename)
334 rel.append(basename)
335 if dirname == name:
335 if dirname == name:
336 break
336 break
337 name = dirname
337 name = dirname
338
338
339 raise util.Abort('%s not under root' % myname)
339 raise util.Abort('%s not under root' % myname)
340
340
341 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
341 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
342 '''yield every hg repository under path, recursively.'''
342 '''yield every hg repository under path, recursively.'''
343 def errhandler(err):
343 def errhandler(err):
344 if err.filename == path:
344 if err.filename == path:
345 raise err
345 raise err
346 samestat = getattr(os.path, 'samestat', None)
346 samestat = getattr(os.path, 'samestat', None)
347 if followsym and samestat is not None:
347 if followsym and samestat is not None:
348 def adddir(dirlst, dirname):
348 def adddir(dirlst, dirname):
349 match = False
349 match = False
350 dirstat = os.stat(dirname)
350 dirstat = os.stat(dirname)
351 for lstdirstat in dirlst:
351 for lstdirstat in dirlst:
352 if samestat(dirstat, lstdirstat):
352 if samestat(dirstat, lstdirstat):
353 match = True
353 match = True
354 break
354 break
355 if not match:
355 if not match:
356 dirlst.append(dirstat)
356 dirlst.append(dirstat)
357 return not match
357 return not match
358 else:
358 else:
359 followsym = False
359 followsym = False
360
360
361 if (seen_dirs is None) and followsym:
361 if (seen_dirs is None) and followsym:
362 seen_dirs = []
362 seen_dirs = []
363 adddir(seen_dirs, path)
363 adddir(seen_dirs, path)
364 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
364 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
365 dirs.sort()
365 dirs.sort()
366 if '.hg' in dirs:
366 if '.hg' in dirs:
367 yield root # found a repository
367 yield root # found a repository
368 qroot = os.path.join(root, '.hg', 'patches')
368 qroot = os.path.join(root, '.hg', 'patches')
369 if os.path.isdir(os.path.join(qroot, '.hg')):
369 if os.path.isdir(os.path.join(qroot, '.hg')):
370 yield qroot # we have a patch queue repo here
370 yield qroot # we have a patch queue repo here
371 if recurse:
371 if recurse:
372 # avoid recursing inside the .hg directory
372 # avoid recursing inside the .hg directory
373 dirs.remove('.hg')
373 dirs.remove('.hg')
374 else:
374 else:
375 dirs[:] = [] # don't descend further
375 dirs[:] = [] # don't descend further
376 elif followsym:
376 elif followsym:
377 newdirs = []
377 newdirs = []
378 for d in dirs:
378 for d in dirs:
379 fname = os.path.join(root, d)
379 fname = os.path.join(root, d)
380 if adddir(seen_dirs, fname):
380 if adddir(seen_dirs, fname):
381 if os.path.islink(fname):
381 if os.path.islink(fname):
382 for hgname in walkrepos(fname, True, seen_dirs):
382 for hgname in walkrepos(fname, True, seen_dirs):
383 yield hgname
383 yield hgname
384 else:
384 else:
385 newdirs.append(d)
385 newdirs.append(d)
386 dirs[:] = newdirs
386 dirs[:] = newdirs
387
387
388 def osrcpath():
388 def osrcpath():
389 '''return default os-specific hgrc search path'''
389 '''return default os-specific hgrc search path'''
390 path = systemrcpath()
390 path = systemrcpath()
391 path.extend(userrcpath())
391 path.extend(userrcpath())
392 path = [os.path.normpath(f) for f in path]
392 path = [os.path.normpath(f) for f in path]
393 return path
393 return path
394
394
395 _rcpath = None
395 _rcpath = None
396
396
397 def rcpath():
397 def rcpath():
398 '''return hgrc search path. if env var HGRCPATH is set, use it.
398 '''return hgrc search path. if env var HGRCPATH is set, use it.
399 for each item in path, if directory, use files ending in .rc,
399 for each item in path, if directory, use files ending in .rc,
400 else use item.
400 else use item.
401 make HGRCPATH empty to only look in .hg/hgrc of current repo.
401 make HGRCPATH empty to only look in .hg/hgrc of current repo.
402 if no HGRCPATH, use default os-specific path.'''
402 if no HGRCPATH, use default os-specific path.'''
403 global _rcpath
403 global _rcpath
404 if _rcpath is None:
404 if _rcpath is None:
405 if 'HGRCPATH' in os.environ:
405 if 'HGRCPATH' in os.environ:
406 _rcpath = []
406 _rcpath = []
407 for p in os.environ['HGRCPATH'].split(os.pathsep):
407 for p in os.environ['HGRCPATH'].split(os.pathsep):
408 if not p:
408 if not p:
409 continue
409 continue
410 p = util.expandpath(p)
410 p = util.expandpath(p)
411 if os.path.isdir(p):
411 if os.path.isdir(p):
412 for f, kind in osutil.listdir(p):
412 for f, kind in osutil.listdir(p):
413 if f.endswith('.rc'):
413 if f.endswith('.rc'):
414 _rcpath.append(os.path.join(p, f))
414 _rcpath.append(os.path.join(p, f))
415 else:
415 else:
416 _rcpath.append(p)
416 _rcpath.append(p)
417 else:
417 else:
418 _rcpath = osrcpath()
418 _rcpath = osrcpath()
419 return _rcpath
419 return _rcpath
420
420
421 if os.name != 'nt':
421 if os.name != 'nt':
422
422
423 def rcfiles(path):
423 def rcfiles(path):
424 rcs = [os.path.join(path, 'hgrc')]
424 rcs = [os.path.join(path, 'hgrc')]
425 rcdir = os.path.join(path, 'hgrc.d')
425 rcdir = os.path.join(path, 'hgrc.d')
426 try:
426 try:
427 rcs.extend([os.path.join(rcdir, f)
427 rcs.extend([os.path.join(rcdir, f)
428 for f, kind in osutil.listdir(rcdir)
428 for f, kind in osutil.listdir(rcdir)
429 if f.endswith(".rc")])
429 if f.endswith(".rc")])
430 except OSError:
430 except OSError:
431 pass
431 pass
432 return rcs
432 return rcs
433
433
434 def systemrcpath():
434 def systemrcpath():
435 path = []
435 path = []
436 # old mod_python does not set sys.argv
436 # old mod_python does not set sys.argv
437 if len(getattr(sys, 'argv', [])) > 0:
437 if len(getattr(sys, 'argv', [])) > 0:
438 p = os.path.dirname(os.path.dirname(sys.argv[0]))
438 p = os.path.dirname(os.path.dirname(sys.argv[0]))
439 path.extend(rcfiles(os.path.join(p, 'etc/mercurial')))
439 path.extend(rcfiles(os.path.join(p, 'etc/mercurial')))
440 path.extend(rcfiles('/etc/mercurial'))
440 path.extend(rcfiles('/etc/mercurial'))
441 return path
441 return path
442
442
443 def userrcpath():
443 def userrcpath():
444 return [os.path.expanduser('~/.hgrc')]
444 return [os.path.expanduser('~/.hgrc')]
445
445
446 else:
446 else:
447
447
448 _HKEY_LOCAL_MACHINE = 0x80000002L
448 _HKEY_LOCAL_MACHINE = 0x80000002L
449
449
450 def systemrcpath():
450 def systemrcpath():
451 '''return default os-specific hgrc search path'''
451 '''return default os-specific hgrc search path'''
452 rcpath = []
452 rcpath = []
453 filename = util.executablepath()
453 filename = util.executablepath()
454 # Use mercurial.ini found in directory with hg.exe
454 # Use mercurial.ini found in directory with hg.exe
455 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
455 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
456 if os.path.isfile(progrc):
456 if os.path.isfile(progrc):
457 rcpath.append(progrc)
457 rcpath.append(progrc)
458 return rcpath
458 return rcpath
459 # Use hgrc.d found in directory with hg.exe
459 # Use hgrc.d found in directory with hg.exe
460 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
460 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
461 if os.path.isdir(progrcd):
461 if os.path.isdir(progrcd):
462 for f, kind in osutil.listdir(progrcd):
462 for f, kind in osutil.listdir(progrcd):
463 if f.endswith('.rc'):
463 if f.endswith('.rc'):
464 rcpath.append(os.path.join(progrcd, f))
464 rcpath.append(os.path.join(progrcd, f))
465 return rcpath
465 return rcpath
466 # else look for a system rcpath in the registry
466 # else look for a system rcpath in the registry
467 value = util.lookupreg('SOFTWARE\\Mercurial', None,
467 value = util.lookupreg('SOFTWARE\\Mercurial', None,
468 _HKEY_LOCAL_MACHINE)
468 _HKEY_LOCAL_MACHINE)
469 if not isinstance(value, str) or not value:
469 if not isinstance(value, str) or not value:
470 return rcpath
470 return rcpath
471 value = value.replace('/', os.sep)
471 value = util.localpath(value)
472 for p in value.split(os.pathsep):
472 for p in value.split(os.pathsep):
473 if p.lower().endswith('mercurial.ini'):
473 if p.lower().endswith('mercurial.ini'):
474 rcpath.append(p)
474 rcpath.append(p)
475 elif os.path.isdir(p):
475 elif os.path.isdir(p):
476 for f, kind in osutil.listdir(p):
476 for f, kind in osutil.listdir(p):
477 if f.endswith('.rc'):
477 if f.endswith('.rc'):
478 rcpath.append(os.path.join(p, f))
478 rcpath.append(os.path.join(p, f))
479 return rcpath
479 return rcpath
480
480
481 def userrcpath():
481 def userrcpath():
482 '''return os-specific hgrc search path to the user dir'''
482 '''return os-specific hgrc search path to the user dir'''
483 home = os.path.expanduser('~')
483 home = os.path.expanduser('~')
484 path = [os.path.join(home, 'mercurial.ini'),
484 path = [os.path.join(home, 'mercurial.ini'),
485 os.path.join(home, '.hgrc')]
485 os.path.join(home, '.hgrc')]
486 userprofile = os.environ.get('USERPROFILE')
486 userprofile = os.environ.get('USERPROFILE')
487 if userprofile:
487 if userprofile:
488 path.append(os.path.join(userprofile, 'mercurial.ini'))
488 path.append(os.path.join(userprofile, 'mercurial.ini'))
489 path.append(os.path.join(userprofile, '.hgrc'))
489 path.append(os.path.join(userprofile, '.hgrc'))
490 return path
490 return path
491
491
492 def revsingle(repo, revspec, default='.'):
492 def revsingle(repo, revspec, default='.'):
493 if not revspec:
493 if not revspec:
494 return repo[default]
494 return repo[default]
495
495
496 l = revrange(repo, [revspec])
496 l = revrange(repo, [revspec])
497 if len(l) < 1:
497 if len(l) < 1:
498 raise util.Abort(_('empty revision set'))
498 raise util.Abort(_('empty revision set'))
499 return repo[l[-1]]
499 return repo[l[-1]]
500
500
501 def revpair(repo, revs):
501 def revpair(repo, revs):
502 if not revs:
502 if not revs:
503 return repo.dirstate.p1(), None
503 return repo.dirstate.p1(), None
504
504
505 l = revrange(repo, revs)
505 l = revrange(repo, revs)
506
506
507 if len(l) == 0:
507 if len(l) == 0:
508 return repo.dirstate.p1(), None
508 return repo.dirstate.p1(), None
509
509
510 if len(l) == 1:
510 if len(l) == 1:
511 return repo.lookup(l[0]), None
511 return repo.lookup(l[0]), None
512
512
513 return repo.lookup(l[0]), repo.lookup(l[-1])
513 return repo.lookup(l[0]), repo.lookup(l[-1])
514
514
515 _revrangesep = ':'
515 _revrangesep = ':'
516
516
517 def revrange(repo, revs):
517 def revrange(repo, revs):
518 """Yield revision as strings from a list of revision specifications."""
518 """Yield revision as strings from a list of revision specifications."""
519
519
520 def revfix(repo, val, defval):
520 def revfix(repo, val, defval):
521 if not val and val != 0 and defval is not None:
521 if not val and val != 0 and defval is not None:
522 return defval
522 return defval
523 return repo.changelog.rev(repo.lookup(val))
523 return repo.changelog.rev(repo.lookup(val))
524
524
525 seen, l = set(), []
525 seen, l = set(), []
526 for spec in revs:
526 for spec in revs:
527 # attempt to parse old-style ranges first to deal with
527 # attempt to parse old-style ranges first to deal with
528 # things like old-tag which contain query metacharacters
528 # things like old-tag which contain query metacharacters
529 try:
529 try:
530 if isinstance(spec, int):
530 if isinstance(spec, int):
531 seen.add(spec)
531 seen.add(spec)
532 l.append(spec)
532 l.append(spec)
533 continue
533 continue
534
534
535 if _revrangesep in spec:
535 if _revrangesep in spec:
536 start, end = spec.split(_revrangesep, 1)
536 start, end = spec.split(_revrangesep, 1)
537 start = revfix(repo, start, 0)
537 start = revfix(repo, start, 0)
538 end = revfix(repo, end, len(repo) - 1)
538 end = revfix(repo, end, len(repo) - 1)
539 step = start > end and -1 or 1
539 step = start > end and -1 or 1
540 for rev in xrange(start, end + step, step):
540 for rev in xrange(start, end + step, step):
541 if rev in seen:
541 if rev in seen:
542 continue
542 continue
543 seen.add(rev)
543 seen.add(rev)
544 l.append(rev)
544 l.append(rev)
545 continue
545 continue
546 elif spec and spec in repo: # single unquoted rev
546 elif spec and spec in repo: # single unquoted rev
547 rev = revfix(repo, spec, None)
547 rev = revfix(repo, spec, None)
548 if rev in seen:
548 if rev in seen:
549 continue
549 continue
550 seen.add(rev)
550 seen.add(rev)
551 l.append(rev)
551 l.append(rev)
552 continue
552 continue
553 except error.RepoLookupError:
553 except error.RepoLookupError:
554 pass
554 pass
555
555
556 # fall through to new-style queries if old-style fails
556 # fall through to new-style queries if old-style fails
557 m = revset.match(repo.ui, spec)
557 m = revset.match(repo.ui, spec)
558 for r in m(repo, range(len(repo))):
558 for r in m(repo, range(len(repo))):
559 if r not in seen:
559 if r not in seen:
560 l.append(r)
560 l.append(r)
561 seen.update(l)
561 seen.update(l)
562
562
563 return l
563 return l
564
564
565 def expandpats(pats):
565 def expandpats(pats):
566 if not util.expandglobs:
566 if not util.expandglobs:
567 return list(pats)
567 return list(pats)
568 ret = []
568 ret = []
569 for p in pats:
569 for p in pats:
570 kind, name = matchmod._patsplit(p, None)
570 kind, name = matchmod._patsplit(p, None)
571 if kind is None:
571 if kind is None:
572 try:
572 try:
573 globbed = glob.glob(name)
573 globbed = glob.glob(name)
574 except re.error:
574 except re.error:
575 globbed = [name]
575 globbed = [name]
576 if globbed:
576 if globbed:
577 ret.extend(globbed)
577 ret.extend(globbed)
578 continue
578 continue
579 ret.append(p)
579 ret.append(p)
580 return ret
580 return ret
581
581
582 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
582 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
583 if pats == ("",):
583 if pats == ("",):
584 pats = []
584 pats = []
585 if not globbed and default == 'relpath':
585 if not globbed and default == 'relpath':
586 pats = expandpats(pats or [])
586 pats = expandpats(pats or [])
587
587
588 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
588 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
589 default)
589 default)
590 def badfn(f, msg):
590 def badfn(f, msg):
591 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
591 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
592 m.bad = badfn
592 m.bad = badfn
593 return m
593 return m
594
594
595 def matchall(repo):
595 def matchall(repo):
596 return matchmod.always(repo.root, repo.getcwd())
596 return matchmod.always(repo.root, repo.getcwd())
597
597
598 def matchfiles(repo, files):
598 def matchfiles(repo, files):
599 return matchmod.exact(repo.root, repo.getcwd(), files)
599 return matchmod.exact(repo.root, repo.getcwd(), files)
600
600
601 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
601 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
602 if dry_run is None:
602 if dry_run is None:
603 dry_run = opts.get('dry_run')
603 dry_run = opts.get('dry_run')
604 if similarity is None:
604 if similarity is None:
605 similarity = float(opts.get('similarity') or 0)
605 similarity = float(opts.get('similarity') or 0)
606 # we'd use status here, except handling of symlinks and ignore is tricky
606 # we'd use status here, except handling of symlinks and ignore is tricky
607 added, unknown, deleted, removed = [], [], [], []
607 added, unknown, deleted, removed = [], [], [], []
608 audit_path = pathauditor(repo.root)
608 audit_path = pathauditor(repo.root)
609 m = match(repo[None], pats, opts)
609 m = match(repo[None], pats, opts)
610 for abs in repo.walk(m):
610 for abs in repo.walk(m):
611 target = repo.wjoin(abs)
611 target = repo.wjoin(abs)
612 good = True
612 good = True
613 try:
613 try:
614 audit_path(abs)
614 audit_path(abs)
615 except (OSError, util.Abort):
615 except (OSError, util.Abort):
616 good = False
616 good = False
617 rel = m.rel(abs)
617 rel = m.rel(abs)
618 exact = m.exact(abs)
618 exact = m.exact(abs)
619 if good and abs not in repo.dirstate:
619 if good and abs not in repo.dirstate:
620 unknown.append(abs)
620 unknown.append(abs)
621 if repo.ui.verbose or not exact:
621 if repo.ui.verbose or not exact:
622 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
622 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
623 elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
623 elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
624 or (os.path.isdir(target) and not os.path.islink(target))):
624 or (os.path.isdir(target) and not os.path.islink(target))):
625 deleted.append(abs)
625 deleted.append(abs)
626 if repo.ui.verbose or not exact:
626 if repo.ui.verbose or not exact:
627 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
627 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
628 # for finding renames
628 # for finding renames
629 elif repo.dirstate[abs] == 'r':
629 elif repo.dirstate[abs] == 'r':
630 removed.append(abs)
630 removed.append(abs)
631 elif repo.dirstate[abs] == 'a':
631 elif repo.dirstate[abs] == 'a':
632 added.append(abs)
632 added.append(abs)
633 copies = {}
633 copies = {}
634 if similarity > 0:
634 if similarity > 0:
635 for old, new, score in similar.findrenames(repo,
635 for old, new, score in similar.findrenames(repo,
636 added + unknown, removed + deleted, similarity):
636 added + unknown, removed + deleted, similarity):
637 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
637 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
638 repo.ui.status(_('recording removal of %s as rename to %s '
638 repo.ui.status(_('recording removal of %s as rename to %s '
639 '(%d%% similar)\n') %
639 '(%d%% similar)\n') %
640 (m.rel(old), m.rel(new), score * 100))
640 (m.rel(old), m.rel(new), score * 100))
641 copies[new] = old
641 copies[new] = old
642
642
643 if not dry_run:
643 if not dry_run:
644 wctx = repo[None]
644 wctx = repo[None]
645 wlock = repo.wlock()
645 wlock = repo.wlock()
646 try:
646 try:
647 wctx.forget(deleted)
647 wctx.forget(deleted)
648 wctx.add(unknown)
648 wctx.add(unknown)
649 for new, old in copies.iteritems():
649 for new, old in copies.iteritems():
650 wctx.copy(old, new)
650 wctx.copy(old, new)
651 finally:
651 finally:
652 wlock.release()
652 wlock.release()
653
653
654 def updatedir(ui, repo, patches, similarity=0):
654 def updatedir(ui, repo, patches, similarity=0):
655 '''Update dirstate after patch application according to metadata'''
655 '''Update dirstate after patch application according to metadata'''
656 if not patches:
656 if not patches:
657 return []
657 return []
658 copies = []
658 copies = []
659 removes = set()
659 removes = set()
660 cfiles = patches.keys()
660 cfiles = patches.keys()
661 cwd = repo.getcwd()
661 cwd = repo.getcwd()
662 if cwd:
662 if cwd:
663 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
663 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
664 for f in patches:
664 for f in patches:
665 gp = patches[f]
665 gp = patches[f]
666 if not gp:
666 if not gp:
667 continue
667 continue
668 if gp.op == 'RENAME':
668 if gp.op == 'RENAME':
669 copies.append((gp.oldpath, gp.path))
669 copies.append((gp.oldpath, gp.path))
670 removes.add(gp.oldpath)
670 removes.add(gp.oldpath)
671 elif gp.op == 'COPY':
671 elif gp.op == 'COPY':
672 copies.append((gp.oldpath, gp.path))
672 copies.append((gp.oldpath, gp.path))
673 elif gp.op == 'DELETE':
673 elif gp.op == 'DELETE':
674 removes.add(gp.path)
674 removes.add(gp.path)
675
675
676 wctx = repo[None]
676 wctx = repo[None]
677 for src, dst in copies:
677 for src, dst in copies:
678 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
678 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
679 if (not similarity) and removes:
679 if (not similarity) and removes:
680 wctx.remove(sorted(removes), True)
680 wctx.remove(sorted(removes), True)
681
681
682 for f in patches:
682 for f in patches:
683 gp = patches[f]
683 gp = patches[f]
684 if gp and gp.mode:
684 if gp and gp.mode:
685 islink, isexec = gp.mode
685 islink, isexec = gp.mode
686 dst = repo.wjoin(gp.path)
686 dst = repo.wjoin(gp.path)
687 # patch won't create empty files
687 # patch won't create empty files
688 if gp.op == 'ADD' and not os.path.lexists(dst):
688 if gp.op == 'ADD' and not os.path.lexists(dst):
689 flags = (isexec and 'x' or '') + (islink and 'l' or '')
689 flags = (isexec and 'x' or '') + (islink and 'l' or '')
690 repo.wwrite(gp.path, '', flags)
690 repo.wwrite(gp.path, '', flags)
691 util.setflags(dst, islink, isexec)
691 util.setflags(dst, islink, isexec)
692 addremove(repo, cfiles, similarity=similarity)
692 addremove(repo, cfiles, similarity=similarity)
693 files = patches.keys()
693 files = patches.keys()
694 files.extend([r for r in removes if r not in files])
694 files.extend([r for r in removes if r not in files])
695 return sorted(files)
695 return sorted(files)
696
696
697 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
697 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
698 """Update the dirstate to reflect the intent of copying src to dst. For
698 """Update the dirstate to reflect the intent of copying src to dst. For
699 different reasons it might not end with dst being marked as copied from src.
699 different reasons it might not end with dst being marked as copied from src.
700 """
700 """
701 origsrc = repo.dirstate.copied(src) or src
701 origsrc = repo.dirstate.copied(src) or src
702 if dst == origsrc: # copying back a copy?
702 if dst == origsrc: # copying back a copy?
703 if repo.dirstate[dst] not in 'mn' and not dryrun:
703 if repo.dirstate[dst] not in 'mn' and not dryrun:
704 repo.dirstate.normallookup(dst)
704 repo.dirstate.normallookup(dst)
705 else:
705 else:
706 if repo.dirstate[origsrc] == 'a' and origsrc == src:
706 if repo.dirstate[origsrc] == 'a' and origsrc == src:
707 if not ui.quiet:
707 if not ui.quiet:
708 ui.warn(_("%s has not been committed yet, so no copy "
708 ui.warn(_("%s has not been committed yet, so no copy "
709 "data will be stored for %s.\n")
709 "data will be stored for %s.\n")
710 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
710 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
711 if repo.dirstate[dst] in '?r' and not dryrun:
711 if repo.dirstate[dst] in '?r' and not dryrun:
712 wctx.add([dst])
712 wctx.add([dst])
713 elif not dryrun:
713 elif not dryrun:
714 wctx.copy(origsrc, dst)
714 wctx.copy(origsrc, dst)
715
715
716 def readrequires(opener, supported):
716 def readrequires(opener, supported):
717 '''Reads and parses .hg/requires and checks if all entries found
717 '''Reads and parses .hg/requires and checks if all entries found
718 are in the list of supported features.'''
718 are in the list of supported features.'''
719 requirements = set(opener.read("requires").splitlines())
719 requirements = set(opener.read("requires").splitlines())
720 missings = []
720 missings = []
721 for r in requirements:
721 for r in requirements:
722 if r not in supported:
722 if r not in supported:
723 if not r or not r[0].isalnum():
723 if not r or not r[0].isalnum():
724 raise error.RequirementError(_(".hg/requires file is corrupt"))
724 raise error.RequirementError(_(".hg/requires file is corrupt"))
725 missings.append(r)
725 missings.append(r)
726 missings.sort()
726 missings.sort()
727 if missings:
727 if missings:
728 raise error.RequirementError(_("unknown repository format: "
728 raise error.RequirementError(_("unknown repository format: "
729 "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
729 "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
730 return requirements
730 return requirements
731
731
732 class filecacheentry(object):
732 class filecacheentry(object):
733 def __init__(self, path):
733 def __init__(self, path):
734 self.path = path
734 self.path = path
735 self.cachestat = filecacheentry.stat(self.path)
735 self.cachestat = filecacheentry.stat(self.path)
736
736
737 if self.cachestat:
737 if self.cachestat:
738 self._cacheable = self.cachestat.cacheable()
738 self._cacheable = self.cachestat.cacheable()
739 else:
739 else:
740 # None means we don't know yet
740 # None means we don't know yet
741 self._cacheable = None
741 self._cacheable = None
742
742
743 def refresh(self):
743 def refresh(self):
744 if self.cacheable():
744 if self.cacheable():
745 self.cachestat = filecacheentry.stat(self.path)
745 self.cachestat = filecacheentry.stat(self.path)
746
746
747 def cacheable(self):
747 def cacheable(self):
748 if self._cacheable is not None:
748 if self._cacheable is not None:
749 return self._cacheable
749 return self._cacheable
750
750
751 # we don't know yet, assume it is for now
751 # we don't know yet, assume it is for now
752 return True
752 return True
753
753
754 def changed(self):
754 def changed(self):
755 # no point in going further if we can't cache it
755 # no point in going further if we can't cache it
756 if not self.cacheable():
756 if not self.cacheable():
757 return True
757 return True
758
758
759 newstat = filecacheentry.stat(self.path)
759 newstat = filecacheentry.stat(self.path)
760
760
761 # we may not know if it's cacheable yet, check again now
761 # we may not know if it's cacheable yet, check again now
762 if newstat and self._cacheable is None:
762 if newstat and self._cacheable is None:
763 self._cacheable = newstat.cacheable()
763 self._cacheable = newstat.cacheable()
764
764
765 # check again
765 # check again
766 if not self._cacheable:
766 if not self._cacheable:
767 return True
767 return True
768
768
769 if self.cachestat != newstat:
769 if self.cachestat != newstat:
770 self.cachestat = newstat
770 self.cachestat = newstat
771 return True
771 return True
772 else:
772 else:
773 return False
773 return False
774
774
775 @staticmethod
775 @staticmethod
776 def stat(path):
776 def stat(path):
777 try:
777 try:
778 return util.cachestat(path)
778 return util.cachestat(path)
779 except OSError, e:
779 except OSError, e:
780 if e.errno != errno.ENOENT:
780 if e.errno != errno.ENOENT:
781 raise
781 raise
782
782
783 class filecache(object):
783 class filecache(object):
784 '''A property like decorator that tracks a file under .hg/ for updates.
784 '''A property like decorator that tracks a file under .hg/ for updates.
785
785
786 Records stat info when called in _filecache.
786 Records stat info when called in _filecache.
787
787
788 On subsequent calls, compares old stat info with new info, and recreates
788 On subsequent calls, compares old stat info with new info, and recreates
789 the object when needed, updating the new stat info in _filecache.
789 the object when needed, updating the new stat info in _filecache.
790
790
791 Mercurial either atomic renames or appends for files under .hg,
791 Mercurial either atomic renames or appends for files under .hg,
792 so to ensure the cache is reliable we need the filesystem to be able
792 so to ensure the cache is reliable we need the filesystem to be able
793 to tell us if a file has been replaced. If it can't, we fallback to
793 to tell us if a file has been replaced. If it can't, we fallback to
794 recreating the object on every call (essentially the same behaviour as
794 recreating the object on every call (essentially the same behaviour as
795 propertycache).'''
795 propertycache).'''
796 def __init__(self, path, instore=False):
796 def __init__(self, path, instore=False):
797 self.path = path
797 self.path = path
798 self.instore = instore
798 self.instore = instore
799
799
800 def __call__(self, func):
800 def __call__(self, func):
801 self.func = func
801 self.func = func
802 self.name = func.__name__
802 self.name = func.__name__
803 return self
803 return self
804
804
805 def __get__(self, obj, type=None):
805 def __get__(self, obj, type=None):
806 entry = obj._filecache.get(self.name)
806 entry = obj._filecache.get(self.name)
807
807
808 if entry:
808 if entry:
809 if entry.changed():
809 if entry.changed():
810 entry.obj = self.func(obj)
810 entry.obj = self.func(obj)
811 else:
811 else:
812 path = self.instore and obj.sjoin(self.path) or obj.join(self.path)
812 path = self.instore and obj.sjoin(self.path) or obj.join(self.path)
813
813
814 # We stat -before- creating the object so our cache doesn't lie if
814 # We stat -before- creating the object so our cache doesn't lie if
815 # a writer modified between the time we read and stat
815 # a writer modified between the time we read and stat
816 entry = filecacheentry(path)
816 entry = filecacheentry(path)
817 entry.obj = self.func(obj)
817 entry.obj = self.func(obj)
818
818
819 obj._filecache[self.name] = entry
819 obj._filecache[self.name] = entry
820
820
821 setattr(obj, self.name, entry.obj)
821 setattr(obj, self.name, entry.obj)
822 return entry.obj
822 return entry.obj
@@ -1,657 +1,657 b''
1 $ check_code="$TESTDIR"/../contrib/check-code.py
1 $ check_code="$TESTDIR"/../contrib/check-code.py
2 $ cd "$TESTDIR"/..
2 $ cd "$TESTDIR"/..
3
3
4 $ "$check_code" `hg manifest` || echo 'FAILURE IS NOT AN OPTION!!!'
4 $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
5
5
6 $ "$check_code" --warnings --nolineno --per-file=0 `hg manifest`
6 $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0
7 contrib/check-code.py:0:
7 contrib/check-code.py:0:
8 > # (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', "don't use underbars in identifiers"),
8 > # (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', "don't use underbars in identifiers"),
9 warning: line over 80 characters
9 warning: line over 80 characters
10 contrib/perf.py:0:
10 contrib/perf.py:0:
11 > except:
11 > except:
12 warning: naked except clause
12 warning: naked except clause
13 contrib/perf.py:0:
13 contrib/perf.py:0:
14 > #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, False))))
14 > #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, False))))
15 warning: line over 80 characters
15 warning: line over 80 characters
16 contrib/perf.py:0:
16 contrib/perf.py:0:
17 > except:
17 > except:
18 warning: naked except clause
18 warning: naked except clause
19 contrib/setup3k.py:0:
19 contrib/setup3k.py:0:
20 > except:
20 > except:
21 warning: naked except clause
21 warning: naked except clause
22 contrib/setup3k.py:0:
22 contrib/setup3k.py:0:
23 > except:
23 > except:
24 warning: naked except clause
24 warning: naked except clause
25 contrib/setup3k.py:0:
25 contrib/setup3k.py:0:
26 > except:
26 > except:
27 warning: naked except clause
27 warning: naked except clause
28 warning: naked except clause
28 warning: naked except clause
29 warning: naked except clause
29 warning: naked except clause
30 contrib/shrink-revlog.py:0:
30 contrib/shrink-revlog.py:0:
31 > '(You can delete those files when you are satisfied that your\n'
31 > '(You can delete those files when you are satisfied that your\n'
32 warning: line over 80 characters
32 warning: line over 80 characters
33 contrib/shrink-revlog.py:0:
33 contrib/shrink-revlog.py:0:
34 > ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
34 > ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
35 warning: line over 80 characters
35 warning: line over 80 characters
36 contrib/shrink-revlog.py:0:
36 contrib/shrink-revlog.py:0:
37 > [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
37 > [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
38 warning: line over 80 characters
38 warning: line over 80 characters
39 contrib/shrink-revlog.py:0:
39 contrib/shrink-revlog.py:0:
40 > except:
40 > except:
41 warning: naked except clause
41 warning: naked except clause
42 doc/gendoc.py:0:
42 doc/gendoc.py:0:
43 > "together with Mercurial. Help for other extensions is available "
43 > "together with Mercurial. Help for other extensions is available "
44 warning: line over 80 characters
44 warning: line over 80 characters
45 hgext/bugzilla.py:0:
45 hgext/bugzilla.py:0:
46 > raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
46 > raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
47 warning: line over 80 characters
47 warning: line over 80 characters
48 hgext/bugzilla.py:0:
48 hgext/bugzilla.py:0:
49 > bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
49 > bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
50 warning: line over 80 characters
50 warning: line over 80 characters
51 hgext/convert/__init__.py:0:
51 hgext/convert/__init__.py:0:
52 > ('', 'ancestors', '', _('show current changeset in ancestor branches')),
52 > ('', 'ancestors', '', _('show current changeset in ancestor branches')),
53 warning: line over 80 characters
53 warning: line over 80 characters
54 hgext/convert/bzr.py:0:
54 hgext/convert/bzr.py:0:
55 > except:
55 > except:
56 warning: naked except clause
56 warning: naked except clause
57 hgext/convert/common.py:0:
57 hgext/convert/common.py:0:
58 > except:
58 > except:
59 warning: naked except clause
59 warning: naked except clause
60 hgext/convert/common.py:0:
60 hgext/convert/common.py:0:
61 > except:
61 > except:
62 warning: naked except clause
62 warning: naked except clause
63 warning: naked except clause
63 warning: naked except clause
64 hgext/convert/convcmd.py:0:
64 hgext/convert/convcmd.py:0:
65 > except:
65 > except:
66 warning: naked except clause
66 warning: naked except clause
67 hgext/convert/cvs.py:0:
67 hgext/convert/cvs.py:0:
68 > # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
68 > # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
69 warning: line over 80 characters
69 warning: line over 80 characters
70 hgext/convert/cvsps.py:0:
70 hgext/convert/cvsps.py:0:
71 > assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
71 > assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
72 warning: line over 80 characters
72 warning: line over 80 characters
73 hgext/convert/cvsps.py:0:
73 hgext/convert/cvsps.py:0:
74 > ui.write('Ancestors: %s\n' % (','.join(r)))
74 > ui.write('Ancestors: %s\n' % (','.join(r)))
75 warning: unwrapped ui message
75 warning: unwrapped ui message
76 hgext/convert/cvsps.py:0:
76 hgext/convert/cvsps.py:0:
77 > ui.write('Parent: %d\n' % cs.parents[0].id)
77 > ui.write('Parent: %d\n' % cs.parents[0].id)
78 warning: unwrapped ui message
78 warning: unwrapped ui message
79 hgext/convert/cvsps.py:0:
79 hgext/convert/cvsps.py:0:
80 > ui.write('Parents: %s\n' %
80 > ui.write('Parents: %s\n' %
81 warning: unwrapped ui message
81 warning: unwrapped ui message
82 hgext/convert/cvsps.py:0:
82 hgext/convert/cvsps.py:0:
83 > except:
83 > except:
84 warning: naked except clause
84 warning: naked except clause
85 hgext/convert/cvsps.py:0:
85 hgext/convert/cvsps.py:0:
86 > ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
86 > ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
87 warning: unwrapped ui message
87 warning: unwrapped ui message
88 hgext/convert/cvsps.py:0:
88 hgext/convert/cvsps.py:0:
89 > ui.write('Author: %s\n' % cs.author)
89 > ui.write('Author: %s\n' % cs.author)
90 warning: unwrapped ui message
90 warning: unwrapped ui message
91 hgext/convert/cvsps.py:0:
91 hgext/convert/cvsps.py:0:
92 > ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
92 > ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
93 warning: unwrapped ui message
93 warning: unwrapped ui message
94 hgext/convert/cvsps.py:0:
94 hgext/convert/cvsps.py:0:
95 > ui.write('Date: %s\n' % util.datestr(cs.date,
95 > ui.write('Date: %s\n' % util.datestr(cs.date,
96 warning: unwrapped ui message
96 warning: unwrapped ui message
97 hgext/convert/cvsps.py:0:
97 hgext/convert/cvsps.py:0:
98 > ui.write('Log:\n')
98 > ui.write('Log:\n')
99 warning: unwrapped ui message
99 warning: unwrapped ui message
100 hgext/convert/cvsps.py:0:
100 hgext/convert/cvsps.py:0:
101 > ui.write('Members: \n')
101 > ui.write('Members: \n')
102 warning: unwrapped ui message
102 warning: unwrapped ui message
103 hgext/convert/cvsps.py:0:
103 hgext/convert/cvsps.py:0:
104 > ui.write('PatchSet %d \n' % cs.id)
104 > ui.write('PatchSet %d \n' % cs.id)
105 warning: unwrapped ui message
105 warning: unwrapped ui message
106 hgext/convert/cvsps.py:0:
106 hgext/convert/cvsps.py:0:
107 > ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
107 > ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
108 warning: unwrapped ui message
108 warning: unwrapped ui message
109 hgext/convert/git.py:0:
109 hgext/convert/git.py:0:
110 > except:
110 > except:
111 warning: naked except clause
111 warning: naked except clause
112 hgext/convert/git.py:0:
112 hgext/convert/git.py:0:
113 > fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
113 > fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
114 warning: line over 80 characters
114 warning: line over 80 characters
115 hgext/convert/hg.py:0:
115 hgext/convert/hg.py:0:
116 > # detect missing revlogs and abort on errors or populate self.ignored
116 > # detect missing revlogs and abort on errors or populate self.ignored
117 warning: line over 80 characters
117 warning: line over 80 characters
118 hgext/convert/hg.py:0:
118 hgext/convert/hg.py:0:
119 > except:
119 > except:
120 warning: naked except clause
120 warning: naked except clause
121 warning: naked except clause
121 warning: naked except clause
122 hgext/convert/hg.py:0:
122 hgext/convert/hg.py:0:
123 > except:
123 > except:
124 warning: naked except clause
124 warning: naked except clause
125 hgext/convert/monotone.py:0:
125 hgext/convert/monotone.py:0:
126 > except:
126 > except:
127 warning: naked except clause
127 warning: naked except clause
128 hgext/convert/monotone.py:0:
128 hgext/convert/monotone.py:0:
129 > except:
129 > except:
130 warning: naked except clause
130 warning: naked except clause
131 hgext/convert/subversion.py:0:
131 hgext/convert/subversion.py:0:
132 > raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
132 > raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
133 warning: line over 80 characters
133 warning: line over 80 characters
134 hgext/convert/subversion.py:0:
134 hgext/convert/subversion.py:0:
135 > except:
135 > except:
136 warning: naked except clause
136 warning: naked except clause
137 hgext/convert/subversion.py:0:
137 hgext/convert/subversion.py:0:
138 > args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
138 > args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
139 warning: line over 80 characters
139 warning: line over 80 characters
140 hgext/convert/subversion.py:0:
140 hgext/convert/subversion.py:0:
141 > self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
141 > self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
142 warning: line over 80 characters
142 warning: line over 80 characters
143 hgext/convert/subversion.py:0:
143 hgext/convert/subversion.py:0:
144 > except:
144 > except:
145 warning: naked except clause
145 warning: naked except clause
146 hgext/convert/subversion.py:0:
146 hgext/convert/subversion.py:0:
147 > def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
147 > def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
148 warning: line over 80 characters
148 warning: line over 80 characters
149 hgext/eol.py:0:
149 hgext/eol.py:0:
150 > if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
150 > if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
151 warning: line over 80 characters
151 warning: line over 80 characters
152 warning: line over 80 characters
152 warning: line over 80 characters
153 hgext/gpg.py:0:
153 hgext/gpg.py:0:
154 > except:
154 > except:
155 warning: naked except clause
155 warning: naked except clause
156 hgext/hgcia.py:0:
156 hgext/hgcia.py:0:
157 > except:
157 > except:
158 warning: naked except clause
158 warning: naked except clause
159 hgext/hgk.py:0:
159 hgext/hgk.py:0:
160 > ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
160 > ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
161 warning: line over 80 characters
161 warning: line over 80 characters
162 hgext/hgk.py:0:
162 hgext/hgk.py:0:
163 > ui.write("parent %s\n" % p)
163 > ui.write("parent %s\n" % p)
164 warning: unwrapped ui message
164 warning: unwrapped ui message
165 hgext/hgk.py:0:
165 hgext/hgk.py:0:
166 > ui.write('k=%s\nv=%s\n' % (name, value))
166 > ui.write('k=%s\nv=%s\n' % (name, value))
167 warning: unwrapped ui message
167 warning: unwrapped ui message
168 hgext/hgk.py:0:
168 hgext/hgk.py:0:
169 > ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
169 > ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
170 warning: unwrapped ui message
170 warning: unwrapped ui message
171 hgext/hgk.py:0:
171 hgext/hgk.py:0:
172 > ui.write("branch %s\n\n" % ctx.branch())
172 > ui.write("branch %s\n\n" % ctx.branch())
173 warning: unwrapped ui message
173 warning: unwrapped ui message
174 hgext/hgk.py:0:
174 hgext/hgk.py:0:
175 > ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
175 > ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
176 warning: unwrapped ui message
176 warning: unwrapped ui message
177 hgext/hgk.py:0:
177 hgext/hgk.py:0:
178 > ui.write("revision %d\n" % ctx.rev())
178 > ui.write("revision %d\n" % ctx.rev())
179 warning: unwrapped ui message
179 warning: unwrapped ui message
180 hgext/hgk.py:0:
180 hgext/hgk.py:0:
181 > ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
181 > ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
182 warning: line over 80 characters
182 warning: line over 80 characters
183 warning: unwrapped ui message
183 warning: unwrapped ui message
184 hgext/highlight/__init__.py:0:
184 hgext/highlight/__init__.py:0:
185 > extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
185 > extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
186 warning: line over 80 characters
186 warning: line over 80 characters
187 hgext/highlight/__init__.py:0:
187 hgext/highlight/__init__.py:0:
188 > return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
188 > return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
189 warning: line over 80 characters
189 warning: line over 80 characters
190 hgext/inotify/__init__.py:0:
190 hgext/inotify/__init__.py:0:
191 > if self._inotifyon and not ignored and not subrepos and not self._dirty:
191 > if self._inotifyon and not ignored and not subrepos and not self._dirty:
192 warning: line over 80 characters
192 warning: line over 80 characters
193 hgext/inotify/server.py:0:
193 hgext/inotify/server.py:0:
194 > except:
194 > except:
195 warning: naked except clause
195 warning: naked except clause
196 hgext/inotify/server.py:0:
196 hgext/inotify/server.py:0:
197 > except:
197 > except:
198 warning: naked except clause
198 warning: naked except clause
199 hgext/keyword.py:0:
199 hgext/keyword.py:0:
200 > ui.note("hg ci -m '%s'\n" % msg)
200 > ui.note("hg ci -m '%s'\n" % msg)
201 warning: unwrapped ui message
201 warning: unwrapped ui message
202 hgext/largefiles/overrides.py:0:
202 hgext/largefiles/overrides.py:0:
203 > # When we call orig below it creates the standins but we don't add them
203 > # When we call orig below it creates the standins but we don't add them
204 warning: line over 80 characters
204 warning: line over 80 characters
205 hgext/largefiles/reposetup.py:0:
205 hgext/largefiles/reposetup.py:0:
206 > if os.path.exists(self.wjoin(lfutil.standin(lfile))):
206 > if os.path.exists(self.wjoin(lfutil.standin(lfile))):
207 warning: line over 80 characters
207 warning: line over 80 characters
208 hgext/mq.py:0:
208 hgext/mq.py:0:
209 > raise util.Abort(_("%s does not have a parent recorded" % root))
209 > raise util.Abort(_("%s does not have a parent recorded" % root))
210 warning: line over 80 characters
210 warning: line over 80 characters
211 hgext/mq.py:0:
211 hgext/mq.py:0:
212 > raise util.Abort(_("cannot push --exact with applied patches"))
212 > raise util.Abort(_("cannot push --exact with applied patches"))
213 warning: line over 80 characters
213 warning: line over 80 characters
214 hgext/mq.py:0:
214 hgext/mq.py:0:
215 > raise util.Abort(_("cannot use --exact and --move together"))
215 > raise util.Abort(_("cannot use --exact and --move together"))
216 warning: line over 80 characters
216 warning: line over 80 characters
217 hgext/mq.py:0:
217 hgext/mq.py:0:
218 > self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
218 > self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
219 warning: line over 80 characters
219 warning: line over 80 characters
220 hgext/mq.py:0:
220 hgext/mq.py:0:
221 > except:
221 > except:
222 warning: naked except clause
222 warning: naked except clause
223 warning: naked except clause
223 warning: naked except clause
224 hgext/mq.py:0:
224 hgext/mq.py:0:
225 > except:
225 > except:
226 warning: naked except clause
226 warning: naked except clause
227 warning: naked except clause
227 warning: naked except clause
228 warning: naked except clause
228 warning: naked except clause
229 warning: naked except clause
229 warning: naked except clause
230 hgext/mq.py:0:
230 hgext/mq.py:0:
231 > raise util.Abort(_('cannot mix -l/--list with options or arguments'))
231 > raise util.Abort(_('cannot mix -l/--list with options or arguments'))
232 warning: line over 80 characters
232 warning: line over 80 characters
233 hgext/mq.py:0:
233 hgext/mq.py:0:
234 > raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
234 > raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
235 warning: line over 80 characters
235 warning: line over 80 characters
236 hgext/mq.py:0:
236 hgext/mq.py:0:
237 > ('', 'move', None, _('reorder patch series and apply only the patch'))],
237 > ('', 'move', None, _('reorder patch series and apply only the patch'))],
238 warning: line over 80 characters
238 warning: line over 80 characters
239 hgext/mq.py:0:
239 hgext/mq.py:0:
240 > ('U', 'noupdate', None, _('do not update the new working directories')),
240 > ('U', 'noupdate', None, _('do not update the new working directories')),
241 warning: line over 80 characters
241 warning: line over 80 characters
242 hgext/mq.py:0:
242 hgext/mq.py:0:
243 > ('e', 'exact', None, _('apply the target patch to its recorded parent')),
243 > ('e', 'exact', None, _('apply the target patch to its recorded parent')),
244 warning: line over 80 characters
244 warning: line over 80 characters
245 hgext/mq.py:0:
245 hgext/mq.py:0:
246 > except:
246 > except:
247 warning: naked except clause
247 warning: naked except clause
248 hgext/mq.py:0:
248 hgext/mq.py:0:
249 > ui.write("mq: %s\n" % ', '.join(m))
249 > ui.write("mq: %s\n" % ', '.join(m))
250 warning: unwrapped ui message
250 warning: unwrapped ui message
251 hgext/mq.py:0:
251 hgext/mq.py:0:
252 > repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
252 > repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
253 warning: line over 80 characters
253 warning: line over 80 characters
254 hgext/notify.py:0:
254 hgext/notify.py:0:
255 > ui.note(_('notify: suppressing notification for merge %d:%s\n') %
255 > ui.note(_('notify: suppressing notification for merge %d:%s\n') %
256 warning: line over 80 characters
256 warning: line over 80 characters
257 hgext/patchbomb.py:0:
257 hgext/patchbomb.py:0:
258 > binnode, seqno=idx, total=total)
258 > binnode, seqno=idx, total=total)
259 warning: line over 80 characters
259 warning: line over 80 characters
260 hgext/patchbomb.py:0:
260 hgext/patchbomb.py:0:
261 > except:
261 > except:
262 warning: naked except clause
262 warning: naked except clause
263 hgext/patchbomb.py:0:
263 hgext/patchbomb.py:0:
264 > ui.write('Subject: %s\n' % subj)
264 > ui.write('Subject: %s\n' % subj)
265 warning: unwrapped ui message
265 warning: unwrapped ui message
266 hgext/patchbomb.py:0:
266 hgext/patchbomb.py:0:
267 > p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
267 > p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
268 warning: line over 80 characters
268 warning: line over 80 characters
269 hgext/patchbomb.py:0:
269 hgext/patchbomb.py:0:
270 > ui.write('From: %s\n' % sender)
270 > ui.write('From: %s\n' % sender)
271 warning: unwrapped ui message
271 warning: unwrapped ui message
272 hgext/record.py:0:
272 hgext/record.py:0:
273 > ignoreblanklines=opts.get('ignore_blank_lines'))
273 > ignoreblanklines=opts.get('ignore_blank_lines'))
274 warning: line over 80 characters
274 warning: line over 80 characters
275 hgext/record.py:0:
275 hgext/record.py:0:
276 > ignorewsamount=opts.get('ignore_space_change'),
276 > ignorewsamount=opts.get('ignore_space_change'),
277 warning: line over 80 characters
277 warning: line over 80 characters
278 hgext/zeroconf/__init__.py:0:
278 hgext/zeroconf/__init__.py:0:
279 > publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
279 > publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
280 warning: line over 80 characters
280 warning: line over 80 characters
281 hgext/zeroconf/__init__.py:0:
281 hgext/zeroconf/__init__.py:0:
282 > except:
282 > except:
283 warning: naked except clause
283 warning: naked except clause
284 warning: naked except clause
284 warning: naked except clause
285 mercurial/bundlerepo.py:0:
285 mercurial/bundlerepo.py:0:
286 > is a bundlerepo for the obtained bundle when the original "other" is remote.
286 > is a bundlerepo for the obtained bundle when the original "other" is remote.
287 warning: line over 80 characters
287 warning: line over 80 characters
288 mercurial/bundlerepo.py:0:
288 mercurial/bundlerepo.py:0:
289 > "local" is a local repo from which to obtain the actual incoming changesets; it
289 > "local" is a local repo from which to obtain the actual incoming changesets; it
290 warning: line over 80 characters
290 warning: line over 80 characters
291 mercurial/bundlerepo.py:0:
291 mercurial/bundlerepo.py:0:
292 > tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
292 > tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
293 warning: line over 80 characters
293 warning: line over 80 characters
294 mercurial/commands.py:0:
294 mercurial/commands.py:0:
295 > " size " + basehdr + " link p1 p2 nodeid\n")
295 > " size " + basehdr + " link p1 p2 nodeid\n")
296 warning: line over 80 characters
296 warning: line over 80 characters
297 mercurial/commands.py:0:
297 mercurial/commands.py:0:
298 > raise util.Abort('cannot use localheads with old style discovery')
298 > raise util.Abort('cannot use localheads with old style discovery')
299 warning: line over 80 characters
299 warning: line over 80 characters
300 mercurial/commands.py:0:
300 mercurial/commands.py:0:
301 > ui.note('branch %s\n' % data)
301 > ui.note('branch %s\n' % data)
302 warning: unwrapped ui message
302 warning: unwrapped ui message
303 mercurial/commands.py:0:
303 mercurial/commands.py:0:
304 > ui.note('node %s\n' % str(data))
304 > ui.note('node %s\n' % str(data))
305 warning: unwrapped ui message
305 warning: unwrapped ui message
306 mercurial/commands.py:0:
306 mercurial/commands.py:0:
307 > ui.note('tag %s\n' % name)
307 > ui.note('tag %s\n' % name)
308 warning: unwrapped ui message
308 warning: unwrapped ui message
309 mercurial/commands.py:0:
309 mercurial/commands.py:0:
310 > ui.write("unpruned common: %s\n" % " ".join([short(n)
310 > ui.write("unpruned common: %s\n" % " ".join([short(n)
311 warning: unwrapped ui message
311 warning: unwrapped ui message
312 mercurial/commands.py:0:
312 mercurial/commands.py:0:
313 > yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
313 > yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
314 warning: line over 80 characters
314 warning: line over 80 characters
315 mercurial/commands.py:0:
315 mercurial/commands.py:0:
316 > yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
316 > yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
317 warning: line over 80 characters
317 warning: line over 80 characters
318 mercurial/commands.py:0:
318 mercurial/commands.py:0:
319 > except:
319 > except:
320 warning: naked except clause
320 warning: naked except clause
321 mercurial/commands.py:0:
321 mercurial/commands.py:0:
322 > ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
322 > ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
323 warning: line over 80 characters
323 warning: line over 80 characters
324 mercurial/commands.py:0:
324 mercurial/commands.py:0:
325 > ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
325 > ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
326 warning: unwrapped ui message
326 warning: unwrapped ui message
327 mercurial/commands.py:0:
327 mercurial/commands.py:0:
328 > ui.write("local is subset\n")
328 > ui.write("local is subset\n")
329 warning: unwrapped ui message
329 warning: unwrapped ui message
330 mercurial/commands.py:0:
330 mercurial/commands.py:0:
331 > ui.write("remote is subset\n")
331 > ui.write("remote is subset\n")
332 warning: unwrapped ui message
332 warning: unwrapped ui message
333 mercurial/commands.py:0:
333 mercurial/commands.py:0:
334 > ui.write(' other : ' + fmt2 % pcfmt(numoprev, numprev))
334 > ui.write(' other : ' + fmt2 % pcfmt(numoprev, numprev))
335 warning: line over 80 characters
335 warning: line over 80 characters
336 mercurial/commands.py:0:
336 mercurial/commands.py:0:
337 > ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev))
337 > ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev))
338 warning: line over 80 characters
338 warning: line over 80 characters
339 mercurial/commands.py:0:
339 mercurial/commands.py:0:
340 > ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev))
340 > ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev))
341 warning: line over 80 characters
341 warning: line over 80 characters
342 mercurial/commands.py:0:
342 mercurial/commands.py:0:
343 > ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
343 > ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
344 warning: line over 80 characters
344 warning: line over 80 characters
345 warning: unwrapped ui message
345 warning: unwrapped ui message
346 mercurial/commands.py:0:
346 mercurial/commands.py:0:
347 > ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
347 > ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
348 warning: unwrapped ui message
348 warning: unwrapped ui message
349 mercurial/commands.py:0:
349 mercurial/commands.py:0:
350 > ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
350 > ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
351 warning: unwrapped ui message
351 warning: unwrapped ui message
352 mercurial/commands.py:0:
352 mercurial/commands.py:0:
353 > cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
353 > cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
354 warning: line over 80 characters
354 warning: line over 80 characters
355 mercurial/commands.py:0:
355 mercurial/commands.py:0:
356 > except:
356 > except:
357 warning: naked except clause
357 warning: naked except clause
358 mercurial/commands.py:0:
358 mercurial/commands.py:0:
359 > revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
359 > revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
360 warning: line over 80 characters
360 warning: line over 80 characters
361 mercurial/commands.py:0:
361 mercurial/commands.py:0:
362 > ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
362 > ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
363 warning: unwrapped ui message
363 warning: unwrapped ui message
364 mercurial/commands.py:0:
364 mercurial/commands.py:0:
365 > ui.write("match: %s\n" % m(d[0]))
365 > ui.write("match: %s\n" % m(d[0]))
366 warning: unwrapped ui message
366 warning: unwrapped ui message
367 mercurial/commands.py:0:
367 mercurial/commands.py:0:
368 > ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
368 > ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
369 warning: unwrapped ui message
369 warning: unwrapped ui message
370 mercurial/commands.py:0:
370 mercurial/commands.py:0:
371 > ui.write('path %s\n' % k)
371 > ui.write('path %s\n' % k)
372 warning: unwrapped ui message
372 warning: unwrapped ui message
373 mercurial/commands.py:0:
373 mercurial/commands.py:0:
374 > ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
374 > ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
375 warning: unwrapped ui message
375 warning: unwrapped ui message
376 mercurial/commands.py:0:
376 mercurial/commands.py:0:
377 > Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
377 > Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
378 warning: line over 80 characters
378 warning: line over 80 characters
379 mercurial/commands.py:0:
379 mercurial/commands.py:0:
380 > remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
380 > remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
381 warning: line over 80 characters
381 warning: line over 80 characters
382 mercurial/commands.py:0:
382 mercurial/commands.py:0:
383 > ui.write("digraph G {\n")
383 > ui.write("digraph G {\n")
384 warning: unwrapped ui message
384 warning: unwrapped ui message
385 mercurial/commands.py:0:
385 mercurial/commands.py:0:
386 > ui.write("internal: %s %s\n" % d)
386 > ui.write("internal: %s %s\n" % d)
387 warning: unwrapped ui message
387 warning: unwrapped ui message
388 mercurial/commands.py:0:
388 mercurial/commands.py:0:
389 > ui.write("standard: %s\n" % util.datestr(d))
389 > ui.write("standard: %s\n" % util.datestr(d))
390 warning: unwrapped ui message
390 warning: unwrapped ui message
391 mercurial/commands.py:0:
391 mercurial/commands.py:0:
392 > ui.write('avg chain length : ' + fmt % avgchainlen)
392 > ui.write('avg chain length : ' + fmt % avgchainlen)
393 warning: unwrapped ui message
393 warning: unwrapped ui message
394 mercurial/commands.py:0:
394 mercurial/commands.py:0:
395 > ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
395 > ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
396 warning: unwrapped ui message
396 warning: unwrapped ui message
397 mercurial/commands.py:0:
397 mercurial/commands.py:0:
398 > ui.write('compression ratio : ' + fmt % compratio)
398 > ui.write('compression ratio : ' + fmt % compratio)
399 warning: unwrapped ui message
399 warning: unwrapped ui message
400 mercurial/commands.py:0:
400 mercurial/commands.py:0:
401 > ui.write('delta size (min/max/avg) : %d / %d / %d\n'
401 > ui.write('delta size (min/max/avg) : %d / %d / %d\n'
402 warning: unwrapped ui message
402 warning: unwrapped ui message
403 mercurial/commands.py:0:
403 mercurial/commands.py:0:
404 > ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
404 > ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
405 warning: unwrapped ui message
405 warning: unwrapped ui message
406 mercurial/commands.py:0:
406 mercurial/commands.py:0:
407 > ui.write('flags : %s\n' % ', '.join(flags))
407 > ui.write('flags : %s\n' % ', '.join(flags))
408 warning: unwrapped ui message
408 warning: unwrapped ui message
409 mercurial/commands.py:0:
409 mercurial/commands.py:0:
410 > ui.write('format : %d\n' % format)
410 > ui.write('format : %d\n' % format)
411 warning: unwrapped ui message
411 warning: unwrapped ui message
412 mercurial/commands.py:0:
412 mercurial/commands.py:0:
413 > ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
413 > ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
414 warning: unwrapped ui message
414 warning: unwrapped ui message
415 mercurial/commands.py:0:
415 mercurial/commands.py:0:
416 > ui.write('revision size : ' + fmt2 % totalsize)
416 > ui.write('revision size : ' + fmt2 % totalsize)
417 warning: unwrapped ui message
417 warning: unwrapped ui message
418 mercurial/commands.py:0:
418 mercurial/commands.py:0:
419 > ui.write('revisions : ' + fmt2 % numrevs)
419 > ui.write('revisions : ' + fmt2 % numrevs)
420 warning: unwrapped ui message
420 warning: unwrapped ui message
421 warning: unwrapped ui message
421 warning: unwrapped ui message
422 mercurial/commands.py:0:
422 mercurial/commands.py:0:
423 > ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
423 > ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
424 warning: unwrapped ui message
424 warning: unwrapped ui message
425 mercurial/commandserver.py:0:
425 mercurial/commandserver.py:0:
426 > # the ui here is really the repo ui so take its baseui so we don't end up
426 > # the ui here is really the repo ui so take its baseui so we don't end up
427 warning: line over 80 characters
427 warning: line over 80 characters
428 mercurial/context.py:0:
428 mercurial/context.py:0:
429 > return self._manifestdelta[path], self._manifestdelta.flags(path)
429 > return self._manifestdelta[path], self._manifestdelta.flags(path)
430 warning: line over 80 characters
430 warning: line over 80 characters
431 mercurial/dagparser.py:0:
431 mercurial/dagparser.py:0:
432 > raise util.Abort(_("invalid character in dag description: %s...") % s)
432 > raise util.Abort(_("invalid character in dag description: %s...") % s)
433 warning: line over 80 characters
433 warning: line over 80 characters
434 mercurial/dagparser.py:0:
434 mercurial/dagparser.py:0:
435 > >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
435 > >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
436 warning: line over 80 characters
436 warning: line over 80 characters
437 mercurial/dirstate.py:0:
437 mercurial/dirstate.py:0:
438 > if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
438 > if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
439 warning: line over 80 characters
439 warning: line over 80 characters
440 mercurial/discovery.py:0:
440 mercurial/discovery.py:0:
441 > If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
441 > If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
442 warning: line over 80 characters
442 warning: line over 80 characters
443 mercurial/discovery.py:0:
443 mercurial/discovery.py:0:
444 > def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None):
444 > def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None):
445 warning: line over 80 characters
445 warning: line over 80 characters
446 mercurial/dispatch.py:0:
446 mercurial/dispatch.py:0:
447 > " (.hg not found)") % os.getcwd())
447 > " (.hg not found)") % os.getcwd())
448 warning: line over 80 characters
448 warning: line over 80 characters
449 mercurial/dispatch.py:0:
449 mercurial/dispatch.py:0:
450 > aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict"))
450 > aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict"))
451 warning: line over 80 characters
451 warning: line over 80 characters
452 mercurial/dispatch.py:0:
452 mercurial/dispatch.py:0:
453 > except:
453 > except:
454 warning: naked except clause
454 warning: naked except clause
455 mercurial/dispatch.py:0:
455 mercurial/dispatch.py:0:
456 > return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
456 > return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
457 warning: line over 80 characters
457 warning: line over 80 characters
458 mercurial/dispatch.py:0:
458 mercurial/dispatch.py:0:
459 > def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
459 > def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
460 warning: line over 80 characters
460 warning: line over 80 characters
461 mercurial/dispatch.py:0:
461 mercurial/dispatch.py:0:
462 > except:
462 > except:
463 warning: naked except clause
463 warning: naked except clause
464 mercurial/hg.py:0:
464 mercurial/hg.py:0:
465 > except:
465 > except:
466 warning: naked except clause
466 warning: naked except clause
467 mercurial/hgweb/hgweb_mod.py:0:
467 mercurial/hgweb/hgweb_mod.py:0:
468 > self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
468 > self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
469 warning: line over 80 characters
469 warning: line over 80 characters
470 mercurial/keepalive.py:0:
470 mercurial/keepalive.py:0:
471 > except:
471 > except:
472 warning: naked except clause
472 warning: naked except clause
473 mercurial/keepalive.py:0:
473 mercurial/keepalive.py:0:
474 > except:
474 > except:
475 warning: naked except clause
475 warning: naked except clause
476 mercurial/localrepo.py:0:
476 mercurial/localrepo.py:0:
477 > # we return an integer indicating remote head count change
477 > # we return an integer indicating remote head count change
478 warning: line over 80 characters
478 warning: line over 80 characters
479 mercurial/localrepo.py:0:
479 mercurial/localrepo.py:0:
480 > raise util.Abort(_("empty or missing revlog for %s") % fname)
480 > raise util.Abort(_("empty or missing revlog for %s") % fname)
481 warning: line over 80 characters
481 warning: line over 80 characters
482 warning: line over 80 characters
482 warning: line over 80 characters
483 mercurial/localrepo.py:0:
483 mercurial/localrepo.py:0:
484 > if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
484 > if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
485 warning: line over 80 characters
485 warning: line over 80 characters
486 mercurial/localrepo.py:0:
486 mercurial/localrepo.py:0:
487 > self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
487 > self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
488 warning: line over 80 characters
488 warning: line over 80 characters
489 mercurial/localrepo.py:0:
489 mercurial/localrepo.py:0:
490 > # new requirements = old non-format requirements + new format-related
490 > # new requirements = old non-format requirements + new format-related
491 warning: line over 80 characters
491 warning: line over 80 characters
492 mercurial/localrepo.py:0:
492 mercurial/localrepo.py:0:
493 > except:
493 > except:
494 warning: naked except clause
494 warning: naked except clause
495 mercurial/localrepo.py:0:
495 mercurial/localrepo.py:0:
496 > """return status of files between two nodes or node and working directory
496 > """return status of files between two nodes or node and working directory
497 warning: line over 80 characters
497 warning: line over 80 characters
498 mercurial/localrepo.py:0:
498 mercurial/localrepo.py:0:
499 > '''Returns a tagscache object that contains various tags related caches.'''
499 > '''Returns a tagscache object that contains various tags related caches.'''
500 warning: line over 80 characters
500 warning: line over 80 characters
501 mercurial/manifest.py:0:
501 mercurial/manifest.py:0:
502 > return "".join(struct.pack(">lll", start, end, len(content)) + content
502 > return "".join(struct.pack(">lll", start, end, len(content)) + content
503 warning: line over 80 characters
503 warning: line over 80 characters
504 mercurial/merge.py:0:
504 mercurial/merge.py:0:
505 > subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
505 > subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
506 warning: line over 80 characters
506 warning: line over 80 characters
507 mercurial/patch.py:0:
507 mercurial/patch.py:0:
508 > modified, added, removed, copy, getfilectx, opts, losedata, prefix)
508 > modified, added, removed, copy, getfilectx, opts, losedata, prefix)
509 warning: line over 80 characters
509 warning: line over 80 characters
510 mercurial/patch.py:0:
510 mercurial/patch.py:0:
511 > diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
511 > diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
512 warning: line over 80 characters
512 warning: line over 80 characters
513 mercurial/patch.py:0:
513 mercurial/patch.py:0:
514 > output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
514 > output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
515 warning: line over 80 characters
515 warning: line over 80 characters
516 mercurial/patch.py:0:
516 mercurial/patch.py:0:
517 > except:
517 > except:
518 warning: naked except clause
518 warning: naked except clause
519 mercurial/pure/base85.py:0:
519 mercurial/pure/base85.py:0:
520 > raise OverflowError('Base85 overflow in hunk starting at byte %d' % i)
520 > raise OverflowError('Base85 overflow in hunk starting at byte %d' % i)
521 warning: line over 80 characters
521 warning: line over 80 characters
522 mercurial/pure/mpatch.py:0:
522 mercurial/pure/mpatch.py:0:
523 > frags.extend(reversed(new)) # what was left at the end
523 > frags.extend(reversed(new)) # what was left at the end
524 warning: line over 80 characters
524 warning: line over 80 characters
525 mercurial/repair.py:0:
525 mercurial/repair.py:0:
526 > except:
526 > except:
527 warning: naked except clause
527 warning: naked except clause
528 mercurial/repair.py:0:
528 mercurial/repair.py:0:
529 > except:
529 > except:
530 warning: naked except clause
530 warning: naked except clause
531 mercurial/revset.py:0:
531 mercurial/revset.py:0:
532 > elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
532 > elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
533 warning: line over 80 characters
533 warning: line over 80 characters
534 mercurial/revset.py:0:
534 mercurial/revset.py:0:
535 > Changesets that are the Nth ancestor (first parents only) of a changeset in set.
535 > Changesets that are the Nth ancestor (first parents only) of a changeset in set.
536 warning: line over 80 characters
536 warning: line over 80 characters
537 mercurial/scmutil.py:0:
537 mercurial/scmutil.py:0:
538 > raise util.Abort(_("path '%s' is inside nested repo %r") %
538 > raise util.Abort(_("path '%s' is inside nested repo %r") %
539 warning: line over 80 characters
539 warning: line over 80 characters
540 mercurial/scmutil.py:0:
540 mercurial/scmutil.py:0:
541 > "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
541 > "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
542 warning: line over 80 characters
542 warning: line over 80 characters
543 mercurial/scmutil.py:0:
543 mercurial/scmutil.py:0:
544 > elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
544 > elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
545 warning: line over 80 characters
545 warning: line over 80 characters
546 mercurial/setdiscovery.py:0:
546 mercurial/setdiscovery.py:0:
547 > # treat remote heads (and maybe own heads) as a first implicit sample response
547 > # treat remote heads (and maybe own heads) as a first implicit sample response
548 warning: line over 80 characters
548 warning: line over 80 characters
549 mercurial/setdiscovery.py:0:
549 mercurial/setdiscovery.py:0:
550 > undecided = dag.nodeset() # own nodes where I don't know if remote knows them
550 > undecided = dag.nodeset() # own nodes where I don't know if remote knows them
551 warning: line over 80 characters
551 warning: line over 80 characters
552 mercurial/similar.py:0:
552 mercurial/similar.py:0:
553 > repo.ui.progress(_('searching for similar files'), i, total=len(removed))
553 > repo.ui.progress(_('searching for similar files'), i, total=len(removed))
554 warning: line over 80 characters
554 warning: line over 80 characters
555 mercurial/simplemerge.py:0:
555 mercurial/simplemerge.py:0:
556 > for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
556 > for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
557 warning: line over 80 characters
557 warning: line over 80 characters
558 mercurial/sshrepo.py:0:
558 mercurial/sshrepo.py:0:
559 > self._abort(error.RepoError(_("no suitable response from remote hg")))
559 > self._abort(error.RepoError(_("no suitable response from remote hg")))
560 warning: line over 80 characters
560 warning: line over 80 characters
561 mercurial/sshrepo.py:0:
561 mercurial/sshrepo.py:0:
562 > except:
562 > except:
563 warning: naked except clause
563 warning: naked except clause
564 mercurial/subrepo.py:0:
564 mercurial/subrepo.py:0:
565 > other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
565 > other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
566 warning: line over 80 characters
566 warning: line over 80 characters
567 mercurial/subrepo.py:0:
567 mercurial/subrepo.py:0:
568 > msg = (_(' subrepository sources for %s differ (in checked out version)\n'
568 > msg = (_(' subrepository sources for %s differ (in checked out version)\n'
569 warning: line over 80 characters
569 warning: line over 80 characters
570 mercurial/transaction.py:0:
570 mercurial/transaction.py:0:
571 > except:
571 > except:
572 warning: naked except clause
572 warning: naked except clause
573 mercurial/ui.py:0:
573 mercurial/ui.py:0:
574 > traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
574 > traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
575 warning: line over 80 characters
575 warning: line over 80 characters
576 mercurial/url.py:0:
576 mercurial/url.py:0:
577 > conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
577 > conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
578 warning: line over 80 characters
578 warning: line over 80 characters
579 mercurial/util.py:0:
579 mercurial/util.py:0:
580 > except:
580 > except:
581 warning: naked except clause
581 warning: naked except clause
582 mercurial/util.py:0:
582 mercurial/util.py:0:
583 > except:
583 > except:
584 warning: naked except clause
584 warning: naked except clause
585 mercurial/verify.py:0:
585 mercurial/verify.py:0:
586 > except:
586 > except:
587 warning: naked except clause
587 warning: naked except clause
588 mercurial/verify.py:0:
588 mercurial/verify.py:0:
589 > except:
589 > except:
590 warning: naked except clause
590 warning: naked except clause
591 mercurial/wireproto.py:0:
591 mercurial/wireproto.py:0:
592 > # Assuming the future to be filled with the result from the batched request
592 > # Assuming the future to be filled with the result from the batched request
593 warning: line over 80 characters
593 warning: line over 80 characters
594 mercurial/wireproto.py:0:
594 mercurial/wireproto.py:0:
595 > '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
595 > '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
596 warning: line over 80 characters
596 warning: line over 80 characters
597 mercurial/wireproto.py:0:
597 mercurial/wireproto.py:0:
598 > All methods invoked on instances of this class are simply queued and return a
598 > All methods invoked on instances of this class are simply queued and return a
599 warning: line over 80 characters
599 warning: line over 80 characters
600 mercurial/wireproto.py:0:
600 mercurial/wireproto.py:0:
601 > The decorator returns a function which wraps this coroutine as a plain method,
601 > The decorator returns a function which wraps this coroutine as a plain method,
602 warning: line over 80 characters
602 warning: line over 80 characters
603 setup.py:0:
603 setup.py:0:
604 > raise SystemExit("Python headers are required to build Mercurial")
604 > raise SystemExit("Python headers are required to build Mercurial")
605 warning: line over 80 characters
605 warning: line over 80 characters
606 setup.py:0:
606 setup.py:0:
607 > except:
607 > except:
608 warning: naked except clause
608 warning: naked except clause
609 setup.py:0:
609 setup.py:0:
610 > # build_py), it will not find osutil & friends, thinking that those modules are
610 > # build_py), it will not find osutil & friends, thinking that those modules are
611 warning: line over 80 characters
611 warning: line over 80 characters
612 setup.py:0:
612 setup.py:0:
613 > except:
613 > except:
614 warning: naked except clause
614 warning: naked except clause
615 warning: naked except clause
615 warning: naked except clause
616 setup.py:0:
616 setup.py:0:
617 > isironpython = platform.python_implementation().lower().find("ironpython") != -1
617 > isironpython = platform.python_implementation().lower().find("ironpython") != -1
618 warning: line over 80 characters
618 warning: line over 80 characters
619 setup.py:0:
619 setup.py:0:
620 > except:
620 > except:
621 warning: naked except clause
621 warning: naked except clause
622 warning: naked except clause
622 warning: naked except clause
623 warning: naked except clause
623 warning: naked except clause
624 tests/autodiff.py:0:
624 tests/autodiff.py:0:
625 > ui.write('data lost for: %s\n' % fn)
625 > ui.write('data lost for: %s\n' % fn)
626 warning: unwrapped ui message
626 warning: unwrapped ui message
627 tests/run-tests.py:0:
627 tests/run-tests.py:0:
628 > except:
628 > except:
629 warning: naked except clause
629 warning: naked except clause
630 tests/test-commandserver.py:0:
630 tests/test-commandserver.py:0:
631 > 'hooks.pre-identify=python:test-commandserver.hook', 'id'],
631 > 'hooks.pre-identify=python:test-commandserver.hook', 'id'],
632 warning: line over 80 characters
632 warning: line over 80 characters
633 tests/test-commandserver.py:0:
633 tests/test-commandserver.py:0:
634 > # the cached repo local hgrc contains ui.foo=bar, so showconfig should show it
634 > # the cached repo local hgrc contains ui.foo=bar, so showconfig should show it
635 warning: line over 80 characters
635 warning: line over 80 characters
636 tests/test-commandserver.py:0:
636 tests/test-commandserver.py:0:
637 > print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***', data))
637 > print '%c, %r' % (ch, re.sub('encoding: [a-zA-Z0-9-]+', 'encoding: ***', data))
638 warning: line over 80 characters
638 warning: line over 80 characters
639 tests/test-filecache.py:0:
639 tests/test-filecache.py:0:
640 > except:
640 > except:
641 warning: naked except clause
641 warning: naked except clause
642 tests/test-filecache.py:0:
642 tests/test-filecache.py:0:
643 > if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
643 > if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
644 warning: line over 80 characters
644 warning: line over 80 characters
645 tests/test-ui-color.py:0:
645 tests/test-ui-color.py:0:
646 > testui.warn('warning\n')
646 > testui.warn('warning\n')
647 warning: unwrapped ui message
647 warning: unwrapped ui message
648 tests/test-ui-color.py:0:
648 tests/test-ui-color.py:0:
649 > testui.write('buffered\n')
649 > testui.write('buffered\n')
650 warning: unwrapped ui message
650 warning: unwrapped ui message
651 tests/test-walkrepo.py:0:
651 tests/test-walkrepo.py:0:
652 > print "Found %d repositories when I should have found 2" % (len(reposet),)
652 > print "Found %d repositories when I should have found 2" % (len(reposet),)
653 warning: line over 80 characters
653 warning: line over 80 characters
654 tests/test-walkrepo.py:0:
654 tests/test-walkrepo.py:0:
655 > print "Found %d repositories when I should have found 3" % (len(reposet),)
655 > print "Found %d repositories when I should have found 3" % (len(reposet),)
656 warning: line over 80 characters
656 warning: line over 80 characters
657 [1]
657 [123]
@@ -1,488 +1,492 b''
1 Create test repository:
1 Create test repository:
2
2
3 $ hg init repo
3 $ hg init repo
4 $ cd repo
4 $ cd repo
5 $ echo x1 > x.txt
5 $ echo x1 > x.txt
6
6
7 $ hg init foo
7 $ hg init foo
8 $ cd foo
8 $ cd foo
9 $ echo y1 > y.txt
9 $ echo y1 > y.txt
10
10
11 $ hg init bar
11 $ hg init bar
12 $ cd bar
12 $ cd bar
13 $ echo z1 > z.txt
13 $ echo z1 > z.txt
14
14
15 $ cd ..
15 $ cd ..
16 $ echo 'bar = bar' > .hgsub
16 $ echo 'bar = bar' > .hgsub
17
17
18 $ cd ..
18 $ cd ..
19 $ echo 'foo = foo' > .hgsub
19 $ echo 'foo = foo' > .hgsub
20
20
21 Add files --- .hgsub files must go first to trigger subrepos:
21 Add files --- .hgsub files must go first to trigger subrepos:
22
22
23 $ hg add -S .hgsub
23 $ hg add -S .hgsub
24 $ hg add -S foo/.hgsub
24 $ hg add -S foo/.hgsub
25 $ hg add -S foo/bar
25 $ hg add -S foo/bar
26 adding foo/bar/z.txt (glob)
26 adding foo/bar/z.txt (glob)
27 $ hg add -S
27 $ hg add -S
28 adding x.txt
28 adding x.txt
29 adding foo/y.txt (glob)
29 adding foo/y.txt (glob)
30
30
31 Test recursive status without committing anything:
31 Test recursive status without committing anything:
32
32
33 $ hg status -S
33 $ hg status -S
34 A .hgsub
34 A .hgsub
35 A foo/.hgsub
35 A foo/.hgsub
36 A foo/bar/z.txt
36 A foo/bar/z.txt
37 A foo/y.txt
37 A foo/y.txt
38 A x.txt
38 A x.txt
39
39
40 Test recursive diff without committing anything:
40 Test recursive diff without committing anything:
41
41
42 $ hg diff --nodates -S foo
42 $ hg diff --nodates -S foo
43 diff -r 000000000000 foo/.hgsub
43 diff -r 000000000000 foo/.hgsub
44 --- /dev/null
44 --- /dev/null
45 +++ b/foo/.hgsub
45 +++ b/foo/.hgsub
46 @@ -0,0 +1,1 @@
46 @@ -0,0 +1,1 @@
47 +bar = bar
47 +bar = bar
48 diff -r 000000000000 foo/y.txt
48 diff -r 000000000000 foo/y.txt
49 --- /dev/null
49 --- /dev/null
50 +++ b/foo/y.txt
50 +++ b/foo/y.txt
51 @@ -0,0 +1,1 @@
51 @@ -0,0 +1,1 @@
52 +y1
52 +y1
53 diff -r 000000000000 foo/bar/z.txt
53 diff -r 000000000000 foo/bar/z.txt
54 --- /dev/null
54 --- /dev/null
55 +++ b/foo/bar/z.txt
55 +++ b/foo/bar/z.txt
56 @@ -0,0 +1,1 @@
56 @@ -0,0 +1,1 @@
57 +z1
57 +z1
58
58
59 Commits:
59 Commits:
60
60
61 $ hg commit -m fails
61 $ hg commit -m fails
62 abort: uncommitted changes in subrepo foo
62 abort: uncommitted changes in subrepo foo
63 (use --subrepos for recursive commit)
63 (use --subrepos for recursive commit)
64 [255]
64 [255]
65
65
66 The --subrepos flag overwrite the config setting:
66 The --subrepos flag overwrite the config setting:
67
67
68 $ hg commit -m 0-0-0 --config ui.commitsubrepos=No --subrepos
68 $ hg commit -m 0-0-0 --config ui.commitsubrepos=No --subrepos
69 committing subrepository foo
69 committing subrepository foo
70 committing subrepository foo/bar (glob)
70 committing subrepository foo/bar (glob)
71
71
72 $ cd foo
72 $ cd foo
73 $ echo y2 >> y.txt
73 $ echo y2 >> y.txt
74 $ hg commit -m 0-1-0
74 $ hg commit -m 0-1-0
75
75
76 $ cd bar
76 $ cd bar
77 $ echo z2 >> z.txt
77 $ echo z2 >> z.txt
78 $ hg commit -m 0-1-1
78 $ hg commit -m 0-1-1
79
79
80 $ cd ..
80 $ cd ..
81 $ hg commit -m 0-2-1
81 $ hg commit -m 0-2-1
82
82
83 $ cd ..
83 $ cd ..
84 $ hg commit -m 1-2-1
84 $ hg commit -m 1-2-1
85
85
86 Change working directory:
86 Change working directory:
87
87
88 $ echo y3 >> foo/y.txt
88 $ echo y3 >> foo/y.txt
89 $ echo z3 >> foo/bar/z.txt
89 $ echo z3 >> foo/bar/z.txt
90 $ hg status -S
90 $ hg status -S
91 M foo/bar/z.txt
91 M foo/bar/z.txt
92 M foo/y.txt
92 M foo/y.txt
93 $ hg diff --nodates -S
93 $ hg diff --nodates -S
94 diff -r d254738c5f5e foo/y.txt
94 diff -r d254738c5f5e foo/y.txt
95 --- a/foo/y.txt
95 --- a/foo/y.txt
96 +++ b/foo/y.txt
96 +++ b/foo/y.txt
97 @@ -1,2 +1,3 @@
97 @@ -1,2 +1,3 @@
98 y1
98 y1
99 y2
99 y2
100 +y3
100 +y3
101 diff -r 9647f22de499 foo/bar/z.txt
101 diff -r 9647f22de499 foo/bar/z.txt
102 --- a/foo/bar/z.txt
102 --- a/foo/bar/z.txt
103 +++ b/foo/bar/z.txt
103 +++ b/foo/bar/z.txt
104 @@ -1,2 +1,3 @@
104 @@ -1,2 +1,3 @@
105 z1
105 z1
106 z2
106 z2
107 +z3
107 +z3
108
108
109 Status call crossing repository boundaries:
109 Status call crossing repository boundaries:
110
110
111 $ hg status -S foo/bar/z.txt
111 $ hg status -S foo/bar/z.txt
112 M foo/bar/z.txt
112 M foo/bar/z.txt
113 $ hg status -S -I 'foo/?.txt'
113 $ hg status -S -I 'foo/?.txt'
114 M foo/y.txt
114 M foo/y.txt
115 $ hg status -S -I '**/?.txt'
115 $ hg status -S -I '**/?.txt'
116 M foo/bar/z.txt
116 M foo/bar/z.txt
117 M foo/y.txt
117 M foo/y.txt
118 $ hg diff --nodates -S -I '**/?.txt'
118 $ hg diff --nodates -S -I '**/?.txt'
119 diff -r d254738c5f5e foo/y.txt
119 diff -r d254738c5f5e foo/y.txt
120 --- a/foo/y.txt
120 --- a/foo/y.txt
121 +++ b/foo/y.txt
121 +++ b/foo/y.txt
122 @@ -1,2 +1,3 @@
122 @@ -1,2 +1,3 @@
123 y1
123 y1
124 y2
124 y2
125 +y3
125 +y3
126 diff -r 9647f22de499 foo/bar/z.txt
126 diff -r 9647f22de499 foo/bar/z.txt
127 --- a/foo/bar/z.txt
127 --- a/foo/bar/z.txt
128 +++ b/foo/bar/z.txt
128 +++ b/foo/bar/z.txt
129 @@ -1,2 +1,3 @@
129 @@ -1,2 +1,3 @@
130 z1
130 z1
131 z2
131 z2
132 +z3
132 +z3
133
133
134 Status from within a subdirectory:
134 Status from within a subdirectory:
135
135
136 $ mkdir dir
136 $ mkdir dir
137 $ cd dir
137 $ cd dir
138 $ echo a1 > a.txt
138 $ echo a1 > a.txt
139 $ hg status -S
139 $ hg status -S
140 M foo/bar/z.txt
140 M foo/bar/z.txt
141 M foo/y.txt
141 M foo/y.txt
142 ? dir/a.txt
142 ? dir/a.txt
143 $ hg diff --nodates -S
143 $ hg diff --nodates -S
144 diff -r d254738c5f5e foo/y.txt
144 diff -r d254738c5f5e foo/y.txt
145 --- a/foo/y.txt
145 --- a/foo/y.txt
146 +++ b/foo/y.txt
146 +++ b/foo/y.txt
147 @@ -1,2 +1,3 @@
147 @@ -1,2 +1,3 @@
148 y1
148 y1
149 y2
149 y2
150 +y3
150 +y3
151 diff -r 9647f22de499 foo/bar/z.txt
151 diff -r 9647f22de499 foo/bar/z.txt
152 --- a/foo/bar/z.txt
152 --- a/foo/bar/z.txt
153 +++ b/foo/bar/z.txt
153 +++ b/foo/bar/z.txt
154 @@ -1,2 +1,3 @@
154 @@ -1,2 +1,3 @@
155 z1
155 z1
156 z2
156 z2
157 +z3
157 +z3
158
158
159 Status with relative path:
159 Status with relative path:
160
160
161 $ hg status -S ..
161 $ hg status -S ..
162 M ../foo/bar/z.txt
162 M ../foo/bar/z.txt
163 M ../foo/y.txt
163 M ../foo/y.txt
164 ? a.txt
164 ? a.txt
165 $ hg diff --nodates -S ..
165 $ hg diff --nodates -S ..
166 diff -r d254738c5f5e foo/y.txt
166 diff -r d254738c5f5e foo/y.txt
167 --- a/foo/y.txt
167 --- a/foo/y.txt
168 +++ b/foo/y.txt
168 +++ b/foo/y.txt
169 @@ -1,2 +1,3 @@
169 @@ -1,2 +1,3 @@
170 y1
170 y1
171 y2
171 y2
172 +y3
172 +y3
173 diff -r 9647f22de499 foo/bar/z.txt
173 diff -r 9647f22de499 foo/bar/z.txt
174 --- a/foo/bar/z.txt
174 --- a/foo/bar/z.txt
175 +++ b/foo/bar/z.txt
175 +++ b/foo/bar/z.txt
176 @@ -1,2 +1,3 @@
176 @@ -1,2 +1,3 @@
177 z1
177 z1
178 z2
178 z2
179 +z3
179 +z3
180 $ cd ..
180 $ cd ..
181
181
182 Cleanup and final commit:
182 Cleanup and final commit:
183
183
184 $ rm -r dir
184 $ rm -r dir
185 $ hg commit --subrepos -m 2-3-2
185 $ hg commit --subrepos -m 2-3-2
186 committing subrepository foo
186 committing subrepository foo
187 committing subrepository foo/bar (glob)
187 committing subrepository foo/bar (glob)
188
188
189 Test explicit path commands within subrepos: add/forget
189 Test explicit path commands within subrepos: add/forget
190 $ echo z1 > foo/bar/z2.txt
190 $ echo z1 > foo/bar/z2.txt
191 $ hg status -S
191 $ hg status -S
192 ? foo/bar/z2.txt
192 ? foo/bar/z2.txt
193 $ hg add foo/bar/z2.txt
193 $ hg add foo/bar/z2.txt
194 $ hg status -S
194 $ hg status -S
195 A foo/bar/z2.txt
195 A foo/bar/z2.txt
196 This is expected to forget the file, but is currently broken
197 $ hg forget foo/bar/z2.txt
196 $ hg forget foo/bar/z2.txt
198 $ hg status -S
197 $ hg status -S
199 ? foo/bar/z2.txt
198 ? foo/bar/z2.txt
199 $ hg forget foo/bar/z2.txt
200 not removing foo/bar/z2.txt: file is already untracked
201 [1]
202 $ hg status -S
203 ? foo/bar/z2.txt
200 $ rm foo/bar/z2.txt
204 $ rm foo/bar/z2.txt
201
205
202 Log with the relationships between repo and its subrepo:
206 Log with the relationships between repo and its subrepo:
203
207
204 $ hg log --template '{rev}:{node|short} {desc}\n'
208 $ hg log --template '{rev}:{node|short} {desc}\n'
205 2:1326fa26d0c0 2-3-2
209 2:1326fa26d0c0 2-3-2
206 1:4b3c9ff4f66b 1-2-1
210 1:4b3c9ff4f66b 1-2-1
207 0:23376cbba0d8 0-0-0
211 0:23376cbba0d8 0-0-0
208
212
209 $ hg -R foo log --template '{rev}:{node|short} {desc}\n'
213 $ hg -R foo log --template '{rev}:{node|short} {desc}\n'
210 3:65903cebad86 2-3-2
214 3:65903cebad86 2-3-2
211 2:d254738c5f5e 0-2-1
215 2:d254738c5f5e 0-2-1
212 1:8629ce7dcc39 0-1-0
216 1:8629ce7dcc39 0-1-0
213 0:af048e97ade2 0-0-0
217 0:af048e97ade2 0-0-0
214
218
215 $ hg -R foo/bar log --template '{rev}:{node|short} {desc}\n'
219 $ hg -R foo/bar log --template '{rev}:{node|short} {desc}\n'
216 2:31ecbdafd357 2-3-2
220 2:31ecbdafd357 2-3-2
217 1:9647f22de499 0-1-1
221 1:9647f22de499 0-1-1
218 0:4904098473f9 0-0-0
222 0:4904098473f9 0-0-0
219
223
220 Status between revisions:
224 Status between revisions:
221
225
222 $ hg status -S
226 $ hg status -S
223 $ hg status -S --rev 0:1
227 $ hg status -S --rev 0:1
224 M .hgsubstate
228 M .hgsubstate
225 M foo/.hgsubstate
229 M foo/.hgsubstate
226 M foo/bar/z.txt
230 M foo/bar/z.txt
227 M foo/y.txt
231 M foo/y.txt
228 $ hg diff --nodates -S -I '**/?.txt' --rev 0:1
232 $ hg diff --nodates -S -I '**/?.txt' --rev 0:1
229 diff -r af048e97ade2 -r d254738c5f5e foo/y.txt
233 diff -r af048e97ade2 -r d254738c5f5e foo/y.txt
230 --- a/foo/y.txt
234 --- a/foo/y.txt
231 +++ b/foo/y.txt
235 +++ b/foo/y.txt
232 @@ -1,1 +1,2 @@
236 @@ -1,1 +1,2 @@
233 y1
237 y1
234 +y2
238 +y2
235 diff -r 4904098473f9 -r 9647f22de499 foo/bar/z.txt
239 diff -r 4904098473f9 -r 9647f22de499 foo/bar/z.txt
236 --- a/foo/bar/z.txt
240 --- a/foo/bar/z.txt
237 +++ b/foo/bar/z.txt
241 +++ b/foo/bar/z.txt
238 @@ -1,1 +1,2 @@
242 @@ -1,1 +1,2 @@
239 z1
243 z1
240 +z2
244 +z2
241
245
242 Enable progress extension for archive tests:
246 Enable progress extension for archive tests:
243
247
244 $ cp $HGRCPATH $HGRCPATH.no-progress
248 $ cp $HGRCPATH $HGRCPATH.no-progress
245 $ cat >> $HGRCPATH <<EOF
249 $ cat >> $HGRCPATH <<EOF
246 > [extensions]
250 > [extensions]
247 > progress =
251 > progress =
248 > [progress]
252 > [progress]
249 > assume-tty = 1
253 > assume-tty = 1
250 > delay = 0
254 > delay = 0
251 > format = topic bar number
255 > format = topic bar number
252 > refresh = 0
256 > refresh = 0
253 > width = 60
257 > width = 60
254 > EOF
258 > EOF
255
259
256 Test archiving to a directory tree (the doubled lines in the output
260 Test archiving to a directory tree (the doubled lines in the output
257 only show up in the test output, not in real usage):
261 only show up in the test output, not in real usage):
258
262
259 $ hg archive --subrepos ../archive 2>&1 | $TESTDIR/filtercr.py
263 $ hg archive --subrepos ../archive 2>&1 | $TESTDIR/filtercr.py
260
264
261 archiving [ ] 0/3
265 archiving [ ] 0/3
262 archiving [ ] 0/3
266 archiving [ ] 0/3
263 archiving [=============> ] 1/3
267 archiving [=============> ] 1/3
264 archiving [=============> ] 1/3
268 archiving [=============> ] 1/3
265 archiving [===========================> ] 2/3
269 archiving [===========================> ] 2/3
266 archiving [===========================> ] 2/3
270 archiving [===========================> ] 2/3
267 archiving [==========================================>] 3/3
271 archiving [==========================================>] 3/3
268 archiving [==========================================>] 3/3
272 archiving [==========================================>] 3/3
269
273
270 archiving (foo) [ ] 0/3
274 archiving (foo) [ ] 0/3
271 archiving (foo) [ ] 0/3
275 archiving (foo) [ ] 0/3
272 archiving (foo) [===========> ] 1/3
276 archiving (foo) [===========> ] 1/3
273 archiving (foo) [===========> ] 1/3
277 archiving (foo) [===========> ] 1/3
274 archiving (foo) [=======================> ] 2/3
278 archiving (foo) [=======================> ] 2/3
275 archiving (foo) [=======================> ] 2/3
279 archiving (foo) [=======================> ] 2/3
276 archiving (foo) [====================================>] 3/3
280 archiving (foo) [====================================>] 3/3
277 archiving (foo) [====================================>] 3/3
281 archiving (foo) [====================================>] 3/3
278
282
279 archiving (foo/bar) [ ] 0/1 (glob)
283 archiving (foo/bar) [ ] 0/1 (glob)
280 archiving (foo/bar) [ ] 0/1 (glob)
284 archiving (foo/bar) [ ] 0/1 (glob)
281 archiving (foo/bar) [================================>] 1/1 (glob)
285 archiving (foo/bar) [================================>] 1/1 (glob)
282 archiving (foo/bar) [================================>] 1/1 (glob)
286 archiving (foo/bar) [================================>] 1/1 (glob)
283 \r (esc)
287 \r (esc)
284 $ find ../archive | sort
288 $ find ../archive | sort
285 ../archive
289 ../archive
286 ../archive/.hg_archival.txt
290 ../archive/.hg_archival.txt
287 ../archive/.hgsub
291 ../archive/.hgsub
288 ../archive/.hgsubstate
292 ../archive/.hgsubstate
289 ../archive/foo
293 ../archive/foo
290 ../archive/foo/.hgsub
294 ../archive/foo/.hgsub
291 ../archive/foo/.hgsubstate
295 ../archive/foo/.hgsubstate
292 ../archive/foo/bar
296 ../archive/foo/bar
293 ../archive/foo/bar/z.txt
297 ../archive/foo/bar/z.txt
294 ../archive/foo/y.txt
298 ../archive/foo/y.txt
295 ../archive/x.txt
299 ../archive/x.txt
296
300
297 Test archiving to zip file (unzip output is unstable):
301 Test archiving to zip file (unzip output is unstable):
298
302
299 $ hg archive --subrepos ../archive.zip 2>&1 | $TESTDIR/filtercr.py
303 $ hg archive --subrepos ../archive.zip 2>&1 | $TESTDIR/filtercr.py
300
304
301 archiving [ ] 0/3
305 archiving [ ] 0/3
302 archiving [ ] 0/3
306 archiving [ ] 0/3
303 archiving [=============> ] 1/3
307 archiving [=============> ] 1/3
304 archiving [=============> ] 1/3
308 archiving [=============> ] 1/3
305 archiving [===========================> ] 2/3
309 archiving [===========================> ] 2/3
306 archiving [===========================> ] 2/3
310 archiving [===========================> ] 2/3
307 archiving [==========================================>] 3/3
311 archiving [==========================================>] 3/3
308 archiving [==========================================>] 3/3
312 archiving [==========================================>] 3/3
309
313
310 archiving (foo) [ ] 0/3
314 archiving (foo) [ ] 0/3
311 archiving (foo) [ ] 0/3
315 archiving (foo) [ ] 0/3
312 archiving (foo) [===========> ] 1/3
316 archiving (foo) [===========> ] 1/3
313 archiving (foo) [===========> ] 1/3
317 archiving (foo) [===========> ] 1/3
314 archiving (foo) [=======================> ] 2/3
318 archiving (foo) [=======================> ] 2/3
315 archiving (foo) [=======================> ] 2/3
319 archiving (foo) [=======================> ] 2/3
316 archiving (foo) [====================================>] 3/3
320 archiving (foo) [====================================>] 3/3
317 archiving (foo) [====================================>] 3/3
321 archiving (foo) [====================================>] 3/3
318
322
319 archiving (foo/bar) [ ] 0/1 (glob)
323 archiving (foo/bar) [ ] 0/1 (glob)
320 archiving (foo/bar) [ ] 0/1 (glob)
324 archiving (foo/bar) [ ] 0/1 (glob)
321 archiving (foo/bar) [================================>] 1/1 (glob)
325 archiving (foo/bar) [================================>] 1/1 (glob)
322 archiving (foo/bar) [================================>] 1/1 (glob)
326 archiving (foo/bar) [================================>] 1/1 (glob)
323 \r (esc)
327 \r (esc)
324
328
325 Test archiving a revision that references a subrepo that is not yet
329 Test archiving a revision that references a subrepo that is not yet
326 cloned:
330 cloned:
327
331
328 $ hg clone -U . ../empty
332 $ hg clone -U . ../empty
329 $ cd ../empty
333 $ cd ../empty
330 $ hg archive --subrepos -r tip ../archive.tar.gz 2>&1 | $TESTDIR/filtercr.py
334 $ hg archive --subrepos -r tip ../archive.tar.gz 2>&1 | $TESTDIR/filtercr.py
331
335
332 archiving [ ] 0/3
336 archiving [ ] 0/3
333 archiving [ ] 0/3
337 archiving [ ] 0/3
334 archiving [=============> ] 1/3
338 archiving [=============> ] 1/3
335 archiving [=============> ] 1/3
339 archiving [=============> ] 1/3
336 archiving [===========================> ] 2/3
340 archiving [===========================> ] 2/3
337 archiving [===========================> ] 2/3
341 archiving [===========================> ] 2/3
338 archiving [==========================================>] 3/3
342 archiving [==========================================>] 3/3
339 archiving [==========================================>] 3/3
343 archiving [==========================================>] 3/3
340
344
341 archiving (foo) [ ] 0/3
345 archiving (foo) [ ] 0/3
342 archiving (foo) [ ] 0/3
346 archiving (foo) [ ] 0/3
343 archiving (foo) [===========> ] 1/3
347 archiving (foo) [===========> ] 1/3
344 archiving (foo) [===========> ] 1/3
348 archiving (foo) [===========> ] 1/3
345 archiving (foo) [=======================> ] 2/3
349 archiving (foo) [=======================> ] 2/3
346 archiving (foo) [=======================> ] 2/3
350 archiving (foo) [=======================> ] 2/3
347 archiving (foo) [====================================>] 3/3
351 archiving (foo) [====================================>] 3/3
348 archiving (foo) [====================================>] 3/3
352 archiving (foo) [====================================>] 3/3
349
353
350 archiving (foo/bar) [ ] 0/1 (glob)
354 archiving (foo/bar) [ ] 0/1 (glob)
351 archiving (foo/bar) [ ] 0/1 (glob)
355 archiving (foo/bar) [ ] 0/1 (glob)
352 archiving (foo/bar) [================================>] 1/1 (glob)
356 archiving (foo/bar) [================================>] 1/1 (glob)
353 archiving (foo/bar) [================================>] 1/1 (glob)
357 archiving (foo/bar) [================================>] 1/1 (glob)
354
358
355 cloning subrepo foo from $TESTTMP/repo/foo
359 cloning subrepo foo from $TESTTMP/repo/foo
356 cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
360 cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
357
361
358 The newly cloned subrepos contain no working copy:
362 The newly cloned subrepos contain no working copy:
359
363
360 $ hg -R foo summary
364 $ hg -R foo summary
361 parent: -1:000000000000 (no revision checked out)
365 parent: -1:000000000000 (no revision checked out)
362 branch: default
366 branch: default
363 commit: (clean)
367 commit: (clean)
364 update: 4 new changesets (update)
368 update: 4 new changesets (update)
365
369
366 Disable progress extension and cleanup:
370 Disable progress extension and cleanup:
367
371
368 $ mv $HGRCPATH.no-progress $HGRCPATH
372 $ mv $HGRCPATH.no-progress $HGRCPATH
369
373
370 Test archiving when there is a directory in the way for a subrepo
374 Test archiving when there is a directory in the way for a subrepo
371 created by archive:
375 created by archive:
372
376
373 $ hg clone -U . ../almost-empty
377 $ hg clone -U . ../almost-empty
374 $ cd ../almost-empty
378 $ cd ../almost-empty
375 $ mkdir foo
379 $ mkdir foo
376 $ echo f > foo/f
380 $ echo f > foo/f
377 $ hg archive --subrepos -r tip archive
381 $ hg archive --subrepos -r tip archive
378 cloning subrepo foo from $TESTTMP/empty/foo
382 cloning subrepo foo from $TESTTMP/empty/foo
379 abort: destination '$TESTTMP/almost-empty/foo' is not empty (glob)
383 abort: destination '$TESTTMP/almost-empty/foo' is not empty (glob)
380 [255]
384 [255]
381
385
382 Clone and test outgoing:
386 Clone and test outgoing:
383
387
384 $ cd ..
388 $ cd ..
385 $ hg clone repo repo2
389 $ hg clone repo repo2
386 updating to branch default
390 updating to branch default
387 cloning subrepo foo from $TESTTMP/repo/foo
391 cloning subrepo foo from $TESTTMP/repo/foo
388 cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
392 cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
389 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
393 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
390 $ cd repo2
394 $ cd repo2
391 $ hg outgoing -S
395 $ hg outgoing -S
392 comparing with $TESTTMP/repo (glob)
396 comparing with $TESTTMP/repo (glob)
393 searching for changes
397 searching for changes
394 no changes found
398 no changes found
395 comparing with $TESTTMP/repo/foo
399 comparing with $TESTTMP/repo/foo
396 searching for changes
400 searching for changes
397 no changes found
401 no changes found
398 comparing with $TESTTMP/repo/foo/bar
402 comparing with $TESTTMP/repo/foo/bar
399 searching for changes
403 searching for changes
400 no changes found
404 no changes found
401 [1]
405 [1]
402
406
403 Make nested change:
407 Make nested change:
404
408
405 $ echo y4 >> foo/y.txt
409 $ echo y4 >> foo/y.txt
406 $ hg diff --nodates -S
410 $ hg diff --nodates -S
407 diff -r 65903cebad86 foo/y.txt
411 diff -r 65903cebad86 foo/y.txt
408 --- a/foo/y.txt
412 --- a/foo/y.txt
409 +++ b/foo/y.txt
413 +++ b/foo/y.txt
410 @@ -1,3 +1,4 @@
414 @@ -1,3 +1,4 @@
411 y1
415 y1
412 y2
416 y2
413 y3
417 y3
414 +y4
418 +y4
415 $ hg commit --subrepos -m 3-4-2
419 $ hg commit --subrepos -m 3-4-2
416 committing subrepository foo
420 committing subrepository foo
417 $ hg outgoing -S
421 $ hg outgoing -S
418 comparing with $TESTTMP/repo (glob)
422 comparing with $TESTTMP/repo (glob)
419 searching for changes
423 searching for changes
420 changeset: 3:2655b8ecc4ee
424 changeset: 3:2655b8ecc4ee
421 tag: tip
425 tag: tip
422 user: test
426 user: test
423 date: Thu Jan 01 00:00:00 1970 +0000
427 date: Thu Jan 01 00:00:00 1970 +0000
424 summary: 3-4-2
428 summary: 3-4-2
425
429
426 comparing with $TESTTMP/repo/foo
430 comparing with $TESTTMP/repo/foo
427 searching for changes
431 searching for changes
428 changeset: 4:e96193d6cb36
432 changeset: 4:e96193d6cb36
429 tag: tip
433 tag: tip
430 user: test
434 user: test
431 date: Thu Jan 01 00:00:00 1970 +0000
435 date: Thu Jan 01 00:00:00 1970 +0000
432 summary: 3-4-2
436 summary: 3-4-2
433
437
434 comparing with $TESTTMP/repo/foo/bar
438 comparing with $TESTTMP/repo/foo/bar
435 searching for changes
439 searching for changes
436 no changes found
440 no changes found
437
441
438
442
439 Switch to original repo and setup default path:
443 Switch to original repo and setup default path:
440
444
441 $ cd ../repo
445 $ cd ../repo
442 $ echo '[paths]' >> .hg/hgrc
446 $ echo '[paths]' >> .hg/hgrc
443 $ echo 'default = ../repo2' >> .hg/hgrc
447 $ echo 'default = ../repo2' >> .hg/hgrc
444
448
445 Test incoming:
449 Test incoming:
446
450
447 $ hg incoming -S
451 $ hg incoming -S
448 comparing with $TESTTMP/repo2 (glob)
452 comparing with $TESTTMP/repo2 (glob)
449 searching for changes
453 searching for changes
450 changeset: 3:2655b8ecc4ee
454 changeset: 3:2655b8ecc4ee
451 tag: tip
455 tag: tip
452 user: test
456 user: test
453 date: Thu Jan 01 00:00:00 1970 +0000
457 date: Thu Jan 01 00:00:00 1970 +0000
454 summary: 3-4-2
458 summary: 3-4-2
455
459
456 comparing with $TESTTMP/repo2/foo
460 comparing with $TESTTMP/repo2/foo
457 searching for changes
461 searching for changes
458 changeset: 4:e96193d6cb36
462 changeset: 4:e96193d6cb36
459 tag: tip
463 tag: tip
460 user: test
464 user: test
461 date: Thu Jan 01 00:00:00 1970 +0000
465 date: Thu Jan 01 00:00:00 1970 +0000
462 summary: 3-4-2
466 summary: 3-4-2
463
467
464 comparing with $TESTTMP/repo2/foo/bar
468 comparing with $TESTTMP/repo2/foo/bar
465 searching for changes
469 searching for changes
466 no changes found
470 no changes found
467
471
468 $ hg incoming -S --bundle incoming.hg
472 $ hg incoming -S --bundle incoming.hg
469 abort: cannot combine --bundle and --subrepos
473 abort: cannot combine --bundle and --subrepos
470 [255]
474 [255]
471
475
472 Test missing subrepo:
476 Test missing subrepo:
473
477
474 $ rm -r foo
478 $ rm -r foo
475 $ hg status -S
479 $ hg status -S
476 warning: error "unknown revision '65903cebad86f1a84bd4f1134f62fa7dcb7a1c98'" in subrepository "foo"
480 warning: error "unknown revision '65903cebad86f1a84bd4f1134f62fa7dcb7a1c98'" in subrepository "foo"
477
481
478 Issue2619: IndexError: list index out of range on hg add with subrepos
482 Issue2619: IndexError: list index out of range on hg add with subrepos
479 The subrepo must sorts after the explicit filename.
483 The subrepo must sorts after the explicit filename.
480
484
481 $ cd ..
485 $ cd ..
482 $ hg init test
486 $ hg init test
483 $ cd test
487 $ cd test
484 $ hg init x
488 $ hg init x
485 $ echo "x = x" >> .hgsub
489 $ echo "x = x" >> .hgsub
486 $ hg add .hgsub
490 $ hg add .hgsub
487 $ touch a x/a
491 $ touch a x/a
488 $ hg add a x/a
492 $ hg add a x/a
General Comments 0
You need to be logged in to leave comments. Login now