kill trailing whitespace
Dirkjan Ochtman
r9312:c5f0825c default
@@ -1,1136 +1,1136
# Subversion 1.4/1.5 Python API backend
#
# Copyright(C) 2007 Daniel Holth et al

import os
import re
import sys
import cPickle as pickle
import tempfile
import urllib

from mercurial import strutil, util, encoding
from mercurial.i18n import _

# Subversion stuff. Works best with very recent Python SVN bindings
# e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
# these bindings.

from cStringIO import StringIO

from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
from common import commandline, converter_source, converter_sink, mapfile

try:
    from svn.core import SubversionException, Pool
    import svn
    import svn.client
    import svn.core
    import svn.ra
    import svn.delta
    import transport
    import warnings
    warnings.filterwarnings('ignore',
                            module='svn.core',
                            category=DeprecationWarning)

except ImportError:
    pass

class SvnPathNotFound(Exception):
    pass

def geturl(path):
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path

def optrev(number):
    optrev = svn.core.svn_opt_revision_t()
    optrev.kind = svn.core.svn_opt_revision_number
    optrev.value.number = number
    return optrev

class changedpath(object):
    def __init__(self, p):
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
        self.action = p.action

def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)

def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)

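# A sketch of the parent/child protocol assumed by the code above and below:
# get_log_child() pickles one (orig_paths, revnum, author, date, message)
# tuple per revision onto the pipe, then a final None on success, or a bare
# Subversion error number if svn.ra.get_log() fails; logstream() keeps
# unpickling entries until it hits a value that does not unpack as a tuple.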
class logstream(object):
    """Interruptible revision log iterator."""
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        while True:
            entry = pickle.load(self._stdout)
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None


# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
# directory.
def filecheck(path, proto):
    for x in ('locks', 'hooks', 'format', 'db', ):
        if not os.path.exists(os.path.join(path, x)):
            return False
    return True

# Check to see if a given path is the root of an svn repo over http. We verify
# this by requesting a version-controlled URL we know can't exist and looking
# for the svn-specific "not found" XML.
def httpcheck(path, proto):
    return ('<m:human-readable errcode="160013">' in
            urllib.urlopen('%s://%s/!svn/ver/0/.svn' % (proto, path)).read())

protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
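# Illustrative walk (hypothetical path): for 'file:///srv/svn/repo/trunk',
# issvnurl() below strips the scheme, then calls filecheck() on
# /srv/svn/repo/trunk and next on /srv/svn/repo, succeeding as soon as the
# svn-specific entries (locks, hooks, format, db) are all present.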
def issvnurl(url):
    try:
        proto, path = url.split('://', 1)
        path = urllib.url2pathname(path)
    except ValueError:
        proto = 'file'
        path = os.path.abspath(url)
    path = path.replace(os.sep, '/')
    check = protomap.get(proto, lambda p, p2: False)
    while '/' in path:
        if check(path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False

# SVN conversion code stolen from bzr-svn and tailor
#
# Subversion looks like a versioned filesystem, branches structures
# are defined by conventions and not enforced by the tool. First,
# we define the potential branches (modules) as "trunk" and "branches"
# children directories. Revisions are then identified by their
# module and revision number (and a repository identifier).
#
# The revision graph is really a tree (or a forest). By default, a
# revision parent is the previous revision in the same module. If the
# module directory is copied/moved from another module then the
# revision is the module root and its parent the source revision in
# the parent module. A revision has at most one parent.
#
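# For reference, a revision identifier built by revid() below looks like
# (illustrative uuid and module):
#   svn:00000000-0000-0000-0000-000000000000/trunk@1234
# i.e. 'svn:' + repository uuid + module path + '@' + revision number,
# which revsplit() takes apart again.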
class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(url)):
            raise NoRepo("%s does not look like a Subversion repo" % url)

        try:
            SubversionException
        except NameError:
            raise MissingTool(_('Subversion python bindings could not be loaded'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at+1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo("%s does not look like a Subversion repo" % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None

    def setrevmap(self, revmap):
        lastrevs = {}
        for revid in revmap.iterkeys():
            uuid, module, revnum = self.revsplit(revid)
            lastrevnum = lastrevs.setdefault(module, revnum)
            if revnum > lastrevnum:
                lastrevs[module] = revnum
        self.lastrevs = lastrevs

    def exists(self, path, optrev):
        try:
            svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
                          optrev, False, self.ctx)
            return True
        except SubversionException:
            return False

    def getheads(self):

        def isdir(path, revnum):
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                     % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(_('svn: no revision found after start revision %d')
                                 % self.startrev)

        return self.heads

    def getfile(self, file, rev):
        data, mode = self._getfile(file, rev)
        self.modecache[(file, rev)] = mode
        return data

    def getmode(self, file, rev):
        return self.modecache[(file, rev)]

    def getchanges(self, rev):
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        self.modecache = {}
        (paths, parents) = self.paths[rev]
        if parents:
            files, copies = self.expandpaths(rev, paths, parents)
        else:
            # Perform a full checkout on roots
            uuid, module, revnum = self.revsplit(rev)
            entries = svn.client.ls(self.baseurl + urllib.quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n,e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            copies = {}

        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)

    def getchangedfiles(self, rev, i):
        changes = self.getchanges(rev)
        self._changescache = (rev, changes)
        return [f[0] for f in changes[0]]

    def getcommit(self, rev):
        if rev not in self.commits:
            uuid, module, revnum = self.revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            #   revision graph backward traversal. Cache all of them
            #   down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            #   isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
        commit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return commit

    def gettags(self):
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
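        # Worked example (hypothetical layout): copying /branches/1.0 to
        # /tags/RELEASE_1_0 in r42 is seen during the backward traversal as a
        # copy into the tags directory, and ends up recording the tag
        # 'RELEASE_1_0' at the latest revision touching /branches/1.0 at or
        # before r42 (via self.latest()).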
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        try:
            for entry in self._getlog([self.tags], start, self.startrev):
                origpaths, revnum, author, date, message = entry
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                remainings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        remainings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = remainings
                tagspath = srctagspath

        except SubversionException:
            self.ui.note(_('no tags found at revision %d\n') % start)
        return tags

    def converted(self, rev, destrev):
        if not self.wc:
            return
        if self.convertfp is None:
            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                                  'a')
        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
        self.convertfp.flush()

    def revid(self, revnum, module=None):
        return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)

    def revnum(self, rev):
        return int(rev.split('@')[-1])

    def revsplit(self, rev):
        url, revnum = rev.rsplit('@', 1)
        revnum = int(revnum)
        parts = url.split('/', 1)
        uuid = parts.pop(0)[4:]
        mod = ''
        if parts:
            mod = '/' + parts[0]
        return uuid, mod, revnum

    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if computed module does not
        belong to rootmodule subtree.
        """
532 if not path.startswith(self.rootmodule):
532 if not path.startswith(self.rootmodule):
533 # Requests on foreign branches may be forbidden at server level
533 # Requests on foreign branches may be forbidden at server level
534 self.ui.debug(_('ignoring foreign branch %r\n') % path)
534 self.ui.debug(_('ignoring foreign branch %r\n') % path)
535 return None
535 return None
536
536
537 if not stop:
537 if not stop:
538 stop = svn.ra.get_latest_revnum(self.ra)
538 stop = svn.ra.get_latest_revnum(self.ra)
539 try:
539 try:
540 prevmodule = self.reparent('')
540 prevmodule = self.reparent('')
541 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
541 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
542 self.reparent(prevmodule)
542 self.reparent(prevmodule)
543 except SubversionException:
543 except SubversionException:
544 dirent = None
544 dirent = None
545 if not dirent:
545 if not dirent:
546 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
546 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
547
547
548 # stat() gives us the previous revision on this line of
548 # stat() gives us the previous revision on this line of
549 # development, but it might be in *another module*. Fetch the
549 # development, but it might be in *another module*. Fetch the
550 # log and detect renames down to the latest revision.
550 # log and detect renames down to the latest revision.
551 stream = self._getlog([path], stop, dirent.created_rev)
551 stream = self._getlog([path], stop, dirent.created_rev)
552 try:
552 try:
553 for entry in stream:
553 for entry in stream:
554 paths, revnum, author, date, message = entry
554 paths, revnum, author, date, message = entry
555 if revnum <= dirent.created_rev:
555 if revnum <= dirent.created_rev:
556 break
556 break
557
557
558 for p in paths:
558 for p in paths:
559 if not path.startswith(p) or not paths[p].copyfrom_path:
559 if not path.startswith(p) or not paths[p].copyfrom_path:
560 continue
560 continue
561 newpath = paths[p].copyfrom_path + path[len(p):]
561 newpath = paths[p].copyfrom_path + path[len(p):]
562 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
562 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
563 (path, newpath, revnum))
563 (path, newpath, revnum))
564 path = newpath
564 path = newpath
565 break
565 break
566 finally:
566 finally:
567 stream.close()
567 stream.close()
568
568
569 if not path.startswith(self.rootmodule):
569 if not path.startswith(self.rootmodule):
570 self.ui.debug(_('ignoring foreign branch %r\n') % path)
570 self.ui.debug(_('ignoring foreign branch %r\n') % path)
571 return None
571 return None
572 return self.revid(dirent.created_rev, path)
572 return self.revid(dirent.created_rev, path)
573
573
574 def reparent(self, module):
574 def reparent(self, module):
575 """Reparent the svn transport and return the previous parent."""
575 """Reparent the svn transport and return the previous parent."""
576 if self.prevmodule == module:
576 if self.prevmodule == module:
577 return module
577 return module
578 svnurl = self.baseurl + urllib.quote(module)
578 svnurl = self.baseurl + urllib.quote(module)
579 prevmodule = self.prevmodule
579 prevmodule = self.prevmodule
580 if prevmodule is None:
580 if prevmodule is None:
581 prevmodule = ''
581 prevmodule = ''
582 self.ui.debug(_("reparent to %s\n") % svnurl)
582 self.ui.debug(_("reparent to %s\n") % svnurl)
583 svn.ra.reparent(self.ra, svnurl)
583 svn.ra.reparent(self.ra, svnurl)
584 self.prevmodule = module
584 self.prevmodule = module
585 return prevmodule
585 return prevmodule
586
586
587 def expandpaths(self, rev, paths, parents):
587 def expandpaths(self, rev, paths, parents):
588 entries = []
588 entries = []
589 # Map of entrypath, revision for finding source of deleted
589 # Map of entrypath, revision for finding source of deleted
590 # revisions.
590 # revisions.
591 copyfrom = {}
591 copyfrom = {}
592 copies = {}
592 copies = {}
593
593
594 new_module, revnum = self.revsplit(rev)[1:]
594 new_module, revnum = self.revsplit(rev)[1:]
595 if new_module != self.module:
595 if new_module != self.module:
596 self.module = new_module
596 self.module = new_module
597 self.reparent(self.module)
597 self.reparent(self.module)
598
598
599 for path, ent in paths:
599 for path, ent in paths:
600 entrypath = self.getrelpath(path)
600 entrypath = self.getrelpath(path)
601
601
602 kind = self._checkpath(entrypath, revnum)
602 kind = self._checkpath(entrypath, revnum)
603 if kind == svn.core.svn_node_file:
603 if kind == svn.core.svn_node_file:
604 entries.append(self.recode(entrypath))
604 entries.append(self.recode(entrypath))
605 if not ent.copyfrom_path or not parents:
605 if not ent.copyfrom_path or not parents:
606 continue
606 continue
607 # Copy sources not in parent revisions cannot be
607 # Copy sources not in parent revisions cannot be
608 # represented, ignore their origin for now
608 # represented, ignore their origin for now
609 pmodule, prevnum = self.revsplit(parents[0])[1:]
609 pmodule, prevnum = self.revsplit(parents[0])[1:]
610 if ent.copyfrom_rev < prevnum:
610 if ent.copyfrom_rev < prevnum:
611 continue
611 continue
612 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
612 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
613 if not copyfrom_path:
613 if not copyfrom_path:
614 continue
614 continue
615 self.ui.debug(_("copied to %s from %s@%s\n") %
615 self.ui.debug(_("copied to %s from %s@%s\n") %
616 (entrypath, copyfrom_path, ent.copyfrom_rev))
616 (entrypath, copyfrom_path, ent.copyfrom_rev))
617 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
617 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
618 elif kind == 0: # gone, but had better be a deleted *file*
618 elif kind == 0: # gone, but had better be a deleted *file*
619 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
619 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
620 pmodule, prevnum = self.revsplit(parents[0])[1:]
620 pmodule, prevnum = self.revsplit(parents[0])[1:]
621 parentpath = pmodule + "/" + entrypath
621 parentpath = pmodule + "/" + entrypath
622 self.ui.debug(_("entry %s\n") % parentpath)
622 self.ui.debug(_("entry %s\n") % parentpath)
623
623
624 # We can avoid the reparent calls if the module has
624 # We can avoid the reparent calls if the module has
625 # not changed but it probably does not worth the pain.
625 # not changed but it probably does not worth the pain.
626 prevmodule = self.reparent('')
626 prevmodule = self.reparent('')
627 fromkind = svn.ra.check_path(self.ra, parentpath.strip('/'), prevnum)
627 fromkind = svn.ra.check_path(self.ra, parentpath.strip('/'), prevnum)
628 self.reparent(prevmodule)
628 self.reparent(prevmodule)
629
629
630 if fromkind == svn.core.svn_node_file:
630 if fromkind == svn.core.svn_node_file:
631 entries.append(self.recode(entrypath))
631 entries.append(self.recode(entrypath))
632 elif fromkind == svn.core.svn_node_dir:
632 elif fromkind == svn.core.svn_node_dir:
633 if ent.action == 'C':
633 if ent.action == 'C':
634 children = self._find_children(path, prevnum)
634 children = self._find_children(path, prevnum)
635 else:
635 else:
636 oroot = parentpath.strip('/')
636 oroot = parentpath.strip('/')
637 nroot = path.strip('/')
637 nroot = path.strip('/')
638 children = self._find_children(oroot, prevnum)
638 children = self._find_children(oroot, prevnum)
639 children = [s.replace(oroot,nroot) for s in children]
639 children = [s.replace(oroot,nroot) for s in children]
640
640
641 for child in children:
641 for child in children:
642 childpath = self.getrelpath("/" + child, pmodule)
642 childpath = self.getrelpath("/" + child, pmodule)
643 if not childpath:
643 if not childpath:
644 continue
644 continue
645 if childpath in copies:
645 if childpath in copies:
646 del copies[childpath]
646 del copies[childpath]
647 entries.append(childpath)
647 entries.append(childpath)
648 else:
648 else:
649 self.ui.debug(_('unknown path in revision %d: %s\n') % \
649 self.ui.debug(_('unknown path in revision %d: %s\n') % \
650 (revnum, path))
650 (revnum, path))
651 elif kind == svn.core.svn_node_dir:
651 elif kind == svn.core.svn_node_dir:
652 # If the directory just had a prop change,
652 # If the directory just had a prop change,
653 # then we shouldn't need to look for its children.
653 # then we shouldn't need to look for its children.
654 if ent.action == 'M':
654 if ent.action == 'M':
655 continue
655 continue
656
656
657 children = sorted(self._find_children(path, revnum))
657 children = sorted(self._find_children(path, revnum))
658 for child in children:
658 for child in children:
659 # Can we move a child directory and its
659 # Can we move a child directory and its
660 # parent in the same commit? (probably can). Could
660 # parent in the same commit? (probably can). Could
661 # cause problems if instead of revnum -1,
661 # cause problems if instead of revnum -1,
662 # we have to look in (copyfrom_path, revnum - 1)
662 # we have to look in (copyfrom_path, revnum - 1)
663 entrypath = self.getrelpath("/" + child)
663 entrypath = self.getrelpath("/" + child)
664 if entrypath:
664 if entrypath:
665 # Need to filter out directories here...
665 # Need to filter out directories here...
666 kind = self._checkpath(entrypath, revnum)
666 kind = self._checkpath(entrypath, revnum)
667 if kind != svn.core.svn_node_dir:
667 if kind != svn.core.svn_node_dir:
668 entries.append(self.recode(entrypath))
668 entries.append(self.recode(entrypath))
669
669
670 # Handle directory copies
670 # Handle directory copies
671 if not ent.copyfrom_path or not parents:
671 if not ent.copyfrom_path or not parents:
672 continue
672 continue
673 # Copy sources not in parent revisions cannot be
673 # Copy sources not in parent revisions cannot be
674 # represented, ignore their origin for now
674 # represented, ignore their origin for now
675 pmodule, prevnum = self.revsplit(parents[0])[1:]
675 pmodule, prevnum = self.revsplit(parents[0])[1:]
676 if ent.copyfrom_rev < prevnum:
676 if ent.copyfrom_rev < prevnum:
677 continue
677 continue
678 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
678 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
679 if not copyfrompath:
679 if not copyfrompath:
680 continue
680 continue
681 copyfrom[path] = ent
681 copyfrom[path] = ent
682 self.ui.debug(_("mark %s came from %s:%d\n")
682 self.ui.debug(_("mark %s came from %s:%d\n")
683 % (path, copyfrompath, ent.copyfrom_rev))
683 % (path, copyfrompath, ent.copyfrom_rev))
684 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
684 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
685 children.sort()
685 children.sort()
686 for child in children:
686 for child in children:
687 entrypath = self.getrelpath("/" + child, pmodule)
687 entrypath = self.getrelpath("/" + child, pmodule)
688 if not entrypath:
688 if not entrypath:
689 continue
689 continue
690 copytopath = path + entrypath[len(copyfrompath):]
690 copytopath = path + entrypath[len(copyfrompath):]
691 copytopath = self.getrelpath(copytopath)
691 copytopath = self.getrelpath(copytopath)
692 copies[self.recode(copytopath)] = self.recode(entrypath)
692 copies[self.recode(copytopath)] = self.recode(entrypath)
693
693
694 return (list(set(entries)), copies)
694 return (list(set(entries)), copies)
695
695
696 def _fetch_revisions(self, from_revnum, to_revnum):
696 def _fetch_revisions(self, from_revnum, to_revnum):
697 if from_revnum < to_revnum:
697 if from_revnum < to_revnum:
698 from_revnum, to_revnum = to_revnum, from_revnum
698 from_revnum, to_revnum = to_revnum, from_revnum
699
699
700 self.child_cset = None
700 self.child_cset = None
701
701
702 def parselogentry(orig_paths, revnum, author, date, message):
702 def parselogentry(orig_paths, revnum, author, date, message):
703 """Return the parsed commit object or None, and True if
703 """Return the parsed commit object or None, and True if
704 the revision is a branch root.
704 the revision is a branch root.
705 """
705 """
706 self.ui.debug(_("parsing revision %d (%d changes)\n") %
706 self.ui.debug(_("parsing revision %d (%d changes)\n") %
707 (revnum, len(orig_paths)))
707 (revnum, len(orig_paths)))
708
708
709 branched = False
709 branched = False
710 rev = self.revid(revnum)
710 rev = self.revid(revnum)
711 # branch log might return entries for a parent we already have
711 # branch log might return entries for a parent we already have
712
712
713 if rev in self.commits or revnum < to_revnum:
713 if rev in self.commits or revnum < to_revnum:
714 return None, branched
714 return None, branched
715
715
716 parents = []
716 parents = []
717 # check whether this revision is the start of a branch or part
717 # check whether this revision is the start of a branch or part
718 # of a branch renaming
718 # of a branch renaming
719 orig_paths = sorted(orig_paths.iteritems())
719 orig_paths = sorted(orig_paths.iteritems())
720 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
720 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
721 if root_paths:
721 if root_paths:
722 path, ent = root_paths[-1]
722 path, ent = root_paths[-1]
723 if ent.copyfrom_path:
723 if ent.copyfrom_path:
724 branched = True
724 branched = True
725 newpath = ent.copyfrom_path + self.module[len(path):]
725 newpath = ent.copyfrom_path + self.module[len(path):]
726 # ent.copyfrom_rev may not be the actual last revision
726 # ent.copyfrom_rev may not be the actual last revision
727 previd = self.latest(newpath, ent.copyfrom_rev)
727 previd = self.latest(newpath, ent.copyfrom_rev)
728 if previd is not None:
728 if previd is not None:
729 prevmodule, prevnum = self.revsplit(previd)[1:]
729 prevmodule, prevnum = self.revsplit(previd)[1:]
730 if prevnum >= self.startrev:
730 if prevnum >= self.startrev:
731 parents = [previd]
731 parents = [previd]
732 self.ui.note(_('found parent of branch %s at %d: %s\n') %
732 self.ui.note(_('found parent of branch %s at %d: %s\n') %
733 (self.module, prevnum, prevmodule))
733 (self.module, prevnum, prevmodule))
734 else:
734 else:
735 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
735 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
736
736
737 paths = []
737 paths = []
738 # filter out unrelated paths
738 # filter out unrelated paths
739 for path, ent in orig_paths:
739 for path, ent in orig_paths:
740 if self.getrelpath(path) is None:
740 if self.getrelpath(path) is None:
741 continue
741 continue
742 paths.append((path, ent))
742 paths.append((path, ent))
743
743
744 # Example SVN datetime. Includes microseconds.
744 # Example SVN datetime. Includes microseconds.
745 # ISO-8601 conformant
745 # ISO-8601 conformant
746 # '2007-01-04T17:35:00.902377Z'
746 # '2007-01-04T17:35:00.902377Z'
747 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
747 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
748
748
749 log = message and self.recode(message) or ''
749 log = message and self.recode(message) or ''
750 author = author and self.recode(author) or ''
750 author = author and self.recode(author) or ''
751 try:
751 try:
752 branch = self.module.split("/")[-1]
752 branch = self.module.split("/")[-1]
753 if branch == 'trunk':
753 if branch == 'trunk':
754 branch = ''
754 branch = ''
755 except IndexError:
755 except IndexError:
756 branch = None
756 branch = None
757
757
758 cset = commit(author=author,
758 cset = commit(author=author,
759 date=util.datestr(date),
759 date=util.datestr(date),
760 desc=log,
760 desc=log,
761 parents=parents,
761 parents=parents,
762 branch=branch,
762 branch=branch,
763 rev=rev)
763 rev=rev)
764
764
765 self.commits[rev] = cset
765 self.commits[rev] = cset
766 # The parents list is *shared* among self.paths and the
766 # The parents list is *shared* among self.paths and the
767 # commit object. Both will be updated below.
767 # commit object. Both will be updated below.
768 self.paths[rev] = (paths, cset.parents)
768 self.paths[rev] = (paths, cset.parents)
769 if self.child_cset and not self.child_cset.parents:
769 if self.child_cset and not self.child_cset.parents:
770 self.child_cset.parents[:] = [rev]
770 self.child_cset.parents[:] = [rev]
771 self.child_cset = cset
771 self.child_cset = cset
772 return cset, branched
772 return cset, branched
773
773
774 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
774 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
775 (self.module, from_revnum, to_revnum))
775 (self.module, from_revnum, to_revnum))
776
776
777 try:
777 try:
778 firstcset = None
778 firstcset = None
779 lastonbranch = False
779 lastonbranch = False
780 stream = self._getlog([self.module], from_revnum, to_revnum)
780 stream = self._getlog([self.module], from_revnum, to_revnum)
781 try:
781 try:
782 for entry in stream:
782 for entry in stream:
783 paths, revnum, author, date, message = entry
783 paths, revnum, author, date, message = entry
784 if revnum < self.startrev:
784 if revnum < self.startrev:
785 lastonbranch = True
785 lastonbranch = True
786 break
786 break
787 if not paths:
787 if not paths:
788 self.ui.debug(_('revision %d has no entries\n') % revnum)
788 self.ui.debug(_('revision %d has no entries\n') % revnum)
789 continue
789 continue
790 cset, lastonbranch = parselogentry(paths, revnum, author,
790 cset, lastonbranch = parselogentry(paths, revnum, author,
791 date, message)
791 date, message)
792 if cset:
792 if cset:
793 firstcset = cset
793 firstcset = cset
794 if lastonbranch:
794 if lastonbranch:
795 break
795 break
796 finally:
796 finally:
797 stream.close()
797 stream.close()
798
798
799 if not lastonbranch and firstcset and not firstcset.parents:
799 if not lastonbranch and firstcset and not firstcset.parents:
800 # The first revision of the sequence (the last fetched one)
800 # The first revision of the sequence (the last fetched one)
801 # has invalid parents if not a branch root. Find the parent
801 # has invalid parents if not a branch root. Find the parent
802 # revision now, if any.
802 # revision now, if any.
803 try:
803 try:
804 firstrevnum = self.revnum(firstcset.rev)
804 firstrevnum = self.revnum(firstcset.rev)
805 if firstrevnum > 1:
805 if firstrevnum > 1:
806 latest = self.latest(self.module, firstrevnum - 1)
806 latest = self.latest(self.module, firstrevnum - 1)
807 if latest:
807 if latest:
808 firstcset.parents.append(latest)
808 firstcset.parents.append(latest)
809 except SvnPathNotFound:
809 except SvnPathNotFound:
810 pass
810 pass
811 except SubversionException, (inst, num):
811 except SubversionException, (inst, num):
812 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
812 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
813 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
813 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
814 raise
814 raise
815
815
816 def _getfile(self, file, rev):
816 def _getfile(self, file, rev):
817 # TODO: ra.get_file transmits the whole file instead of diffs.
817 # TODO: ra.get_file transmits the whole file instead of diffs.
818 mode = ''
818 mode = ''
819 try:
819 try:
820 new_module, revnum = self.revsplit(rev)[1:]
820 new_module, revnum = self.revsplit(rev)[1:]
821 if self.module != new_module:
821 if self.module != new_module:
822 self.module = new_module
822 self.module = new_module
823 self.reparent(self.module)
823 self.reparent(self.module)
824 io = StringIO()
824 io = StringIO()
825 info = svn.ra.get_file(self.ra, file, revnum, io)
825 info = svn.ra.get_file(self.ra, file, revnum, io)
826 data = io.getvalue()
826 data = io.getvalue()
827 # ra.get_files() seems to keep a reference on the input buffer
827 # ra.get_files() seems to keep a reference on the input buffer
828 # preventing collection. Release it explicitely.
828 # preventing collection. Release it explicitely.
829 io.close()
829 io.close()
830 if isinstance(info, list):
830 if isinstance(info, list):
831 info = info[-1]
831 info = info[-1]
832 mode = ("svn:executable" in info) and 'x' or ''
832 mode = ("svn:executable" in info) and 'x' or ''
833 mode = ("svn:special" in info) and 'l' or mode
833 mode = ("svn:special" in info) and 'l' or mode
834 except SubversionException, e:
834 except SubversionException, e:
835 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
835 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
836 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
836 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
837 if e.apr_err in notfound: # File not found
837 if e.apr_err in notfound: # File not found
838 raise IOError()
838 raise IOError()
839 raise
839 raise
840 if mode == 'l':
840 if mode == 'l':
841 link_prefix = "link "
841 link_prefix = "link "
842 if data.startswith(link_prefix):
842 if data.startswith(link_prefix):
843 data = data[len(link_prefix):]
843 data = data[len(link_prefix):]
844 return data, mode
844 return data, mode
845
845
846 def _find_children(self, path, revnum):
846 def _find_children(self, path, revnum):
847 path = path.strip('/')
847 path = path.strip('/')
848 pool = Pool()
848 pool = Pool()
849 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
849 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
850 return ['%s/%s' % (path, x) for x in
850 return ['%s/%s' % (path, x) for x in
851 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
851 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
852
852
853 def getrelpath(self, path, module=None):
853 def getrelpath(self, path, module=None):
854 if module is None:
854 if module is None:
855 module = self.module
855 module = self.module
856 # Given the repository url of this wc, say
856 # Given the repository url of this wc, say
857 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
857 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
858 # extract the "entry" portion (a relative path) from what
858 # extract the "entry" portion (a relative path) from what
859 # svn log --xml says, ie
859 # svn log --xml says, ie
860 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
860 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
861 # that is to say "tests/PloneTestCase.py"
861 # that is to say "tests/PloneTestCase.py"
862 if path.startswith(module):
862 if path.startswith(module):
863 relative = path.rstrip('/')[len(module):]
863 relative = path.rstrip('/')[len(module):]
864 if relative.startswith('/'):
864 if relative.startswith('/'):
865 return relative[1:]
865 return relative[1:]
866 elif relative == '':
866 elif relative == '':
867 return relative
867 return relative
868
868
869 # The path is outside our tracked tree...
869 # The path is outside our tracked tree...
870 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
870 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
871 return None
871 return None
872
872
873 def _checkpath(self, path, revnum):
873 def _checkpath(self, path, revnum):
874 # ra.check_path does not like leading slashes very much; they lead
874 # ra.check_path does not like leading slashes very much; they lead
875 # to PROPFIND errors from subversion
875 # to PROPFIND errors from subversion
876 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
876 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
877
877
878 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
878 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
879 strict_node_history=False):
879 strict_node_history=False):
880 # Normalize path names, svn >= 1.5 only wants paths relative to
880 # Normalize path names, svn >= 1.5 only wants paths relative to
881 # supplied URL
881 # supplied URL
882 relpaths = []
882 relpaths = []
883 for p in paths:
883 for p in paths:
884 if not p.startswith('/'):
884 if not p.startswith('/'):
885 p = self.module + '/' + p
885 p = self.module + '/' + p
886 relpaths.append(p.strip('/'))
886 relpaths.append(p.strip('/'))
887 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
887 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
888 strict_node_history]
888 strict_node_history]
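# Fetching the log is delegated to a child "hg debugsvnlog" process:
# the request below is serialized with encodeargs(), written to the
# child's stdin, and the child's stdout is parsed back into a stream
# of log entries by logstream().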
889 arg = encodeargs(args)
889 arg = encodeargs(args)
890 hgexe = util.hgexecutable()
890 hgexe = util.hgexecutable()
891 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
891 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
892 stdin, stdout = util.popen2(cmd)
892 stdin, stdout = util.popen2(cmd)
893 stdin.write(arg)
893 stdin.write(arg)
894 stdin.close()
894 stdin.close()
895 return logstream(stdout)
895 return logstream(stdout)
896
896
897 pre_revprop_change = '''#!/bin/sh
897 pre_revprop_change = '''#!/bin/sh
898
898
899 REPOS="$1"
899 REPOS="$1"
900 REV="$2"
900 REV="$2"
901 USER="$3"
901 USER="$3"
902 PROPNAME="$4"
902 PROPNAME="$4"
903 ACTION="$5"
903 ACTION="$5"
904
904
905 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
905 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
906 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
906 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
907 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
907 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
908
908
909 echo "Changing prohibited revision property" >&2
909 echo "Changing prohibited revision property" >&2
910 exit 1
910 exit 1
911 '''
911 '''
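# Subversion rejects revision property changes unless the repository's
# pre-revprop-change hook exits successfully. The script above is
# installed into repositories created by this sink so that the
# hg:convert-rev and hg:convert-branch revprops set after each commit
# (see putcommit below) are accepted, while other revprop edits stay
# prohibited.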
912
912
913 class svn_sink(converter_sink, commandline):
913 class svn_sink(converter_sink, commandline):
914 commit_re = re.compile(r'Committed revision (\d+).', re.M)
914 commit_re = re.compile(r'Committed revision (\d+).', re.M)
915
915
916 def prerun(self):
916 def prerun(self):
917 if self.wc:
917 if self.wc:
918 os.chdir(self.wc)
918 os.chdir(self.wc)
919
919
920 def postrun(self):
920 def postrun(self):
921 if self.wc:
921 if self.wc:
922 os.chdir(self.cwd)
922 os.chdir(self.cwd)
923
923
924 def join(self, name):
924 def join(self, name):
925 return os.path.join(self.wc, '.svn', name)
925 return os.path.join(self.wc, '.svn', name)
926
926
927 def revmapfile(self):
927 def revmapfile(self):
928 return self.join('hg-shamap')
928 return self.join('hg-shamap')
929
929
930 def authorfile(self):
930 def authorfile(self):
931 return self.join('hg-authormap')
931 return self.join('hg-authormap')
932
932
933 def __init__(self, ui, path):
933 def __init__(self, ui, path):
934 converter_sink.__init__(self, ui, path)
934 converter_sink.__init__(self, ui, path)
935 commandline.__init__(self, ui, 'svn')
935 commandline.__init__(self, ui, 'svn')
936 self.delete = []
936 self.delete = []
937 self.setexec = []
937 self.setexec = []
938 self.delexec = []
938 self.delexec = []
939 self.copies = []
939 self.copies = []
940 self.wc = None
940 self.wc = None
941 self.cwd = os.getcwd()
941 self.cwd = os.getcwd()
942
942
943 path = os.path.realpath(path)
943 path = os.path.realpath(path)
944
944
945 created = False
945 created = False
946 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
946 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
947 self.wc = path
947 self.wc = path
948 self.run0('update')
948 self.run0('update')
949 else:
949 else:
950 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
950 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
951
951
952 if os.path.isdir(os.path.dirname(path)):
952 if os.path.isdir(os.path.dirname(path)):
953 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
953 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
954 ui.status(_('initializing svn repo %r\n') %
954 ui.status(_('initializing svn repo %r\n') %
955 os.path.basename(path))
955 os.path.basename(path))
956 commandline(ui, 'svnadmin').run0('create', path)
956 commandline(ui, 'svnadmin').run0('create', path)
957 created = path
957 created = path
958 path = util.normpath(path)
958 path = util.normpath(path)
959 if not path.startswith('/'):
959 if not path.startswith('/'):
960 path = '/' + path
960 path = '/' + path
961 path = 'file://' + path
961 path = 'file://' + path
962
962
963 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
963 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
964 self.run0('checkout', path, wcpath)
964 self.run0('checkout', path, wcpath)
965
965
966 self.wc = wcpath
966 self.wc = wcpath
967 self.opener = util.opener(self.wc)
967 self.opener = util.opener(self.wc)
968 self.wopener = util.opener(self.wc)
968 self.wopener = util.opener(self.wc)
969 self.childmap = mapfile(ui, self.join('hg-childmap'))
969 self.childmap = mapfile(ui, self.join('hg-childmap'))
970 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
970 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
971
971
972 if created:
972 if created:
973 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
973 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
974 fp = open(hook, 'w')
974 fp = open(hook, 'w')
975 fp.write(pre_revprop_change)
975 fp.write(pre_revprop_change)
976 fp.close()
976 fp.close()
977 util.set_flags(hook, False, True)
977 util.set_flags(hook, False, True)
978
978
979 xport = transport.SvnRaTransport(url=geturl(path))
979 xport = transport.SvnRaTransport(url=geturl(path))
980 self.uuid = svn.ra.get_uuid(xport.ra)
980 self.uuid = svn.ra.get_uuid(xport.ra)
981
981
982 def wjoin(self, *names):
982 def wjoin(self, *names):
983 return os.path.join(self.wc, *names)
983 return os.path.join(self.wc, *names)
984
984
985 def putfile(self, filename, flags, data):
985 def putfile(self, filename, flags, data):
986 if 'l' in flags:
986 if 'l' in flags:
987 self.wopener.symlink(data, filename)
987 self.wopener.symlink(data, filename)
988 else:
988 else:
989 try:
989 try:
990 if os.path.islink(self.wjoin(filename)):
990 if os.path.islink(self.wjoin(filename)):
991 os.unlink(filename)
991 os.unlink(filename)
992 except OSError:
992 except OSError:
993 pass
993 pass
994 self.wopener(filename, 'w').write(data)
994 self.wopener(filename, 'w').write(data)
995
995
996 if self.is_exec:
996 if self.is_exec:
997 was_exec = self.is_exec(self.wjoin(filename))
997 was_exec = self.is_exec(self.wjoin(filename))
998 else:
998 else:
999 # On filesystems that do not support the execute bit, there is no
999 # On filesystems that do not support the execute bit, there is no
1000 # way to know whether it is set other than asking subversion.
1000 # way to know whether it is set other than asking subversion.
1001 # Setting it unconditionally costs no more and is much simpler.
1001 # Setting it unconditionally costs no more and is much simpler.
1002 was_exec = 'x' not in flags
1002 was_exec = 'x' not in flags
1003
1003
1004 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1004 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1005 if was_exec:
1005 if was_exec:
1006 if 'x' not in flags:
1006 if 'x' not in flags:
1007 self.delexec.append(filename)
1007 self.delexec.append(filename)
1008 else:
1008 else:
1009 if 'x' in flags:
1009 if 'x' in flags:
1010 self.setexec.append(filename)
1010 self.setexec.append(filename)
1011
1011
1012 def _copyfile(self, source, dest):
1012 def _copyfile(self, source, dest):
1013 # SVN's copy command pukes if the destination file exists, but
1013 # SVN's copy command pukes if the destination file exists, but
1014 # our copyfile method expects to record a copy that has
1014 # our copyfile method expects to record a copy that has
1015 # already occurred. Cross the semantic gap.
1015 # already occurred. Cross the semantic gap.
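# If the destination already exists, move it aside under a temporary
# name, let "svn copy" schedule the copy, then put the converted
# content back in place of the copied file.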
1016 wdest = self.wjoin(dest)
1016 wdest = self.wjoin(dest)
1017 exists = os.path.exists(wdest)
1017 exists = os.path.exists(wdest)
1018 if exists:
1018 if exists:
1019 fd, tempname = tempfile.mkstemp(
1019 fd, tempname = tempfile.mkstemp(
1020 prefix='hg-copy-', dir=os.path.dirname(wdest))
1020 prefix='hg-copy-', dir=os.path.dirname(wdest))
1021 os.close(fd)
1021 os.close(fd)
1022 os.unlink(tempname)
1022 os.unlink(tempname)
1023 os.rename(wdest, tempname)
1023 os.rename(wdest, tempname)
1024 try:
1024 try:
1025 self.run0('copy', source, dest)
1025 self.run0('copy', source, dest)
1026 finally:
1026 finally:
1027 if exists:
1027 if exists:
1028 try:
1028 try:
1029 os.unlink(wdest)
1029 os.unlink(wdest)
1030 except OSError:
1030 except OSError:
1031 pass
1031 pass
1032 os.rename(tempname, wdest)
1032 os.rename(tempname, wdest)
1033
1033
1034 def dirs_of(self, files):
1034 def dirs_of(self, files):
1035 dirs = set()
1035 dirs = set()
1036 for f in files:
1036 for f in files:
1037 if os.path.isdir(self.wjoin(f)):
1037 if os.path.isdir(self.wjoin(f)):
1038 dirs.add(f)
1038 dirs.add(f)
1039 for i in strutil.rfindall(f, '/'):
1039 for i in strutil.rfindall(f, '/'):
1040 dirs.add(f[:i])
1040 dirs.add(f[:i])
1041 return dirs
1041 return dirs
1042
1042
1043 def add_dirs(self, files):
1043 def add_dirs(self, files):
1044 add_dirs = [d for d in sorted(self.dirs_of(files))
1044 add_dirs = [d for d in sorted(self.dirs_of(files))
1045 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1045 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1046 if add_dirs:
1046 if add_dirs:
1047 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1047 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1048 return add_dirs
1048 return add_dirs
1049
1049
1050 def add_files(self, files):
1050 def add_files(self, files):
1051 if files:
1051 if files:
1052 self.xargs(files, 'add', quiet=True)
1052 self.xargs(files, 'add', quiet=True)
1053 return files
1053 return files
1054
1054
1055 def tidy_dirs(self, names):
1055 def tidy_dirs(self, names):
1056 deleted = []
1056 deleted = []
1057 for d in sorted(self.dirs_of(names), reverse=True):
1057 for d in sorted(self.dirs_of(names), reverse=True):
1058 wd = self.wjoin(d)
1058 wd = self.wjoin(d)
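# A directory whose only remaining entry is the .svn administrative
# area is effectively empty; schedule it for deletion.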
1059 if os.listdir(wd) == ['.svn']:
1059 if os.listdir(wd) == ['.svn']:
1060 self.run0('delete', d)
1060 self.run0('delete', d)
1061 deleted.append(d)
1061 deleted.append(d)
1062 return deleted
1062 return deleted
1063
1063
1064 def addchild(self, parent, child):
1064 def addchild(self, parent, child):
1065 self.childmap[parent] = child
1065 self.childmap[parent] = child
1066
1066
1067 def revid(self, rev):
1067 def revid(self, rev):
1068 return u"svn:%s@%s" % (self.uuid, rev)
1068 return u"svn:%s@%s" % (self.uuid, rev)
1069
1069
1070 def putcommit(self, files, copies, parents, commit, source, revmap):
1070 def putcommit(self, files, copies, parents, commit, source, revmap):
1071 # Apply changes to working copy
1071 # Apply changes to working copy
1072 for f, v in files:
1072 for f, v in files:
1073 try:
1073 try:
1074 data = source.getfile(f, v)
1074 data = source.getfile(f, v)
1075 except IOError:
1075 except IOError:
1076 self.delete.append(f)
1076 self.delete.append(f)
1077 else:
1077 else:
1078 e = source.getmode(f, v)
1078 e = source.getmode(f, v)
1079 self.putfile(f, e, data)
1079 self.putfile(f, e, data)
1080 if f in copies:
1080 if f in copies:
1081 self.copies.append([copies[f], f])
1081 self.copies.append([copies[f], f])
1082 files = [f[0] for f in files]
1082 files = [f[0] for f in files]
1083
1083
1084 for parent in parents:
1084 for parent in parents:
1085 try:
1085 try:
1086 return self.revid(self.childmap[parent])
1086 return self.revid(self.childmap[parent])
1087 except KeyError:
1087 except KeyError:
1088 pass
1088 pass
1089 entries = set(self.delete)
1089 entries = set(self.delete)
1090 files = frozenset(files)
1090 files = frozenset(files)
1091 entries.update(self.add_dirs(files.difference(entries)))
1091 entries.update(self.add_dirs(files.difference(entries)))
1092 if self.copies:
1092 if self.copies:
1093 for s, d in self.copies:
1093 for s, d in self.copies:
1094 self._copyfile(s, d)
1094 self._copyfile(s, d)
1095 self.copies = []
1095 self.copies = []
1096 if self.delete:
1096 if self.delete:
1097 self.xargs(self.delete, 'delete')
1097 self.xargs(self.delete, 'delete')
1098 self.delete = []
1098 self.delete = []
1099 entries.update(self.add_files(files.difference(entries)))
1099 entries.update(self.add_files(files.difference(entries)))
1100 entries.update(self.tidy_dirs(entries))
1100 entries.update(self.tidy_dirs(entries))
1101 if self.delexec:
1101 if self.delexec:
1102 self.xargs(self.delexec, 'propdel', 'svn:executable')
1102 self.xargs(self.delexec, 'propdel', 'svn:executable')
1103 self.delexec = []
1103 self.delexec = []
1104 if self.setexec:
1104 if self.setexec:
1105 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1105 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1106 self.setexec = []
1106 self.setexec = []
1107
1107
1108 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1108 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1109 fp = os.fdopen(fd, 'w')
1109 fp = os.fdopen(fd, 'w')
1110 fp.write(commit.desc)
1110 fp.write(commit.desc)
1111 fp.close()
1111 fp.close()
1112 try:
1112 try:
1113 output = self.run0('commit',
1113 output = self.run0('commit',
1114 username=util.shortuser(commit.author),
1114 username=util.shortuser(commit.author),
1115 file=messagefile,
1115 file=messagefile,
1116 encoding='utf-8')
1116 encoding='utf-8')
1117 try:
1117 try:
1118 rev = self.commit_re.search(output).group(1)
1118 rev = self.commit_re.search(output).group(1)
1119 except AttributeError:
1119 except AttributeError:
1120 self.ui.warn(_('unexpected svn output:\n'))
1120 self.ui.warn(_('unexpected svn output:\n'))
1121 self.ui.warn(output)
1121 self.ui.warn(output)
1122 raise util.Abort(_('unable to cope with svn output'))
1122 raise util.Abort(_('unable to cope with svn output'))
1123 if commit.rev:
1123 if commit.rev:
1124 self.run('propset', 'hg:convert-rev', commit.rev,
1124 self.run('propset', 'hg:convert-rev', commit.rev,
1125 revprop=True, revision=rev)
1125 revprop=True, revision=rev)
1126 if commit.branch and commit.branch != 'default':
1126 if commit.branch and commit.branch != 'default':
1127 self.run('propset', 'hg:convert-branch', commit.branch,
1127 self.run('propset', 'hg:convert-branch', commit.branch,
1128 revprop=True, revision=rev)
1128 revprop=True, revision=rev)
1129 for parent in parents:
1129 for parent in parents:
1130 self.addchild(parent, rev)
1130 self.addchild(parent, rev)
1131 return self.revid(rev)
1131 return self.revid(rev)
1132 finally:
1132 finally:
1133 os.unlink(messagefile)
1133 os.unlink(messagefile)
1134
1134
1135 def puttags(self, tags):
1135 def puttags(self, tags):
1136 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
1136 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,339 +1,338
1 # tags.py - read tag info from local repository
1 # tags.py - read tag info from local repository
2 #
2 #
3 # Copyright 2009 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009 Matt Mackall <mpm@selenic.com>
4 # Copyright 2009 Greg Ward <greg@gerg.ca>
4 # Copyright 2009 Greg Ward <greg@gerg.ca>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 # Currently this module only deals with reading and caching tags.
9 # Currently this module only deals with reading and caching tags.
10 # Eventually, it could take care of updating (adding/removing/moving)
10 # Eventually, it could take care of updating (adding/removing/moving)
11 # tags too.
11 # tags too.
12
12
13 import os
13 import os
14 from node import nullid, bin, hex, short
14 from node import nullid, bin, hex, short
15 from i18n import _
15 from i18n import _
16 import encoding
16 import encoding
17 import error
17 import error
18
18
19 def _debugalways(ui, *msg):
19 def _debugalways(ui, *msg):
20 ui.write(*msg)
20 ui.write(*msg)
21
21
22 def _debugconditional(ui, *msg):
22 def _debugconditional(ui, *msg):
23 ui.debug(*msg)
23 ui.debug(*msg)
24
24
25 def _debugnever(ui, *msg):
25 def _debugnever(ui, *msg):
26 pass
26 pass
27
27
28 _debug = _debugalways
28 _debug = _debugalways
29 _debug = _debugnever
29 _debug = _debugnever
30
30
31 def findglobaltags1(ui, repo, alltags, tagtypes):
31 def findglobaltags1(ui, repo, alltags, tagtypes):
32 '''Find global tags in repo by reading .hgtags from every head that
32 '''Find global tags in repo by reading .hgtags from every head that
33 has a distinct version of it. Updates the dicts alltags, tagtypes
33 has a distinct version of it. Updates the dicts alltags, tagtypes
34 in place: alltags maps tag name to (node, hist) pair (see _readtags()
34 in place: alltags maps tag name to (node, hist) pair (see _readtags()
35 below), and tagtypes maps tag name to tag type ('global' in this
35 below), and tagtypes maps tag name to tag type ('global' in this
36 case).'''
36 case).'''
37
37
38 seen = set()
38 seen = set()
39 fctx = None
39 fctx = None
40 ctxs = [] # list of filectx
40 ctxs = [] # list of filectx
41 for node in repo.heads():
41 for node in repo.heads():
42 try:
42 try:
43 fnode = repo[node].filenode('.hgtags')
43 fnode = repo[node].filenode('.hgtags')
44 except error.LookupError:
44 except error.LookupError:
45 continue
45 continue
46 if fnode not in seen:
46 if fnode not in seen:
47 seen.add(fnode)
47 seen.add(fnode)
48 if not fctx:
48 if not fctx:
49 fctx = repo.filectx('.hgtags', fileid=fnode)
49 fctx = repo.filectx('.hgtags', fileid=fnode)
50 else:
50 else:
51 fctx = fctx.filectx(fnode)
51 fctx = fctx.filectx(fnode)
52 ctxs.append(fctx)
52 ctxs.append(fctx)
53
53
54 # read the tags file from each head, ending with the tip
54 # read the tags file from each head, ending with the tip
55 for fctx in reversed(ctxs):
55 for fctx in reversed(ctxs):
56 filetags = _readtags(
56 filetags = _readtags(
57 ui, repo, fctx.data().splitlines(), fctx)
57 ui, repo, fctx.data().splitlines(), fctx)
58 _updatetags(filetags, "global", alltags, tagtypes)
58 _updatetags(filetags, "global", alltags, tagtypes)
59
59
60 def findglobaltags2(ui, repo, alltags, tagtypes):
60 def findglobaltags2(ui, repo, alltags, tagtypes):
61 '''Same as findglobaltags1(), but with caching.'''
61 '''Same as findglobaltags1(), but with caching.'''
62 # This is so we can be lazy and assume alltags contains only global
62 # This is so we can be lazy and assume alltags contains only global
63 # tags when we pass it to _writetagcache().
63 # tags when we pass it to _writetagcache().
64 assert len(alltags) == len(tagtypes) == 0, \
64 assert len(alltags) == len(tagtypes) == 0, \
65 "findglobaltags() should be called first"
65 "findglobaltags() should be called first"
66
66
67 (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo)
67 (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo)
68 if cachetags is not None:
68 if cachetags is not None:
69 assert not shouldwrite
69 assert not shouldwrite
70 # XXX is this really 100% correct? are there oddball special
70 # XXX is this really 100% correct? are there oddball special
71 # cases where a global tag should outrank a local tag but won't,
71 # cases where a global tag should outrank a local tag but won't,
72 # because cachetags does not contain rank info?
72 # because cachetags does not contain rank info?
73 _updatetags(cachetags, 'global', alltags, tagtypes)
73 _updatetags(cachetags, 'global', alltags, tagtypes)
74 return
74 return
75
75
76 _debug(ui, "reading tags from %d head(s): %s\n"
76 _debug(ui, "reading tags from %d head(s): %s\n"
77 % (len(heads), map(short, reversed(heads))))
77 % (len(heads), map(short, reversed(heads))))
78 seen = set() # set of fnode
78 seen = set() # set of fnode
79 fctx = None
79 fctx = None
80 for head in reversed(heads): # oldest to newest
80 for head in reversed(heads): # oldest to newest
81 assert head in repo.changelog.nodemap, \
81 assert head in repo.changelog.nodemap, \
82 "tag cache returned bogus head %s" % short(head)
82 "tag cache returned bogus head %s" % short(head)
83
83
84 fnode = tagfnode.get(head)
84 fnode = tagfnode.get(head)
85 if fnode and fnode not in seen:
85 if fnode and fnode not in seen:
86 seen.add(fnode)
86 seen.add(fnode)
87 if not fctx:
87 if not fctx:
88 fctx = repo.filectx('.hgtags', fileid=fnode)
88 fctx = repo.filectx('.hgtags', fileid=fnode)
89 else:
89 else:
90 fctx = fctx.filectx(fnode)
90 fctx = fctx.filectx(fnode)
91
91
92 filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
92 filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
93 _updatetags(filetags, 'global', alltags, tagtypes)
93 _updatetags(filetags, 'global', alltags, tagtypes)
94
94
95 # and update the cache (if necessary)
95 # and update the cache (if necessary)
96 if shouldwrite:
96 if shouldwrite:
97 _writetagcache(ui, repo, heads, tagfnode, alltags)
97 _writetagcache(ui, repo, heads, tagfnode, alltags)
98
98
99 # Set this to findglobaltags1 to disable tag caching.
99 # Set this to findglobaltags1 to disable tag caching.
100 findglobaltags = findglobaltags2
100 findglobaltags = findglobaltags2
101
101
102 def readlocaltags(ui, repo, alltags, tagtypes):
102 def readlocaltags(ui, repo, alltags, tagtypes):
103 '''Read local tags in repo. Update alltags and tagtypes.'''
103 '''Read local tags in repo. Update alltags and tagtypes.'''
104 try:
104 try:
105 # localtags is in the local encoding; re-encode to UTF-8 on
105 # localtags is in the local encoding; re-encode to UTF-8 on
106 # input for consistency with the rest of this module.
106 # input for consistency with the rest of this module.
107 data = repo.opener("localtags").read()
107 data = repo.opener("localtags").read()
108 filetags = _readtags(
108 filetags = _readtags(
109 ui, repo, data.splitlines(), "localtags",
109 ui, repo, data.splitlines(), "localtags",
110 recode=encoding.fromlocal)
110 recode=encoding.fromlocal)
111 _updatetags(filetags, "local", alltags, tagtypes)
111 _updatetags(filetags, "local", alltags, tagtypes)
112 except IOError:
112 except IOError:
113 pass
113 pass
114
114
115 def _readtags(ui, repo, lines, fn, recode=None):
115 def _readtags(ui, repo, lines, fn, recode=None):
116 '''Read tag definitions from a file (or any source of lines).
116 '''Read tag definitions from a file (or any source of lines).
117 Return a mapping from tag name to (node, hist): node is the node id
117 Return a mapping from tag name to (node, hist): node is the node id
118 from the last line read for that name, and hist is the list of node
118 from the last line read for that name, and hist is the list of node
119 ids previously associated with it (in file order). All node ids are
119 ids previously associated with it (in file order). All node ids are
120 binary, not hex.'''
120 binary, not hex.'''
121
121
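# Each well-formed line looks like
#   "0123456789abcdef0123456789abcdef01234567 some-tag"
# (hypothetical 40-character hex node followed by the tag name).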
122 filetags = {} # map tag name to (node, hist)
122 filetags = {} # map tag name to (node, hist)
123 count = 0
123 count = 0
124
124
125 def warn(msg):
125 def warn(msg):
126 ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
126 ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
127
127
128 for line in lines:
128 for line in lines:
129 count += 1
129 count += 1
130 if not line:
130 if not line:
131 continue
131 continue
132 try:
132 try:
133 (nodehex, name) = line.split(" ", 1)
133 (nodehex, name) = line.split(" ", 1)
134 except ValueError:
134 except ValueError:
135 warn(_("cannot parse entry"))
135 warn(_("cannot parse entry"))
136 continue
136 continue
137 name = name.strip()
137 name = name.strip()
138 if recode:
138 if recode:
139 name = recode(name)
139 name = recode(name)
140 try:
140 try:
141 nodebin = bin(nodehex)
141 nodebin = bin(nodehex)
142 except TypeError:
142 except TypeError:
143 warn(_("node '%s' is not well formed") % nodehex)
143 warn(_("node '%s' is not well formed") % nodehex)
144 continue
144 continue
145 if nodebin not in repo.changelog.nodemap:
145 if nodebin not in repo.changelog.nodemap:
146 # silently ignore as pull -r might cause this
146 # silently ignore as pull -r might cause this
147 continue
147 continue
148
148
149 # update filetags
149 # update filetags
150 hist = []
150 hist = []
151 if name in filetags:
151 if name in filetags:
152 n, hist = filetags[name]
152 n, hist = filetags[name]
153 hist.append(n)
153 hist.append(n)
154 filetags[name] = (nodebin, hist)
154 filetags[name] = (nodebin, hist)
155 return filetags
155 return filetags
156
156
157 def _updatetags(filetags, tagtype, alltags, tagtypes):
157 def _updatetags(filetags, tagtype, alltags, tagtypes):
158 '''Incorporate the tag info read from one file into the two
158 '''Incorporate the tag info read from one file into the two
159 dictionaries, alltags and tagtypes, that contain all tag
159 dictionaries, alltags and tagtypes, that contain all tag
160 info (global across all heads plus local).'''
160 info (global across all heads plus local).'''
161
161
162 for name, nodehist in filetags.iteritems():
162 for name, nodehist in filetags.iteritems():
163 if name not in alltags:
163 if name not in alltags:
164 alltags[name] = nodehist
164 alltags[name] = nodehist
165 tagtypes[name] = tagtype
165 tagtypes[name] = tagtype
166 continue
166 continue
167
167
168 # we prefer alltags[name] if:
168 # we prefer alltags[name] if:
169 # it supersedes us OR
169 # it supersedes us OR
170 # mutual supersedes and it has a higher rank
170 # mutual supersedes and it has a higher rank
171 # otherwise we win because we're tip-most
171 # otherwise we win because we're tip-most
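# Hypothetical example: the file being read maps tag "v1" to node A,
# while alltags already maps "v1" to node B. If A appears in B's
# history and B does not appear in A's (or B's history is longer),
# the existing entry B is kept; otherwise the newly read, tip-most
# entry wins.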
172 anode, ahist = nodehist
172 anode, ahist = nodehist
173 bnode, bhist = alltags[name]
173 bnode, bhist = alltags[name]
174 if (bnode != anode and anode in bhist and
174 if (bnode != anode and anode in bhist and
175 (bnode not in ahist or len(bhist) > len(ahist))):
175 (bnode not in ahist or len(bhist) > len(ahist))):
176 anode = bnode
176 anode = bnode
177 ahist.extend([n for n in bhist if n not in ahist])
177 ahist.extend([n for n in bhist if n not in ahist])
178 alltags[name] = anode, ahist
178 alltags[name] = anode, ahist
179 tagtypes[name] = tagtype
179 tagtypes[name] = tagtype
180
180
181
181
182 # The tag cache only stores info about heads, not the tag contents
182 # The tag cache only stores info about heads, not the tag contents
183 # from each head. I.e. it doesn't try to squeeze out the maximum
183 # from each head. I.e. it doesn't try to squeeze out the maximum
184 # performance, but is simpler and has a better chance of actually
184 # performance, but is simpler and has a better chance of actually
185 # working correctly. And this gives the biggest performance win: it
185 # working correctly. And this gives the biggest performance win: it
186 # avoids looking up .hgtags in the manifest for every head, and it
186 # avoids looking up .hgtags in the manifest for every head, and it
187 # can avoid calling heads() at all if there have been no changes to
187 # can avoid calling heads() at all if there have been no changes to
188 # the repo.
188 # the repo.
189
189
190 def _readtagcache(ui, repo):
190 def _readtagcache(ui, repo):
191 '''Read the tag cache and return a tuple (heads, fnodes, cachetags,
191 '''Read the tag cache and return a tuple (heads, fnodes, cachetags,
192 shouldwrite). If the cache is completely up-to-date, cachetags is a
192 shouldwrite). If the cache is completely up-to-date, cachetags is a
193 dict of the form returned by _readtags(); otherwise, it is None and
193 dict of the form returned by _readtags(); otherwise, it is None and
194 heads and fnodes are set. In that case, heads is the list of all
194 heads and fnodes are set. In that case, heads is the list of all
195 heads currently in the repository (ordered from tip to oldest) and
195 heads currently in the repository (ordered from tip to oldest) and
196 fnodes is a mapping from head to .hgtags filenode. If those two are
196 fnodes is a mapping from head to .hgtags filenode. If those two are
197 set, caller is responsible for reading tag info from each head.'''
197 set, caller is responsible for reading tag info from each head.'''
198
198
199 try:
199 try:
200 cachefile = repo.opener('tags.cache', 'r')
200 cachefile = repo.opener('tags.cache', 'r')
201 _debug(ui, 'reading tag cache from %s\n' % cachefile.name)
201 _debug(ui, 'reading tag cache from %s\n' % cachefile.name)
202 except IOError:
202 except IOError:
203 cachefile = None
203 cachefile = None
204
204
205 # The cache file consists of lines like
205 # The cache file consists of lines like
206 # <headrev> <headnode> [<tagnode>]
206 # <headrev> <headnode> [<tagnode>]
207 # where <headrev> and <headnode> redundantly identify a repository
207 # where <headrev> and <headnode> redundantly identify a repository
208 # head from the time the cache was written, and <tagnode> is the
208 # head from the time the cache was written, and <tagnode> is the
209 # filenode of .hgtags on that head. Heads with no .hgtags file will
209 # filenode of .hgtags on that head. Heads with no .hgtags file will
210 # have no <tagnode>. The cache is ordered from tip to oldest (which
210 # have no <tagnode>. The cache is ordered from tip to oldest (which
211 # is part of why <headrev> is there: a quick visual check is all
211 # is part of why <headrev> is there: a quick visual check is all
212 # that's required to ensure correct order).
212 # that's required to ensure correct order).
213 #
213 #
214 # This information is enough to let us avoid the most expensive part
214 # This information is enough to let us avoid the most expensive part
215 # of finding global tags, which is looking up <tagnode> in the
215 # of finding global tags, which is looking up <tagnode> in the
216 # manifest for each head.
216 # manifest for each head.
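# A hypothetical cache might start with head lines such as
#   4042 0123456789abcdef0123456789abcdef01234567 89abcdef89abcdef89abcdef89abcdef89abcdef
#   4041 fedcba9876543210fedcba9876543210fedcba98
# where the second head carries no .hgtags file (all numbers and
# hashes made up).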
217 cacherevs = [] # list of headrev
217 cacherevs = [] # list of headrev
218 cacheheads = [] # list of headnode
218 cacheheads = [] # list of headnode
219 cachefnode = {} # map headnode to filenode
219 cachefnode = {} # map headnode to filenode
220 if cachefile:
220 if cachefile:
221 for line in cachefile:
221 for line in cachefile:
222 if line == "\n":
222 if line == "\n":
223 break
223 break
224 line = line.rstrip().split()
224 line = line.rstrip().split()
225 cacherevs.append(int(line[0]))
225 cacherevs.append(int(line[0]))
226 headnode = bin(line[1])
226 headnode = bin(line[1])
227 cacheheads.append(headnode)
227 cacheheads.append(headnode)
228 if len(line) == 3:
228 if len(line) == 3:
229 fnode = bin(line[2])
229 fnode = bin(line[2])
230 cachefnode[headnode] = fnode
230 cachefnode[headnode] = fnode
231
231
232 tipnode = repo.changelog.tip()
232 tipnode = repo.changelog.tip()
233 tiprev = len(repo.changelog) - 1
233 tiprev = len(repo.changelog) - 1
234
234
235 # Case 1 (common): tip is the same, so nothing has changed.
235 # Case 1 (common): tip is the same, so nothing has changed.
236 # (Unchanged tip trivially means no changesets have been added.
236 # (Unchanged tip trivially means no changesets have been added.
237 # But, thanks to localrepository.destroyed(), it also means none
237 # But, thanks to localrepository.destroyed(), it also means none
238 # have been destroyed by strip or rollback.)
238 # have been destroyed by strip or rollback.)
239 if cacheheads and cacheheads[0] == tipnode and cacherevs[0] == tiprev:
239 if cacheheads and cacheheads[0] == tipnode and cacherevs[0] == tiprev:
240 _debug(ui, "tag cache: tip unchanged\n")
240 _debug(ui, "tag cache: tip unchanged\n")
241 tags = _readtags(ui, repo, cachefile, cachefile.name)
241 tags = _readtags(ui, repo, cachefile, cachefile.name)
242 cachefile.close()
242 cachefile.close()
243 return (None, None, tags, False)
243 return (None, None, tags, False)
244 if cachefile:
244 if cachefile:
245 cachefile.close() # ignore rest of file
245 cachefile.close() # ignore rest of file
246
246
247 repoheads = repo.heads()
247 repoheads = repo.heads()
248
249 # Case 2 (uncommon): empty repo; get out quickly and don't bother
248 # Case 2 (uncommon): empty repo; get out quickly and don't bother
250 # writing an empty cache.
249 # writing an empty cache.
251 if repoheads == [nullid]:
250 if repoheads == [nullid]:
252 return ([], {}, {}, False)
251 return ([], {}, {}, False)
253
252
254 # Case 3 (uncommon): cache file missing or empty.
253 # Case 3 (uncommon): cache file missing or empty.
255 if not cacheheads:
254 if not cacheheads:
256 _debug(ui, 'tag cache: cache file missing or empty\n')
255 _debug(ui, 'tag cache: cache file missing or empty\n')
257
256
258 # Case 4 (uncommon): tip rev decreased. This should only happen
257 # Case 4 (uncommon): tip rev decreased. This should only happen
259 # when we're called from localrepository.destroyed(). Refresh the
258 # when we're called from localrepository.destroyed(). Refresh the
260 # cache so future invocations will not see disappeared heads in the
259 # cache so future invocations will not see disappeared heads in the
261 # cache.
260 # cache.
262 elif cacheheads and tiprev < cacherevs[0]:
261 elif cacheheads and tiprev < cacherevs[0]:
263 _debug(ui,
262 _debug(ui,
264 'tag cache: tip rev decremented (from %d to %d), '
263 'tag cache: tip rev decremented (from %d to %d), '
265 'so we must be destroying nodes\n'
264 'so we must be destroying nodes\n'
266 % (cacherevs[0], tiprev))
265 % (cacherevs[0], tiprev))
267
266
268 # Case 5 (common): tip has changed, so we've added/replaced heads.
267 # Case 5 (common): tip has changed, so we've added/replaced heads.
269 else:
268 else:
270 _debug(ui,
269 _debug(ui,
271 'tag cache: tip has changed (%d:%s); must find new heads\n'
270 'tag cache: tip has changed (%d:%s); must find new heads\n'
272 % (tiprev, short(tipnode)))
271 % (tiprev, short(tipnode)))
273
272
274 # Luckily, the code to handle cases 3, 4, 5 is the same. So the
273 # Luckily, the code to handle cases 3, 4, 5 is the same. So the
275 # above if/elif/else can disappear once we're confident this thing
274 # above if/elif/else can disappear once we're confident this thing
276 # actually works and we don't need the debug output.
275 # actually works and we don't need the debug output.
277
276
278 # N.B. in case 4 (nodes destroyed), "new head" really means "newly
277 # N.B. in case 4 (nodes destroyed), "new head" really means "newly
279 # exposed".
278 # exposed".
280 newheads = [head
279 newheads = [head
281 for head in repoheads
280 for head in repoheads
282 if head not in set(cacheheads)]
281 if head not in set(cacheheads)]
283 _debug(ui, 'tag cache: found %d head(s) not in cache: %s\n'
282 _debug(ui, 'tag cache: found %d head(s) not in cache: %s\n'
284 % (len(newheads), map(short, newheads)))
283 % (len(newheads), map(short, newheads)))
285
284
286 # Now we have to lookup the .hgtags filenode for every new head.
285 # Now we have to lookup the .hgtags filenode for every new head.
287 # This is the most expensive part of finding tags, so performance
286 # This is the most expensive part of finding tags, so performance
288 # depends primarily on the size of newheads. Worst case: no cache
287 # depends primarily on the size of newheads. Worst case: no cache
289 # file, so newheads == repoheads.
288 # file, so newheads == repoheads.
290 for head in newheads:
289 for head in newheads:
291 cctx = repo[head]
290 cctx = repo[head]
292 try:
291 try:
293 fnode = cctx.filenode('.hgtags')
292 fnode = cctx.filenode('.hgtags')
294 cachefnode[head] = fnode
293 cachefnode[head] = fnode
295 except error.LookupError:
294 except error.LookupError:
296 # no .hgtags file on this head
295 # no .hgtags file on this head
297 pass
296 pass
298
297
299 # Caller has to iterate over all heads, but can use the filenodes in
298 # Caller has to iterate over all heads, but can use the filenodes in
300 # cachefnode to get to each .hgtags revision quickly.
299 # cachefnode to get to each .hgtags revision quickly.
301 return (repoheads, cachefnode, None, True)
300 return (repoheads, cachefnode, None, True)
302
301
303 def _writetagcache(ui, repo, heads, tagfnode, cachetags):
302 def _writetagcache(ui, repo, heads, tagfnode, cachetags):
304
303
305 cachefile = repo.opener('tags.cache', 'w', atomictemp=True)
304 cachefile = repo.opener('tags.cache', 'w', atomictemp=True)
306 _debug(ui, 'writing cache file %s\n' % cachefile.name)
305 _debug(ui, 'writing cache file %s\n' % cachefile.name)
307
306
308 realheads = repo.heads() # for sanity checks below
307 realheads = repo.heads() # for sanity checks below
309 for head in heads:
308 for head in heads:
310 # temporary sanity checks; these can probably be removed
309 # temporary sanity checks; these can probably be removed
311 # once this code has been in crew for a few weeks
310 # once this code has been in crew for a few weeks
312 assert head in repo.changelog.nodemap, \
311 assert head in repo.changelog.nodemap, \
313 'trying to write non-existent node %s to tag cache' % short(head)
312 'trying to write non-existent node %s to tag cache' % short(head)
314 assert head in realheads, \
313 assert head in realheads, \
315 'trying to write non-head %s to tag cache' % short(head)
314 'trying to write non-head %s to tag cache' % short(head)
316 assert head != nullid, \
315 assert head != nullid, \
317 'trying to write nullid to tag cache'
316 'trying to write nullid to tag cache'
318
317
319 # This can't fail because of the first assert above. When/if we
318 # This can't fail because of the first assert above. When/if we
320 # remove that assert, we might want to catch LookupError here
319 # remove that assert, we might want to catch LookupError here
321 # and downgrade it to a warning.
320 # and downgrade it to a warning.
322 rev = repo.changelog.rev(head)
321 rev = repo.changelog.rev(head)
323
322
324 fnode = tagfnode.get(head)
323 fnode = tagfnode.get(head)
325 if fnode:
324 if fnode:
326 cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
325 cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
327 else:
326 else:
328 cachefile.write('%d %s\n' % (rev, hex(head)))
327 cachefile.write('%d %s\n' % (rev, hex(head)))
329
328
330 # Tag names in the cache are in UTF-8 -- which is the whole reason
329 # Tag names in the cache are in UTF-8 -- which is the whole reason
331 # we keep them in UTF-8 throughout this module. If we converted
330 # we keep them in UTF-8 throughout this module. If we converted
331 # them to the local encoding on input, we would lose info writing them to
331 # them to the local encoding on input, we would lose info writing them to
333 # the cache.
332 # the cache.
334 cachefile.write('\n')
333 cachefile.write('\n')
335 for (name, (node, hist)) in cachetags.iteritems():
334 for (name, (node, hist)) in cachetags.iteritems():
336 cachefile.write("%s %s\n" % (hex(node), name))
335 cachefile.write("%s %s\n" % (hex(node), name))
337
336
338 cachefile.rename()
337 cachefile.rename()
339 cachefile.close()
338 cachefile.close()
@@ -1,381 +1,381
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import errno, getpass, os, socket, sys, tempfile, traceback
9 import errno, getpass, os, socket, sys, tempfile, traceback
10 import config, util, error
10 import config, util, error
11
11
12 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True,
12 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True,
13 '0': False, 'no': False, 'false': False, 'off': False}
13 '0': False, 'no': False, 'false': False, 'off': False}
14
14
15 class ui(object):
15 class ui(object):
16 def __init__(self, src=None):
16 def __init__(self, src=None):
17 self._buffers = []
17 self._buffers = []
18 self.quiet = self.verbose = self.debugflag = self._traceback = False
18 self.quiet = self.verbose = self.debugflag = self._traceback = False
19 self._reportuntrusted = True
19 self._reportuntrusted = True
20 self._ocfg = config.config() # overlay
20 self._ocfg = config.config() # overlay
21 self._tcfg = config.config() # trusted
21 self._tcfg = config.config() # trusted
22 self._ucfg = config.config() # untrusted
22 self._ucfg = config.config() # untrusted
23 self._trustusers = set()
23 self._trustusers = set()
24 self._trustgroups = set()
24 self._trustgroups = set()
25
25
26 if src:
26 if src:
27 self._tcfg = src._tcfg.copy()
27 self._tcfg = src._tcfg.copy()
28 self._ucfg = src._ucfg.copy()
28 self._ucfg = src._ucfg.copy()
29 self._ocfg = src._ocfg.copy()
29 self._ocfg = src._ocfg.copy()
30 self._trustusers = src._trustusers.copy()
30 self._trustusers = src._trustusers.copy()
31 self._trustgroups = src._trustgroups.copy()
31 self._trustgroups = src._trustgroups.copy()
32 self.fixconfig()
32 self.fixconfig()
33 else:
33 else:
34 # we always trust global config files
34 # we always trust global config files
35 for f in util.rcpath():
35 for f in util.rcpath():
36 self.readconfig(f, trust=True)
36 self.readconfig(f, trust=True)
37
37
38 def copy(self):
38 def copy(self):
39 return self.__class__(self)
39 return self.__class__(self)
40
40
41 def _is_trusted(self, fp, f):
41 def _is_trusted(self, fp, f):
42 st = util.fstat(fp)
42 st = util.fstat(fp)
43 if util.isowner(st):
43 if util.isowner(st):
44 return True
44 return True
45
45
46 tusers, tgroups = self._trustusers, self._trustgroups
46 tusers, tgroups = self._trustusers, self._trustgroups
47 if '*' in tusers or '*' in tgroups:
47 if '*' in tusers or '*' in tgroups:
48 return True
48 return True
49
49
50 user = util.username(st.st_uid)
50 user = util.username(st.st_uid)
51 group = util.groupname(st.st_gid)
51 group = util.groupname(st.st_gid)
52 if user in tusers or group in tgroups or user == util.username():
52 if user in tusers or group in tgroups or user == util.username():
53 return True
53 return True
54
54
55 if self._reportuntrusted:
55 if self._reportuntrusted:
56 self.warn(_('Not trusting file %s from untrusted '
56 self.warn(_('Not trusting file %s from untrusted '
57 'user %s, group %s\n') % (f, user, group))
57 'user %s, group %s\n') % (f, user, group))
58 return False
58 return False
59
59
60 def readconfig(self, filename, root=None, trust=False,
60 def readconfig(self, filename, root=None, trust=False,
61 sections=None, remap=None):
61 sections=None, remap=None):
62 try:
62 try:
63 fp = open(filename)
63 fp = open(filename)
64 except IOError:
64 except IOError:
65 if not sections: # ignore unless we were looking for something
65 if not sections: # ignore unless we were looking for something
66 return
66 return
67 raise
67 raise
68
68
69 cfg = config.config()
69 cfg = config.config()
70 trusted = sections or trust or self._is_trusted(fp, filename)
70 trusted = sections or trust or self._is_trusted(fp, filename)
71
71
72 try:
72 try:
73 cfg.read(filename, fp, sections=sections, remap=remap)
73 cfg.read(filename, fp, sections=sections, remap=remap)
74 except error.ConfigError, inst:
74 except error.ConfigError, inst:
75 if trusted:
75 if trusted:
76 raise
76 raise
77 self.warn(_("Ignored: %s\n") % str(inst))
77 self.warn(_("Ignored: %s\n") % str(inst))
78
78
79 if trusted:
79 if trusted:
80 self._tcfg.update(cfg)
80 self._tcfg.update(cfg)
81 self._tcfg.update(self._ocfg)
81 self._tcfg.update(self._ocfg)
82 self._ucfg.update(cfg)
82 self._ucfg.update(cfg)
83 self._ucfg.update(self._ocfg)
83 self._ucfg.update(self._ocfg)
84
84
85 if root is None:
85 if root is None:
86 root = os.path.expanduser('~')
86 root = os.path.expanduser('~')
87 self.fixconfig(root=root)
87 self.fixconfig(root=root)
88
88
89 def fixconfig(self, root=None):
89 def fixconfig(self, root=None):
90 # translate paths relative to root (or home) into absolute paths
90 # translate paths relative to root (or home) into absolute paths
91 root = root or os.getcwd()
91 root = root or os.getcwd()
92 for c in self._tcfg, self._ucfg, self._ocfg:
92 for c in self._tcfg, self._ucfg, self._ocfg:
93 for n, p in c.items('paths'):
93 for n, p in c.items('paths'):
94 if p and "://" not in p and not os.path.isabs(p):
94 if p and "://" not in p and not os.path.isabs(p):
95 c.set("paths", n, os.path.normpath(os.path.join(root, p)))
95 c.set("paths", n, os.path.normpath(os.path.join(root, p)))
96
96
97 # update ui options
97 # update ui options
98 self.debugflag = self.configbool('ui', 'debug')
98 self.debugflag = self.configbool('ui', 'debug')
99 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
99 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
100 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
100 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
101 if self.verbose and self.quiet:
101 if self.verbose and self.quiet:
102 self.quiet = self.verbose = False
102 self.quiet = self.verbose = False
103 self._reportuntrusted = self.configbool("ui", "report_untrusted", True)
103 self._reportuntrusted = self.configbool("ui", "report_untrusted", True)
104 self._traceback = self.configbool('ui', 'traceback', False)
104 self._traceback = self.configbool('ui', 'traceback', False)
105
105
106 # update trust information
106 # update trust information
107 self._trustusers.update(self.configlist('trusted', 'users'))
107 self._trustusers.update(self.configlist('trusted', 'users'))
108 self._trustgroups.update(self.configlist('trusted', 'groups'))
108 self._trustgroups.update(self.configlist('trusted', 'groups'))
109
109
110 def setconfig(self, section, name, value):
110 def setconfig(self, section, name, value):
111 for cfg in (self._ocfg, self._tcfg, self._ucfg):
111 for cfg in (self._ocfg, self._tcfg, self._ucfg):
112 cfg.set(section, name, value)
112 cfg.set(section, name, value)
113 self.fixconfig()
113 self.fixconfig()
114
114
115 def _data(self, untrusted):
115 def _data(self, untrusted):
116 return untrusted and self._ucfg or self._tcfg
116 return untrusted and self._ucfg or self._tcfg
117
117
118 def configsource(self, section, name, untrusted=False):
118 def configsource(self, section, name, untrusted=False):
119 return self._data(untrusted).source(section, name) or 'none'
119 return self._data(untrusted).source(section, name) or 'none'
120
120
121 def config(self, section, name, default=None, untrusted=False):
121 def config(self, section, name, default=None, untrusted=False):
122 value = self._data(untrusted).get(section, name, default)
122 value = self._data(untrusted).get(section, name, default)
123 if self.debugflag and not untrusted and self._reportuntrusted:
123 if self.debugflag and not untrusted and self._reportuntrusted:
124 uvalue = self._ucfg.get(section, name)
124 uvalue = self._ucfg.get(section, name)
125 if uvalue is not None and uvalue != value:
125 if uvalue is not None and uvalue != value:
126 self.debug(_("ignoring untrusted configuration option "
126 self.debug(_("ignoring untrusted configuration option "
127 "%s.%s = %s\n") % (section, name, uvalue))
127 "%s.%s = %s\n") % (section, name, uvalue))
128 return value
128 return value
129
129
130 def configbool(self, section, name, default=False, untrusted=False):
130 def configbool(self, section, name, default=False, untrusted=False):
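# e.g. "1", "yes", "true" and "on" map to True, while "0", "no",
# "false" and "off" map to False (see _booleans above).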
131 v = self.config(section, name, None, untrusted)
131 v = self.config(section, name, None, untrusted)
132 if v is None:
132 if v is None:
133 return default
133 return default
134 if v.lower() not in _booleans:
134 if v.lower() not in _booleans:
135 raise error.ConfigError(_("%s.%s not a boolean ('%s')")
135 raise error.ConfigError(_("%s.%s not a boolean ('%s')")
136 % (section, name, v))
136 % (section, name, v))
137 return _booleans[v.lower()]
137 return _booleans[v.lower()]
138
138
139 def configlist(self, section, name, default=None, untrusted=False):
139 def configlist(self, section, name, default=None, untrusted=False):
140 """Return a list of comma/space separated strings"""
140 """Return a list of comma/space separated strings"""
141 result = self.config(section, name, untrusted=untrusted)
141 result = self.config(section, name, untrusted=untrusted)
142 if result is None:
142 if result is None:
143 result = default or []
143 result = default or []
144 if isinstance(result, basestring):
144 if isinstance(result, basestring):
145 result = result.replace(",", " ").split()
145 result = result.replace(",", " ").split()
146 return result
146 return result
147
147
148 def has_section(self, section, untrusted=False):
148 def has_section(self, section, untrusted=False):
149 '''tell whether section exists in config.'''
149 '''tell whether section exists in config.'''
150 return section in self._data(untrusted)
150 return section in self._data(untrusted)
151
151
152 def configitems(self, section, untrusted=False):
152 def configitems(self, section, untrusted=False):
153 items = self._data(untrusted).items(section)
153 items = self._data(untrusted).items(section)
154 if self.debugflag and not untrusted and self._reportuntrusted:
154 if self.debugflag and not untrusted and self._reportuntrusted:
155 for k, v in self._ucfg.items(section):
155 for k, v in self._ucfg.items(section):
156 if self._tcfg.get(section, k) != v:
156 if self._tcfg.get(section, k) != v:
157 self.debug(_("ignoring untrusted configuration option "
157 self.debug(_("ignoring untrusted configuration option "
158 "%s.%s = %s\n") % (section, k, v))
158 "%s.%s = %s\n") % (section, k, v))
159 return items
159 return items
160
160
161 def walkconfig(self, untrusted=False):
161 def walkconfig(self, untrusted=False):
162 cfg = self._data(untrusted)
162 cfg = self._data(untrusted)
163 for section in cfg.sections():
163 for section in cfg.sections():
164 for name, value in self.configitems(section, untrusted):
164 for name, value in self.configitems(section, untrusted):
165 yield section, name, str(value).replace('\n', '\\n')
165 yield section, name, str(value).replace('\n', '\\n')
166
166
167 def username(self):
167 def username(self):
168 """Return default username to be used in commits.
168 """Return default username to be used in commits.
169
169
170 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
170 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
171 and the search stops at the first one that is set.
171 and the search stops at the first one that is set.
172 If not found and ui.askusername is True, ask the user, else use
172 If not found and ui.askusername is True, ask the user, else use
173 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
173 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
174 """
174 """
175 user = os.environ.get("HGUSER")
175 user = os.environ.get("HGUSER")
176 if user is None:
176 if user is None:
177 user = self.config("ui", "username")
177 user = self.config("ui", "username")
178 if user is None:
178 if user is None:
179 user = os.environ.get("EMAIL")
179 user = os.environ.get("EMAIL")
180 if user is None and self.configbool("ui", "askusername"):
180 if user is None and self.configbool("ui", "askusername"):
181 user = self.prompt(_("enter a commit username:"), default=None)
181 user = self.prompt(_("enter a commit username:"), default=None)
182 if user is None:
182 if user is None:
183 try:
183 try:
184 user = '%s@%s' % (util.getuser(), socket.getfqdn())
184 user = '%s@%s' % (util.getuser(), socket.getfqdn())
185 self.warn(_("No username found, using '%s' instead\n") % user)
185 self.warn(_("No username found, using '%s' instead\n") % user)
186 except KeyError:
186 except KeyError:
187 pass
187 pass
188 if not user:
188 if not user:
189 raise util.Abort(_("Please specify a username."))
189 raise util.Abort(_("Please specify a username."))
190 if "\n" in user:
190 if "\n" in user:
191 raise util.Abort(_("username %s contains a newline\n") % repr(user))
191 raise util.Abort(_("username %s contains a newline\n") % repr(user))
192 return user
192 return user
193
193
194 def shortuser(self, user):
194 def shortuser(self, user):
195 """Return a short representation of a user name or email address."""
195 """Return a short representation of a user name or email address."""
196 if not self.verbose: user = util.shortuser(user)
196 if not self.verbose: user = util.shortuser(user)
197 return user
197 return user
198
198
199 def _path(self, loc):
199 def _path(self, loc):
200 p = self.config('paths', loc)
200 p = self.config('paths', loc)
201 if p and '%%' in p:
201 if p and '%%' in p:
202 self.warn('(deprecated \'%%\' in path %s=%s from %s)\n' %
202 self.warn('(deprecated \'%%\' in path %s=%s from %s)\n' %
203 (loc, p, self.configsource('paths', loc)))
203 (loc, p, self.configsource('paths', loc)))
204 p = p.replace('%%', '%')
204 p = p.replace('%%', '%')
205 return p
205 return p
206
206
207 def expandpath(self, loc, default=None):
207 def expandpath(self, loc, default=None):
208 """Return repository location relative to cwd or from [paths]"""
208 """Return repository location relative to cwd or from [paths]"""
209 if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
209 if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')):
210 return loc
210 return loc
211
211
212 path = self._path(loc)
212 path = self._path(loc)
213 if not path and default is not None:
213 if not path and default is not None:
214 path = self._path(default)
214 path = self._path(default)
215 return path or loc
215 return path or loc
216
216
217 def pushbuffer(self):
217 def pushbuffer(self):
218 self._buffers.append([])
218 self._buffers.append([])
219
219
220 def popbuffer(self):
220 def popbuffer(self):
221 return "".join(self._buffers.pop())
221 return "".join(self._buffers.pop())
222
222
223 def write(self, *args):
223 def write(self, *args):
224 if self._buffers:
224 if self._buffers:
225 self._buffers[-1].extend([str(a) for a in args])
225 self._buffers[-1].extend([str(a) for a in args])
226 else:
226 else:
227 for a in args:
227 for a in args:
228 sys.stdout.write(str(a))
228 sys.stdout.write(str(a))
229
229
230 def write_err(self, *args):
230 def write_err(self, *args):
231 try:
231 try:
232 if not sys.stdout.closed: sys.stdout.flush()
232 if not sys.stdout.closed: sys.stdout.flush()
233 for a in args:
233 for a in args:
234 sys.stderr.write(str(a))
234 sys.stderr.write(str(a))
235 # stderr may be buffered under win32 when redirected to files,
235 # stderr may be buffered under win32 when redirected to files,
236 # including stdout.
236 # including stdout.
237 if not sys.stderr.closed: sys.stderr.flush()
237 if not sys.stderr.closed: sys.stderr.flush()
238 except IOError, inst:
238 except IOError, inst:
239 if inst.errno != errno.EPIPE:
239 if inst.errno != errno.EPIPE:
240 raise
240 raise
241
241
242 def flush(self):
242 def flush(self):
243 try: sys.stdout.flush()
243 try: sys.stdout.flush()
244 except: pass
244 except: pass
245 try: sys.stderr.flush()
245 try: sys.stderr.flush()
246 except: pass
246 except: pass
247
247
    def interactive(self):
        i = self.configbool("ui", "interactive", None)
        if i is None:
            return sys.stdin.isatty()
        return i

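    # Illustrative configuration: the "ui.interactive" setting read above can
    # force interactivity on or off regardless of whether stdin is a tty, e.g.
    #
    #   [ui]
    #   interactive = False
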
    def _readline(self, prompt=''):
        if sys.stdin.isatty():
            try:
                # magically add command line editing support, where
                # available
                import readline
                # force demandimport to really load the module
                readline.read_history_file
                # windows sometimes raises something other than ImportError
            except Exception:
                pass
        line = raw_input(prompt)
        # When stdin is in binary mode on Windows, it can cause
        # raw_input() to emit an extra trailing carriage return
        if os.linesep == '\r\n' and line and line[-1] == '\r':
            line = line[:-1]
        return line

    def prompt(self, msg, default="y"):
        """Prompt user with msg, read response.
        If ui is not interactive, the default is returned.
        """
        if not self.interactive():
            self.write(msg, ' ', default, "\n")
            return default
        try:
            r = self._readline(msg + ' ')
            if not r:
                return default
            return r
        except EOFError:
            raise util.Abort(_('response expected'))

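    # Rough usage sketch; 'u' is a hypothetical ui instance.  In
    # non-interactive mode the default is echoed and returned unchanged:
    #
    #   answer = u.prompt(_('keep changes (y/n)?'), default='y')
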
    def promptchoice(self, msg, choices, default=0):
        """Prompt user with msg, read response, and ensure it matches
        one of the provided choices. The index of the choice is returned.
        choices is a sequence of acceptable responses with the format:
        ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
        If ui is not interactive, the default is returned.
        """
        resps = [s[s.index('&')+1].lower() for s in choices]
        while True:
            r = self.prompt(msg, resps[default])
            if r.lower() in resps:
                return resps.index(r.lower())
            self.write(_("unrecognized response\n"))

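    # Rough usage sketch; 'u' is a hypothetical ui instance.  The '&' marks the
    # accepted one-letter response for each choice and the chosen index is
    # returned:
    #
    #   idx = u.promptchoice(_('use (l)ocal or (r)emote?'),
    #                        (_('&Local'), _('&Remote')), default=0)
    #   # typing 'r' (or 'R') yields 1; an empty response yields the default, 0
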
    def getpass(self, prompt=None, default=None):
        if not self.interactive(): return default
        try:
            return getpass.getpass(prompt or _('password: '))
        except EOFError:
            raise util.Abort(_('response expected'))
    def status(self, *msg):
        if not self.quiet: self.write(*msg)
    def warn(self, *msg):
        self.write_err(*msg)
    def note(self, *msg):
        if self.verbose: self.write(*msg)
    def debug(self, *msg):
        if self.debugflag: self.write(*msg)
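    # Rough sketch of the verbosity ladder these helpers implement; 'u' is a
    # hypothetical ui instance:
    #
    #   u.status('normal output\n')   # suppressed by --quiet
    #   u.note('extra detail\n')      # shown only with --verbose
    #   u.debug('internals\n')        # shown only with --debug
    #   u.warn('problem\n')           # always written to stderr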
    def edit(self, text, user):
        (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
                                      text=True)
        try:
            f = os.fdopen(fd, "w")
            f.write(text)
            f.close()

            editor = self.geteditor()

            util.system("%s \"%s\"" % (editor, name),
                        environ={'HGUSER': user},
                        onerr=util.Abort, errprefix=_("edit failed"))

            f = open(name)
            t = f.read()
            f.close()
        finally:
            os.unlink(name)

        return t

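    # Rough usage sketch (hypothetical caller): edit() round-trips a commit
    # message through the user's editor and returns the edited text:
    #
    #   message = u.edit('initial draft\n', user=u.username())
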
    def traceback(self):
        '''print exception traceback if traceback printing enabled.
        only to call in exception handler. returns true if traceback
        printed.'''
        if self._traceback:
            traceback.print_exc()
        return self._traceback

    def geteditor(self):
        '''return editor to use'''
        return (os.environ.get("HGEDITOR") or
                self.config("ui", "editor") or
                os.environ.get("VISUAL") or
                os.environ.get("EDITOR", "vi"))

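    # Resolution order illustrated with made-up settings: with HGEDITOR unset,
    # [ui] editor unset and VISUAL=emacs, geteditor() returns 'emacs'; with
    # everything unset it falls back to 'vi'.  From the shell:
    #
    #   HGEDITOR=nano hg commit    # nano wins over every other setting
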
    def progress(self, topic, pos, item="", unit="", total=None):
        '''show a progress message

        With stock hg, this is simply a debug message that is hidden
        by default, but with extensions or GUI tools it may be
        visible. 'topic' is the current operation, 'item' is a
        non-numeric marker of the current position (ie the currently
        in-process file), 'pos' is the current numeric position (ie
        revision, bytes, etc.), 'unit' is a corresponding unit label,
        and 'total' is the highest expected pos.

        Multiple nested topics may be active at a time. All topics
        should be marked closed by setting pos to None at termination.
        '''

        if pos is None or not self.debugflag:
            return

        if unit:
            unit = ' ' + unit
        if item:
            item = ' ' + item

        if total:
            pct = 100.0 * pos / total
            self.debug('%s:%s %s/%s%s (%4.2g%%)\n'
                       % (topic, item, pos, total, unit, pct))
        else:
            self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
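    # Rough usage sketch; 'u' is a hypothetical ui instance.  Callers report
    # their position as they go and close the topic by passing pos=None:
    #
    #   for i, f in enumerate(files):
    #       u.progress(_('adding'), i, item=f, unit=_('files'), total=len(files))
    #   u.progress(_('adding'), None)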