##// END OF EJS Templates
util: replace util.nulldev with os.devnull...
Ross Lagerwall -
r17391:fc24c104 stable
parent child Browse files
Show More
@@ -1,445 +1,445
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64, errno
8 import base64, errno
9 import os
9 import os
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 propertycache = util.propertycache
14 propertycache = util.propertycache
15
15
def encodeargs(args):
    """Serialize args with pickle and return a single-line base64 string.

    The result contains no newlines, so it is safe to pass on a command
    line or store one-per-line; decodeargs() reverses the transformation.
    """
    # base64.encodestring inserts a newline every 76 characters (and a
    # trailing one); strip them in a single pass.  The previous code did
    # this with a per-character splitlines() loop, which was both obscure
    # and slower.
    s = pickle.dumps(args)
    return base64.encodestring(s).replace('\n', '')
24
24
def decodeargs(s):
    """Inverse of encodeargs(): base64-decode then unpickle."""
    data = base64.decodestring(s)
    return pickle.loads(data)
28
28
class MissingTool(Exception):
    """Raised when a required external conversion tool is not available."""
31
31
def checktool(exe, name=None, abort=True):
    """Verify that the external program 'exe' can be found on the path.

    name is the user-facing tool name used in the error message and
    defaults to exe.  On failure, raise util.Abort when abort is true,
    otherwise the softer MissingTool exception.
    """
    name = name or exe
    if not util.findexe(exe):
        # The old 'abort and util.Abort or MissingTool' idiom is fragile
        # when the middle operand can be falsy; a conditional expression
        # states the intent directly.
        exc = util.Abort if abort else MissingTool
        raise exc(_('cannot find required "%s" tool') % name)
37
37
class NoRepo(Exception):
    """Raised when a path does not contain a repository of the expected kind."""
40
40
# Sentinel revision identifier recorded in revision maps for source
# revisions that were deliberately skipped during conversion.
SKIPREV = 'SKIP'
42
42
class commit(object):
    """Value object describing one changeset read from a conversion source.

    author/date fall back to harmless defaults ('unknown' / epoch) so
    sinks can rely on them always being present.
    """
    def __init__(self, author, date, desc, parents, branch=None, rev=None,
                 extra=None, sortkey=None):
        self.author = author or 'unknown'
        self.date = date or '0 0'
        self.desc = desc
        self.parents = parents
        self.branch = branch
        self.rev = rev
        # Bug fix: the previous default of extra={} was a mutable default
        # argument shared by every commit constructed without an explicit
        # extra; mutating one commit's extra leaked into all the others.
        # Use None as the default and build a fresh dict per instance.
        if extra is None:
            extra = {}
        self.extra = extra
        self.sortkey = sortkey
54
54
class converter_source(object):
    """Abstract interface that every conversion source implements.

    Concrete sources (svn, git, gnuarch, ...) subclass this and override
    the NotImplementedError stubs; the optional hooks default to no-ops.
    """

    def __init__(self, ui, path=None, rev=None):
        """Set up the source, or raise NoRepo("message") if path does
        not point at a valid repository of this kind."""
        self.ui = ui
        self.path = path
        self.rev = rev
        self.encoding = 'utf-8'

    def before(self):
        """Optional hook run before conversion starts."""
        pass

    def after(self):
        """Optional hook run after conversion finishes."""
        pass

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""
        pass

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def getfile(self, name, rev):
        """Return a pair (data, mode) where data is the file content
        as a string and mode one of '', 'x' or 'l'. rev is the
        identifier returned by a previous call to getchanges(). Raise
        IOError to indicate that name was deleted in rev.
        """
        raise NotImplementedError

    def getchanges(self, version):
        """Returns a tuple of (files, copies).

        files is a sorted list of (filename, id) tuples for all files
        changed between version and its first parent returned by
        getcommit(). id is the source revision id of the file.

        copies is a dictionary of dest: source
        """
        raise NotImplementedError

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError

    def gettags(self):
        """Return the tags as a dictionary of name: revision

        Tag names must be UTF-8 strings.
        """
        raise NotImplementedError

    def recode(self, s, encoding=None):
        """Return s as a UTF-8 byte string.

        Unicode input is encoded directly; byte strings are decoded with
        'encoding' (defaulting to the source's encoding), falling back
        first to latin-1 and finally to lossy replacement decoding.
        """
        encoding = encoding or self.encoding or 'utf-8'

        if isinstance(s, unicode):
            return s.encode("utf-8")
        try:
            return s.decode(encoding).encode("utf-8")
        except UnicodeError:
            try:
                return s.decode("latin-1").encode("utf-8")
            except UnicodeError:
                return s.decode(encoding, "replace").encode("utf-8")

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev. The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError

    def converted(self, rev, sinkrev):
        '''Notify the source that a revision has been converted.'''
        pass

    def hasnativeorder(self):
        """Return true if this source has a meaningful, native revision
        order. For instance, Mercurial revisions are store sequentially
        while there is no such global ordering with Darcs.
        """
        return False

    def lookuprev(self, rev):
        """If rev is a meaningful revision reference in source, return
        the referenced identifier in the same format used by getcommit().
        return None otherwise.
        """
        return None

    def getbookmarks(self):
        """Return the bookmarks as a dictionary of name: revision

        Bookmark names are to be UTF-8 strings.
        """
        return {}
162
162
class converter_sink(object):
    """Abstract interface that every conversion sink (target) implements.

    Concrete sinks subclass this and override the NotImplementedError
    stubs; the optional hooks default to no-ops.
    """

    def __init__(self, ui, path):
        """Set up the sink, or raise NoRepo("message") if path does not
        point at a valid repository.

        self.created collects paths to remove if a fatal error occurs
        later.
        """
        self.ui = ui
        self.path = path
        self.created = []

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent authors identifiers for each system."""
        return None

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object
        containing at a minimum the author, date, and message for this
        changeset. 'files' is a list of (path, version) tuples,
        'copies' is a dictionary mapping destinations to sources,
        'source' is the source repository, and 'revmap' is a mapfile
        of source revisions to converted revisions. Only getfile() and
        lookuprev() should be called on 'source'.

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError

    def puttags(self, tags):
        """Put tags into sink.

        tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
        Return a pair (tag_revision, tag_parent_revision), or (None, None)
        if nothing was changed.
        """
        raise NotImplementedError

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.
        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""
        pass

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change. This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """
        pass

    def before(self):
        """Optional hook run before conversion starts."""
        pass

    def after(self):
        """Optional hook run after conversion finishes."""
        pass

    def putbookmarks(self, bookmarks):
        """Put bookmarks into sink.

        bookmarks: {bookmarkname: sink_rev_id, ...}
        where bookmarkname is an UTF-8 string.
        """
        pass

    def hascommit(self, rev):
        """Return True if the sink contains rev"""
        raise NotImplementedError
251
251
class commandline(object):
    """Helper base class for sources/sinks that drive an external command.

    Builds fully shell-quoted command lines, runs them via util.popen /
    util.popen2, and checks exit status, with xargs-style splitting for
    long argument lists.
    """
    def __init__(self, ui, command):
        self.ui = ui
        # Name of the external executable every run goes through.
        self.command = command

    def prerun(self):
        # Hook invoked before each command; subclasses may override.
        pass

    def postrun(self):
        # Hook invoked after each command completes; subclasses may override.
        pass

    def _cmdline(self, cmd, closestdin, *args, **kwargs):
        """Return the quoted shell command line for 'self.command cmd args'.

        Keyword arguments become options: single-letter keys as
        '-k value', longer keys as '--key=value' (underscores mapped to
        dashes).  closestdin redirects stdin from the null device.
        """
        cmdline = [self.command, cmd] + list(args)
        for k, v in kwargs.iteritems():
            if len(k) == 1:
                cmdline.append('-' + k)
            else:
                cmdline.append('--' + k.replace('_', '-'))
            try:
                # '' + v raises TypeError when v is not a string; in that
                # case no value is attached and the option acts as a bare
                # flag (e.g. passed as v=True).
                if len(k) == 1:
                    cmdline.append('' + v)
                else:
                    cmdline[-1] += '=' + v
            except TypeError:
                pass
        cmdline = [util.shellquote(arg) for arg in cmdline]
        if not self.ui.debugflag:
            # Discard the child's stderr unless the user asked for --debug.
            cmdline += ['2>', os.devnull]
        if closestdin:
            cmdline += ['<', os.devnull]
        cmdline = ' '.join(cmdline)
        return cmdline

    def _run(self, cmd, *args, **kwargs):
        # Run with stdin redirected from the null device; return stdout pipe.
        return self._dorun(util.popen, cmd, True, *args, **kwargs)

    def _run2(self, cmd, *args, **kwargs):
        # Run keeping stdin available (util.popen2 returns both pipes).
        return self._dorun(util.popen2, cmd, False, *args, **kwargs)

    def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs):
        # Common driver: build the command line, run it between the
        # prerun/postrun hooks, and return whatever openfunc yields.
        cmdline = self._cmdline(cmd, closestdin, *args, **kwargs)
        self.ui.debug('running: %s\n' % (cmdline,))
        self.prerun()
        try:
            return openfunc(cmdline)
        finally:
            self.postrun()

    def run(self, cmd, *args, **kwargs):
        """Run cmd; return (output string, exit status from pipe close)."""
        fp = self._run(cmd, *args, **kwargs)
        output = fp.read()
        self.ui.debug(output)
        return output, fp.close()

    def runlines(self, cmd, *args, **kwargs):
        """Like run(), but return the output as a list of lines."""
        fp = self._run(cmd, *args, **kwargs)
        output = fp.readlines()
        self.ui.debug(''.join(output))
        return output, fp.close()

    def checkexit(self, status, output=''):
        """Raise util.Abort (showing output, if any) when status is non-zero."""
        if status:
            if output:
                self.ui.warn(_('%s error:\n') % self.command)
                self.ui.warn(output)
            msg = util.explainexit(status)[0]
            raise util.Abort('%s %s' % (self.command, msg))

    def run0(self, cmd, *args, **kwargs):
        """Run cmd, abort on non-zero exit, return its output."""
        output, status = self.run(cmd, *args, **kwargs)
        self.checkexit(status, output)
        return output

    def runlines0(self, cmd, *args, **kwargs):
        """Run cmd, abort on non-zero exit, return its output lines."""
        output, status = self.runlines(cmd, *args, **kwargs)
        self.checkexit(status, ''.join(output))
        return output

    @propertycache
    def argmax(self):
        """Conservative maximum length for one command line (computed once)."""
        # POSIX requires at least 4096 bytes for ARG_MAX
        argmax = 4096
        try:
            argmax = os.sysconf("SC_ARG_MAX")
        except (AttributeError, ValueError):
            pass

        # Windows shells impose their own limits on command line length,
        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
        # details about cmd.exe limitations.

        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).
        return argmax // 2 - 1

    def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs):
        """Yield chunks of arglist, each small enough to append to one
        invocation of cmd without exceeding self.argmax."""
        cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs))
        limit = self.argmax - cmdlen
        bytes = 0
        fl = []
        for fn in arglist:
            # +3 per argument: rough padding for quoting and the
            # separating space (approximation, not an exact count).
            b = len(fn) + 3
            # Always emit at least one argument per chunk, even if a
            # single name alone exceeds the limit.
            if bytes + b < limit or len(fl) == 0:
                fl.append(fn)
                bytes += b
            else:
                yield fl
                fl = [fn]
                bytes = b
        if fl:
            yield fl

    def xargs(self, arglist, cmd, *args, **kwargs):
        """Run cmd over arglist, splitting into as many invocations as
        needed to respect command-line length limits (like xargs(1))."""
        for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs):
            self.run0(cmd, *(list(args) + l), **kwargs)
368
368
class mapfile(dict):
    """Persistent 'key value' mapping backed by an append-only text file.

    Existing entries are loaded on construction; assignments are written
    through to disk immediately (used e.g. for source->sink revision maps
    so conversions can be resumed).
    """
    def __init__(self, ui, path):
        super(mapfile, self).__init__()
        self.ui = ui
        self.path = path
        # Append-mode file handle, opened lazily on first assignment.
        self.fp = None
        # Keys in first-seen order (dict itself is unordered here).
        self.order = []
        self._read()

    def _read(self):
        """Load existing entries from self.path; a missing file is not
        an error, any other IOError propagates."""
        if not self.path:
            return
        try:
            fp = open(self.path, 'r')
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
            return
        for i, line in enumerate(fp):
            line = line.splitlines()[0].rstrip()
            if not line:
                # Ignore blank lines
                continue
            try:
                # rsplit on the last space: keys may contain spaces,
                # values may not.
                key, value = line.rsplit(' ', 1)
            except ValueError:
                raise util.Abort(
                    _('syntax error in %s(%d): key/value pair expected')
                    % (self.path, i + 1))
            if key not in self:
                self.order.append(key)
            super(mapfile, self).__setitem__(key, value)
        fp.close()

    def __setitem__(self, key, value):
        """Record key->value in memory and append it to the file,
        flushing so a concurrent reader (or a crash) sees the entry."""
        if self.fp is None:
            try:
                self.fp = open(self.path, 'a')
            except IOError, err:
                raise util.Abort(_('could not open map file %r: %s') %
                                 (self.path, err.strerror))
        self.fp.write('%s %s\n' % (key, value))
        self.fp.flush()
        super(mapfile, self).__setitem__(key, value)

    def close(self):
        # Close the append handle (safe to call when nothing was written).
        if self.fp:
            self.fp.close()
            self.fp = None
418
418
419 def parsesplicemap(path):
419 def parsesplicemap(path):
420 """Parse a splicemap, return a child/parents dictionary."""
420 """Parse a splicemap, return a child/parents dictionary."""
421 if not path:
421 if not path:
422 return {}
422 return {}
423 m = {}
423 m = {}
424 try:
424 try:
425 fp = open(path, 'r')
425 fp = open(path, 'r')
426 for i, line in enumerate(fp):
426 for i, line in enumerate(fp):
427 line = line.splitlines()[0].rstrip()
427 line = line.splitlines()[0].rstrip()
428 if not line:
428 if not line:
429 # Ignore blank lines
429 # Ignore blank lines
430 continue
430 continue
431 try:
431 try:
432 child, parents = line.split(' ', 1)
432 child, parents = line.split(' ', 1)
433 parents = parents.replace(',', ' ').split()
433 parents = parents.replace(',', ' ').split()
434 except ValueError:
434 except ValueError:
435 raise util.Abort(_('syntax error in %s(%d): child parent1'
435 raise util.Abort(_('syntax error in %s(%d): child parent1'
436 '[,parent2] expected') % (path, i + 1))
436 '[,parent2] expected') % (path, i + 1))
437 pp = []
437 pp = []
438 for p in parents:
438 for p in parents:
439 if p not in pp:
439 if p not in pp:
440 pp.append(p)
440 pp.append(p)
441 m[child] = pp
441 m[child] = pp
442 except IOError, e:
442 except IOError, e:
443 if e.errno != errno.ENOENT:
443 if e.errno != errno.ENOENT:
444 raise
444 raise
445 return m
445 return m
@@ -1,338 +1,338
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from common import NoRepo, commandline, commit, converter_source
9 from common import NoRepo, commandline, commit, converter_source
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import encoding, util
11 from mercurial import encoding, util
12 import os, shutil, tempfile, stat
12 import os, shutil, tempfile, stat
13 from email.Parser import Parser
13 from email.Parser import Parser
14
14
15 class gnuarch_source(converter_source, commandline):
15 class gnuarch_source(converter_source, commandline):
16
16
    class gnuarch_rev(object):
        """Mutable record of one GNU Arch revision: metadata plus the
        file operations it performs (filled in as history is parsed)."""
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''
            self.date = None
            self.author = ''
            # Revision this one continues from, when history jumps to a
            # different tree version (continuation-of header).
            self.continuationof = None
            # Per-revision file operations.
            self.add_files = []
            self.mod_files = []
            self.del_files = []
            # Renames, as mappings (presumably old name -> new name;
            # populated by change parsing elsewhere in this class).
            self.ren_files = {}
            self.ren_dirs = {}
29
29
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
32
32
33 if not os.path.exists(os.path.join(path, '{arch}')):
33 if not os.path.exists(os.path.join(path, '{arch}')):
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 % path)
35 % path)
36
36
37 # Could use checktool, but we want to check for baz or tla.
37 # Could use checktool, but we want to check for baz or tla.
38 self.execmd = None
38 self.execmd = None
39 if util.findexe('baz'):
39 if util.findexe('baz'):
40 self.execmd = 'baz'
40 self.execmd = 'baz'
41 else:
41 else:
42 if util.findexe('tla'):
42 if util.findexe('tla'):
43 self.execmd = 'tla'
43 self.execmd = 'tla'
44 else:
44 else:
45 raise util.Abort(_('cannot find a GNU Arch tool'))
45 raise util.Abort(_('cannot find a GNU Arch tool'))
46
46
47 commandline.__init__(self, ui, self.execmd)
47 commandline.__init__(self, ui, self.execmd)
48
48
49 self.path = os.path.realpath(path)
49 self.path = os.path.realpath(path)
50 self.tmppath = None
50 self.tmppath = None
51
51
52 self.treeversion = None
52 self.treeversion = None
53 self.lastrev = None
53 self.lastrev = None
54 self.changes = {}
54 self.changes = {}
55 self.parents = {}
55 self.parents = {}
56 self.tags = {}
56 self.tags = {}
57 self.catlogparser = Parser()
57 self.catlogparser = Parser()
58 self.encoding = encoding.encoding
58 self.encoding = encoding.encoding
59 self.archives = []
59 self.archives = []
60
60
61 def before(self):
61 def before(self):
62 # Get registered archives
62 # Get registered archives
63 self.archives = [i.rstrip('\n')
63 self.archives = [i.rstrip('\n')
64 for i in self.runlines0('archives', '-n')]
64 for i in self.runlines0('archives', '-n')]
65
65
66 if self.execmd == 'tla':
66 if self.execmd == 'tla':
67 output = self.run0('tree-version', self.path)
67 output = self.run0('tree-version', self.path)
68 else:
68 else:
69 output = self.run0('tree-version', '-d', self.path)
69 output = self.run0('tree-version', '-d', self.path)
70 self.treeversion = output.strip()
70 self.treeversion = output.strip()
71
71
72 # Get name of temporary directory
72 # Get name of temporary directory
73 version = self.treeversion.split('/')
73 version = self.treeversion.split('/')
74 self.tmppath = os.path.join(tempfile.gettempdir(),
74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 'hg-%s' % version[1])
75 'hg-%s' % version[1])
76
76
77 # Generate parents dictionary
77 # Generate parents dictionary
78 self.parents[None] = []
78 self.parents[None] = []
79 treeversion = self.treeversion
79 treeversion = self.treeversion
80 child = None
80 child = None
81 while treeversion:
81 while treeversion:
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83
83
84 archive = treeversion.split('/')[0]
84 archive = treeversion.split('/')[0]
85 if archive not in self.archives:
85 if archive not in self.archives:
86 self.ui.status(_('tree analysis stopped because it points to '
86 self.ui.status(_('tree analysis stopped because it points to '
87 'an unregistered archive %s...\n') % archive)
87 'an unregistered archive %s...\n') % archive)
88 break
88 break
89
89
90 # Get the complete list of revisions for that tree version
90 # Get the complete list of revisions for that tree version
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 self.checkexit(status, 'failed retrieveing revisions for %s'
92 self.checkexit(status, 'failed retrieveing revisions for %s'
93 % treeversion)
93 % treeversion)
94
94
95 # No new iteration unless a revision has a continuation-of header
95 # No new iteration unless a revision has a continuation-of header
96 treeversion = None
96 treeversion = None
97
97
98 for l in output:
98 for l in output:
99 rev = l.strip()
99 rev = l.strip()
100 self.changes[rev] = self.gnuarch_rev(rev)
100 self.changes[rev] = self.gnuarch_rev(rev)
101 self.parents[rev] = []
101 self.parents[rev] = []
102
102
103 # Read author, date and summary
103 # Read author, date and summary
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 if status:
105 if status:
106 catlog = self.run0('cat-archive-log', rev)
106 catlog = self.run0('cat-archive-log', rev)
107 self._parsecatlog(catlog, rev)
107 self._parsecatlog(catlog, rev)
108
108
109 # Populate the parents map
109 # Populate the parents map
110 self.parents[child].append(rev)
110 self.parents[child].append(rev)
111
111
112 # Keep track of the current revision as the child of the next
112 # Keep track of the current revision as the child of the next
113 # revision scanned
113 # revision scanned
114 child = rev
114 child = rev
115
115
116 # Check if we have to follow the usual incremental history
116 # Check if we have to follow the usual incremental history
117 # or if we have to 'jump' to a different treeversion given
117 # or if we have to 'jump' to a different treeversion given
118 # by the continuation-of header.
118 # by the continuation-of header.
119 if self.changes[rev].continuationof:
119 if self.changes[rev].continuationof:
120 treeversion = '--'.join(
120 treeversion = '--'.join(
121 self.changes[rev].continuationof.split('--')[:-1])
121 self.changes[rev].continuationof.split('--')[:-1])
122 break
122 break
123
123
124 # If we reached a base-0 revision w/o any continuation-of
124 # If we reached a base-0 revision w/o any continuation-of
125 # header, it means the tree history ends here.
125 # header, it means the tree history ends here.
126 if rev[-6:] == 'base-0':
126 if rev[-6:] == 'base-0':
127 break
127 break
128
128
129 def after(self):
129 def after(self):
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
132
132
133 def getheads(self):
133 def getheads(self):
134 return self.parents[None]
134 return self.parents[None]
135
135
136 def getfile(self, name, rev):
136 def getfile(self, name, rev):
137 if rev != self.lastrev:
137 if rev != self.lastrev:
138 raise util.Abort(_('internal calling inconsistency'))
138 raise util.Abort(_('internal calling inconsistency'))
139
139
140 # Raise IOError if necessary (i.e. deleted files).
140 # Raise IOError if necessary (i.e. deleted files).
141 if not os.path.lexists(os.path.join(self.tmppath, name)):
141 if not os.path.lexists(os.path.join(self.tmppath, name)):
142 raise IOError
142 raise IOError
143
143
144 return self._getfile(name, rev)
144 return self._getfile(name, rev)
145
145
146 def getchanges(self, rev):
146 def getchanges(self, rev):
147 self._update(rev)
147 self._update(rev)
148 changes = []
148 changes = []
149 copies = {}
149 copies = {}
150
150
151 for f in self.changes[rev].add_files:
151 for f in self.changes[rev].add_files:
152 changes.append((f, rev))
152 changes.append((f, rev))
153
153
154 for f in self.changes[rev].mod_files:
154 for f in self.changes[rev].mod_files:
155 changes.append((f, rev))
155 changes.append((f, rev))
156
156
157 for f in self.changes[rev].del_files:
157 for f in self.changes[rev].del_files:
158 changes.append((f, rev))
158 changes.append((f, rev))
159
159
160 for src in self.changes[rev].ren_files:
160 for src in self.changes[rev].ren_files:
161 to = self.changes[rev].ren_files[src]
161 to = self.changes[rev].ren_files[src]
162 changes.append((src, rev))
162 changes.append((src, rev))
163 changes.append((to, rev))
163 changes.append((to, rev))
164 copies[to] = src
164 copies[to] = src
165
165
166 for src in self.changes[rev].ren_dirs:
166 for src in self.changes[rev].ren_dirs:
167 to = self.changes[rev].ren_dirs[src]
167 to = self.changes[rev].ren_dirs[src]
168 chgs, cps = self._rendirchanges(src, to)
168 chgs, cps = self._rendirchanges(src, to)
169 changes += [(f, rev) for f in chgs]
169 changes += [(f, rev) for f in chgs]
170 copies.update(cps)
170 copies.update(cps)
171
171
172 self.lastrev = rev
172 self.lastrev = rev
173 return sorted(set(changes)), copies
173 return sorted(set(changes)), copies
174
174
175 def getcommit(self, rev):
175 def getcommit(self, rev):
176 changes = self.changes[rev]
176 changes = self.changes[rev]
177 return commit(author=changes.author, date=changes.date,
177 return commit(author=changes.author, date=changes.date,
178 desc=changes.summary, parents=self.parents[rev], rev=rev)
178 desc=changes.summary, parents=self.parents[rev], rev=rev)
179
179
180 def gettags(self):
180 def gettags(self):
181 return self.tags
181 return self.tags
182
182
183 def _execute(self, cmd, *args, **kwargs):
183 def _execute(self, cmd, *args, **kwargs):
184 cmdline = [self.execmd, cmd]
184 cmdline = [self.execmd, cmd]
185 cmdline += args
185 cmdline += args
186 cmdline = [util.shellquote(arg) for arg in cmdline]
186 cmdline = [util.shellquote(arg) for arg in cmdline]
187 cmdline += ['>', util.nulldev, '2>', util.nulldev]
187 cmdline += ['>', os.devnull, '2>', os.devnull]
188 cmdline = util.quotecommand(' '.join(cmdline))
188 cmdline = util.quotecommand(' '.join(cmdline))
189 self.ui.debug(cmdline, '\n')
189 self.ui.debug(cmdline, '\n')
190 return os.system(cmdline)
190 return os.system(cmdline)
191
191
192 def _update(self, rev):
192 def _update(self, rev):
193 self.ui.debug('applying revision %s...\n' % rev)
193 self.ui.debug('applying revision %s...\n' % rev)
194 changeset, status = self.runlines('replay', '-d', self.tmppath,
194 changeset, status = self.runlines('replay', '-d', self.tmppath,
195 rev)
195 rev)
196 if status:
196 if status:
197 # Something went wrong while merging (baz or tla
197 # Something went wrong while merging (baz or tla
198 # issue?), get latest revision and try from there
198 # issue?), get latest revision and try from there
199 shutil.rmtree(self.tmppath, ignore_errors=True)
199 shutil.rmtree(self.tmppath, ignore_errors=True)
200 self._obtainrevision(rev)
200 self._obtainrevision(rev)
201 else:
201 else:
202 old_rev = self.parents[rev][0]
202 old_rev = self.parents[rev][0]
203 self.ui.debug('computing changeset between %s and %s...\n'
203 self.ui.debug('computing changeset between %s and %s...\n'
204 % (old_rev, rev))
204 % (old_rev, rev))
205 self._parsechangeset(changeset, rev)
205 self._parsechangeset(changeset, rev)
206
206
207 def _getfile(self, name, rev):
207 def _getfile(self, name, rev):
208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
209 if stat.S_ISLNK(mode):
209 if stat.S_ISLNK(mode):
210 data = os.readlink(os.path.join(self.tmppath, name))
210 data = os.readlink(os.path.join(self.tmppath, name))
211 mode = mode and 'l' or ''
211 mode = mode and 'l' or ''
212 else:
212 else:
213 data = open(os.path.join(self.tmppath, name), 'rb').read()
213 data = open(os.path.join(self.tmppath, name), 'rb').read()
214 mode = (mode & 0111) and 'x' or ''
214 mode = (mode & 0111) and 'x' or ''
215 return data, mode
215 return data, mode
216
216
217 def _exclude(self, name):
217 def _exclude(self, name):
218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
219 for exc in exclude:
219 for exc in exclude:
220 if name.find(exc) != -1:
220 if name.find(exc) != -1:
221 return True
221 return True
222 return False
222 return False
223
223
224 def _readcontents(self, path):
224 def _readcontents(self, path):
225 files = []
225 files = []
226 contents = os.listdir(path)
226 contents = os.listdir(path)
227 while len(contents) > 0:
227 while len(contents) > 0:
228 c = contents.pop()
228 c = contents.pop()
229 p = os.path.join(path, c)
229 p = os.path.join(path, c)
230 # os.walk could be used, but here we avoid internal GNU
230 # os.walk could be used, but here we avoid internal GNU
231 # Arch files and directories, thus saving a lot time.
231 # Arch files and directories, thus saving a lot time.
232 if not self._exclude(p):
232 if not self._exclude(p):
233 if os.path.isdir(p):
233 if os.path.isdir(p):
234 contents += [os.path.join(c, f) for f in os.listdir(p)]
234 contents += [os.path.join(c, f) for f in os.listdir(p)]
235 else:
235 else:
236 files.append(c)
236 files.append(c)
237 return files
237 return files
238
238
239 def _rendirchanges(self, src, dest):
239 def _rendirchanges(self, src, dest):
240 changes = []
240 changes = []
241 copies = {}
241 copies = {}
242 files = self._readcontents(os.path.join(self.tmppath, dest))
242 files = self._readcontents(os.path.join(self.tmppath, dest))
243 for f in files:
243 for f in files:
244 s = os.path.join(src, f)
244 s = os.path.join(src, f)
245 d = os.path.join(dest, f)
245 d = os.path.join(dest, f)
246 changes.append(s)
246 changes.append(s)
247 changes.append(d)
247 changes.append(d)
248 copies[d] = s
248 copies[d] = s
249 return changes, copies
249 return changes, copies
250
250
251 def _obtainrevision(self, rev):
251 def _obtainrevision(self, rev):
252 self.ui.debug('obtaining revision %s...\n' % rev)
252 self.ui.debug('obtaining revision %s...\n' % rev)
253 output = self._execute('get', rev, self.tmppath)
253 output = self._execute('get', rev, self.tmppath)
254 self.checkexit(output)
254 self.checkexit(output)
255 self.ui.debug('analyzing revision %s...\n' % rev)
255 self.ui.debug('analyzing revision %s...\n' % rev)
256 files = self._readcontents(self.tmppath)
256 files = self._readcontents(self.tmppath)
257 self.changes[rev].add_files += files
257 self.changes[rev].add_files += files
258
258
259 def _stripbasepath(self, path):
259 def _stripbasepath(self, path):
260 if path.startswith('./'):
260 if path.startswith('./'):
261 return path[2:]
261 return path[2:]
262 return path
262 return path
263
263
264 def _parsecatlog(self, data, rev):
264 def _parsecatlog(self, data, rev):
265 try:
265 try:
266 catlog = self.catlogparser.parsestr(data)
266 catlog = self.catlogparser.parsestr(data)
267
267
268 # Commit date
268 # Commit date
269 self.changes[rev].date = util.datestr(
269 self.changes[rev].date = util.datestr(
270 util.strdate(catlog['Standard-date'],
270 util.strdate(catlog['Standard-date'],
271 '%Y-%m-%d %H:%M:%S'))
271 '%Y-%m-%d %H:%M:%S'))
272
272
273 # Commit author
273 # Commit author
274 self.changes[rev].author = self.recode(catlog['Creator'])
274 self.changes[rev].author = self.recode(catlog['Creator'])
275
275
276 # Commit description
276 # Commit description
277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
278 catlog.get_payload()))
278 catlog.get_payload()))
279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
280
280
281 # Commit revision origin when dealing with a branch or tag
281 # Commit revision origin when dealing with a branch or tag
282 if 'Continuation-of' in catlog:
282 if 'Continuation-of' in catlog:
283 self.changes[rev].continuationof = self.recode(
283 self.changes[rev].continuationof = self.recode(
284 catlog['Continuation-of'])
284 catlog['Continuation-of'])
285 except Exception:
285 except Exception:
286 raise util.Abort(_('could not parse cat-log of %s') % rev)
286 raise util.Abort(_('could not parse cat-log of %s') % rev)
287
287
288 def _parsechangeset(self, data, rev):
288 def _parsechangeset(self, data, rev):
289 for l in data:
289 for l in data:
290 l = l.strip()
290 l = l.strip()
291 # Added file (ignore added directory)
291 # Added file (ignore added directory)
292 if l.startswith('A') and not l.startswith('A/'):
292 if l.startswith('A') and not l.startswith('A/'):
293 file = self._stripbasepath(l[1:].strip())
293 file = self._stripbasepath(l[1:].strip())
294 if not self._exclude(file):
294 if not self._exclude(file):
295 self.changes[rev].add_files.append(file)
295 self.changes[rev].add_files.append(file)
296 # Deleted file (ignore deleted directory)
296 # Deleted file (ignore deleted directory)
297 elif l.startswith('D') and not l.startswith('D/'):
297 elif l.startswith('D') and not l.startswith('D/'):
298 file = self._stripbasepath(l[1:].strip())
298 file = self._stripbasepath(l[1:].strip())
299 if not self._exclude(file):
299 if not self._exclude(file):
300 self.changes[rev].del_files.append(file)
300 self.changes[rev].del_files.append(file)
301 # Modified binary file
301 # Modified binary file
302 elif l.startswith('Mb'):
302 elif l.startswith('Mb'):
303 file = self._stripbasepath(l[2:].strip())
303 file = self._stripbasepath(l[2:].strip())
304 if not self._exclude(file):
304 if not self._exclude(file):
305 self.changes[rev].mod_files.append(file)
305 self.changes[rev].mod_files.append(file)
306 # Modified link
306 # Modified link
307 elif l.startswith('M->'):
307 elif l.startswith('M->'):
308 file = self._stripbasepath(l[3:].strip())
308 file = self._stripbasepath(l[3:].strip())
309 if not self._exclude(file):
309 if not self._exclude(file):
310 self.changes[rev].mod_files.append(file)
310 self.changes[rev].mod_files.append(file)
311 # Modified file
311 # Modified file
312 elif l.startswith('M'):
312 elif l.startswith('M'):
313 file = self._stripbasepath(l[1:].strip())
313 file = self._stripbasepath(l[1:].strip())
314 if not self._exclude(file):
314 if not self._exclude(file):
315 self.changes[rev].mod_files.append(file)
315 self.changes[rev].mod_files.append(file)
316 # Renamed file (or link)
316 # Renamed file (or link)
317 elif l.startswith('=>'):
317 elif l.startswith('=>'):
318 files = l[2:].strip().split(' ')
318 files = l[2:].strip().split(' ')
319 if len(files) == 1:
319 if len(files) == 1:
320 files = l[2:].strip().split('\t')
320 files = l[2:].strip().split('\t')
321 src = self._stripbasepath(files[0])
321 src = self._stripbasepath(files[0])
322 dst = self._stripbasepath(files[1])
322 dst = self._stripbasepath(files[1])
323 if not self._exclude(src) and not self._exclude(dst):
323 if not self._exclude(src) and not self._exclude(dst):
324 self.changes[rev].ren_files[src] = dst
324 self.changes[rev].ren_files[src] = dst
325 # Conversion from file to link or from link to file (modified)
325 # Conversion from file to link or from link to file (modified)
326 elif l.startswith('ch'):
326 elif l.startswith('ch'):
327 file = self._stripbasepath(l[2:].strip())
327 file = self._stripbasepath(l[2:].strip())
328 if not self._exclude(file):
328 if not self._exclude(file):
329 self.changes[rev].mod_files.append(file)
329 self.changes[rev].mod_files.append(file)
330 # Renamed directory
330 # Renamed directory
331 elif l.startswith('/>'):
331 elif l.startswith('/>'):
332 dirs = l[2:].strip().split(' ')
332 dirs = l[2:].strip().split(' ')
333 if len(dirs) == 1:
333 if len(dirs) == 1:
334 dirs = l[2:].strip().split('\t')
334 dirs = l[2:].strip().split('\t')
335 src = self._stripbasepath(dirs[0])
335 src = self._stripbasepath(dirs[0])
336 dst = self._stripbasepath(dirs[1])
336 dst = self._stripbasepath(dirs[1])
337 if not self._exclude(src) and not self._exclude(dst):
337 if not self._exclude(src) and not self._exclude(dst):
338 self.changes[rev].ren_dirs[src] = dst
338 self.changes[rev].ren_dirs[src] = dst
@@ -1,1931 +1,1931
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile
10 import os, sys, errno, re, tempfile
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import subrepo, context, repair, bookmarks, graphmod, revset
13 import subrepo, context, repair, bookmarks, graphmod, revset
14
14
15 def parsealiases(cmd):
15 def parsealiases(cmd):
16 return cmd.lstrip("^").split("|")
16 return cmd.lstrip("^").split("|")
17
17
18 def findpossible(cmd, table, strict=False):
18 def findpossible(cmd, table, strict=False):
19 """
19 """
20 Return cmd -> (aliases, command table entry)
20 Return cmd -> (aliases, command table entry)
21 for each matching command.
21 for each matching command.
22 Return debug commands (or their aliases) only if no normal command matches.
22 Return debug commands (or their aliases) only if no normal command matches.
23 """
23 """
24 choice = {}
24 choice = {}
25 debugchoice = {}
25 debugchoice = {}
26
26
27 if cmd in table:
27 if cmd in table:
28 # short-circuit exact matches, "log" alias beats "^log|history"
28 # short-circuit exact matches, "log" alias beats "^log|history"
29 keys = [cmd]
29 keys = [cmd]
30 else:
30 else:
31 keys = table.keys()
31 keys = table.keys()
32
32
33 for e in keys:
33 for e in keys:
34 aliases = parsealiases(e)
34 aliases = parsealiases(e)
35 found = None
35 found = None
36 if cmd in aliases:
36 if cmd in aliases:
37 found = cmd
37 found = cmd
38 elif not strict:
38 elif not strict:
39 for a in aliases:
39 for a in aliases:
40 if a.startswith(cmd):
40 if a.startswith(cmd):
41 found = a
41 found = a
42 break
42 break
43 if found is not None:
43 if found is not None:
44 if aliases[0].startswith("debug") or found.startswith("debug"):
44 if aliases[0].startswith("debug") or found.startswith("debug"):
45 debugchoice[found] = (aliases, table[e])
45 debugchoice[found] = (aliases, table[e])
46 else:
46 else:
47 choice[found] = (aliases, table[e])
47 choice[found] = (aliases, table[e])
48
48
49 if not choice and debugchoice:
49 if not choice and debugchoice:
50 choice = debugchoice
50 choice = debugchoice
51
51
52 return choice
52 return choice
53
53
54 def findcmd(cmd, table, strict=True):
54 def findcmd(cmd, table, strict=True):
55 """Return (aliases, command table entry) for command string."""
55 """Return (aliases, command table entry) for command string."""
56 choice = findpossible(cmd, table, strict)
56 choice = findpossible(cmd, table, strict)
57
57
58 if cmd in choice:
58 if cmd in choice:
59 return choice[cmd]
59 return choice[cmd]
60
60
61 if len(choice) > 1:
61 if len(choice) > 1:
62 clist = choice.keys()
62 clist = choice.keys()
63 clist.sort()
63 clist.sort()
64 raise error.AmbiguousCommand(cmd, clist)
64 raise error.AmbiguousCommand(cmd, clist)
65
65
66 if choice:
66 if choice:
67 return choice.values()[0]
67 return choice.values()[0]
68
68
69 raise error.UnknownCommand(cmd)
69 raise error.UnknownCommand(cmd)
70
70
71 def findrepo(p):
71 def findrepo(p):
72 while not os.path.isdir(os.path.join(p, ".hg")):
72 while not os.path.isdir(os.path.join(p, ".hg")):
73 oldp, p = p, os.path.dirname(p)
73 oldp, p = p, os.path.dirname(p)
74 if p == oldp:
74 if p == oldp:
75 return None
75 return None
76
76
77 return p
77 return p
78
78
79 def bailifchanged(repo):
79 def bailifchanged(repo):
80 if repo.dirstate.p2() != nullid:
80 if repo.dirstate.p2() != nullid:
81 raise util.Abort(_('outstanding uncommitted merge'))
81 raise util.Abort(_('outstanding uncommitted merge'))
82 modified, added, removed, deleted = repo.status()[:4]
82 modified, added, removed, deleted = repo.status()[:4]
83 if modified or added or removed or deleted:
83 if modified or added or removed or deleted:
84 raise util.Abort(_("outstanding uncommitted changes"))
84 raise util.Abort(_("outstanding uncommitted changes"))
85 ctx = repo[None]
85 ctx = repo[None]
86 for s in ctx.substate:
86 for s in ctx.substate:
87 if ctx.sub(s).dirty():
87 if ctx.sub(s).dirty():
88 raise util.Abort(_("uncommitted changes in subrepo %s") % s)
88 raise util.Abort(_("uncommitted changes in subrepo %s") % s)
89
89
90 def logmessage(ui, opts):
90 def logmessage(ui, opts):
91 """ get the log message according to -m and -l option """
91 """ get the log message according to -m and -l option """
92 message = opts.get('message')
92 message = opts.get('message')
93 logfile = opts.get('logfile')
93 logfile = opts.get('logfile')
94
94
95 if message and logfile:
95 if message and logfile:
96 raise util.Abort(_('options --message and --logfile are mutually '
96 raise util.Abort(_('options --message and --logfile are mutually '
97 'exclusive'))
97 'exclusive'))
98 if not message and logfile:
98 if not message and logfile:
99 try:
99 try:
100 if logfile == '-':
100 if logfile == '-':
101 message = ui.fin.read()
101 message = ui.fin.read()
102 else:
102 else:
103 message = '\n'.join(util.readfile(logfile).splitlines())
103 message = '\n'.join(util.readfile(logfile).splitlines())
104 except IOError, inst:
104 except IOError, inst:
105 raise util.Abort(_("can't read commit message '%s': %s") %
105 raise util.Abort(_("can't read commit message '%s': %s") %
106 (logfile, inst.strerror))
106 (logfile, inst.strerror))
107 return message
107 return message
108
108
109 def loglimit(opts):
109 def loglimit(opts):
110 """get the log limit according to option -l/--limit"""
110 """get the log limit according to option -l/--limit"""
111 limit = opts.get('limit')
111 limit = opts.get('limit')
112 if limit:
112 if limit:
113 try:
113 try:
114 limit = int(limit)
114 limit = int(limit)
115 except ValueError:
115 except ValueError:
116 raise util.Abort(_('limit must be a positive integer'))
116 raise util.Abort(_('limit must be a positive integer'))
117 if limit <= 0:
117 if limit <= 0:
118 raise util.Abort(_('limit must be positive'))
118 raise util.Abort(_('limit must be positive'))
119 else:
119 else:
120 limit = None
120 limit = None
121 return limit
121 return limit
122
122
123 def makefilename(repo, pat, node, desc=None,
123 def makefilename(repo, pat, node, desc=None,
124 total=None, seqno=None, revwidth=None, pathname=None):
124 total=None, seqno=None, revwidth=None, pathname=None):
125 node_expander = {
125 node_expander = {
126 'H': lambda: hex(node),
126 'H': lambda: hex(node),
127 'R': lambda: str(repo.changelog.rev(node)),
127 'R': lambda: str(repo.changelog.rev(node)),
128 'h': lambda: short(node),
128 'h': lambda: short(node),
129 'm': lambda: re.sub('[^\w]', '_', str(desc))
129 'm': lambda: re.sub('[^\w]', '_', str(desc))
130 }
130 }
131 expander = {
131 expander = {
132 '%': lambda: '%',
132 '%': lambda: '%',
133 'b': lambda: os.path.basename(repo.root),
133 'b': lambda: os.path.basename(repo.root),
134 }
134 }
135
135
136 try:
136 try:
137 if node:
137 if node:
138 expander.update(node_expander)
138 expander.update(node_expander)
139 if node:
139 if node:
140 expander['r'] = (lambda:
140 expander['r'] = (lambda:
141 str(repo.changelog.rev(node)).zfill(revwidth or 0))
141 str(repo.changelog.rev(node)).zfill(revwidth or 0))
142 if total is not None:
142 if total is not None:
143 expander['N'] = lambda: str(total)
143 expander['N'] = lambda: str(total)
144 if seqno is not None:
144 if seqno is not None:
145 expander['n'] = lambda: str(seqno)
145 expander['n'] = lambda: str(seqno)
146 if total is not None and seqno is not None:
146 if total is not None and seqno is not None:
147 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
147 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
148 if pathname is not None:
148 if pathname is not None:
149 expander['s'] = lambda: os.path.basename(pathname)
149 expander['s'] = lambda: os.path.basename(pathname)
150 expander['d'] = lambda: os.path.dirname(pathname) or '.'
150 expander['d'] = lambda: os.path.dirname(pathname) or '.'
151 expander['p'] = lambda: pathname
151 expander['p'] = lambda: pathname
152
152
153 newname = []
153 newname = []
154 patlen = len(pat)
154 patlen = len(pat)
155 i = 0
155 i = 0
156 while i < patlen:
156 while i < patlen:
157 c = pat[i]
157 c = pat[i]
158 if c == '%':
158 if c == '%':
159 i += 1
159 i += 1
160 c = pat[i]
160 c = pat[i]
161 c = expander[c]()
161 c = expander[c]()
162 newname.append(c)
162 newname.append(c)
163 i += 1
163 i += 1
164 return ''.join(newname)
164 return ''.join(newname)
165 except KeyError, inst:
165 except KeyError, inst:
166 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
166 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
167 inst.args[0])
167 inst.args[0])
168
168
169 def makefileobj(repo, pat, node=None, desc=None, total=None,
169 def makefileobj(repo, pat, node=None, desc=None, total=None,
170 seqno=None, revwidth=None, mode='wb', pathname=None):
170 seqno=None, revwidth=None, mode='wb', pathname=None):
171
171
172 writable = mode not in ('r', 'rb')
172 writable = mode not in ('r', 'rb')
173
173
174 if not pat or pat == '-':
174 if not pat or pat == '-':
175 fp = writable and repo.ui.fout or repo.ui.fin
175 fp = writable and repo.ui.fout or repo.ui.fin
176 if util.safehasattr(fp, 'fileno'):
176 if util.safehasattr(fp, 'fileno'):
177 return os.fdopen(os.dup(fp.fileno()), mode)
177 return os.fdopen(os.dup(fp.fileno()), mode)
178 else:
178 else:
179 # if this fp can't be duped properly, return
179 # if this fp can't be duped properly, return
180 # a dummy object that can be closed
180 # a dummy object that can be closed
181 class wrappedfileobj(object):
181 class wrappedfileobj(object):
182 noop = lambda x: None
182 noop = lambda x: None
183 def __init__(self, f):
183 def __init__(self, f):
184 self.f = f
184 self.f = f
185 def __getattr__(self, attr):
185 def __getattr__(self, attr):
186 if attr == 'close':
186 if attr == 'close':
187 return self.noop
187 return self.noop
188 else:
188 else:
189 return getattr(self.f, attr)
189 return getattr(self.f, attr)
190
190
191 return wrappedfileobj(fp)
191 return wrappedfileobj(fp)
192 if util.safehasattr(pat, 'write') and writable:
192 if util.safehasattr(pat, 'write') and writable:
193 return pat
193 return pat
194 if util.safehasattr(pat, 'read') and 'r' in mode:
194 if util.safehasattr(pat, 'read') and 'r' in mode:
195 return pat
195 return pat
196 return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
196 return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
197 pathname),
197 pathname),
198 mode)
198 mode)
199
199
200 def openrevlog(repo, cmd, file_, opts):
200 def openrevlog(repo, cmd, file_, opts):
201 """opens the changelog, manifest, a filelog or a given revlog"""
201 """opens the changelog, manifest, a filelog or a given revlog"""
202 cl = opts['changelog']
202 cl = opts['changelog']
203 mf = opts['manifest']
203 mf = opts['manifest']
204 msg = None
204 msg = None
205 if cl and mf:
205 if cl and mf:
206 msg = _('cannot specify --changelog and --manifest at the same time')
206 msg = _('cannot specify --changelog and --manifest at the same time')
207 elif cl or mf:
207 elif cl or mf:
208 if file_:
208 if file_:
209 msg = _('cannot specify filename with --changelog or --manifest')
209 msg = _('cannot specify filename with --changelog or --manifest')
210 elif not repo:
210 elif not repo:
211 msg = _('cannot specify --changelog or --manifest '
211 msg = _('cannot specify --changelog or --manifest '
212 'without a repository')
212 'without a repository')
213 if msg:
213 if msg:
214 raise util.Abort(msg)
214 raise util.Abort(msg)
215
215
216 r = None
216 r = None
217 if repo:
217 if repo:
218 if cl:
218 if cl:
219 r = repo.changelog
219 r = repo.changelog
220 elif mf:
220 elif mf:
221 r = repo.manifest
221 r = repo.manifest
222 elif file_:
222 elif file_:
223 filelog = repo.file(file_)
223 filelog = repo.file(file_)
224 if len(filelog):
224 if len(filelog):
225 r = filelog
225 r = filelog
226 if not r:
226 if not r:
227 if not file_:
227 if not file_:
228 raise error.CommandError(cmd, _('invalid arguments'))
228 raise error.CommandError(cmd, _('invalid arguments'))
229 if not os.path.isfile(file_):
229 if not os.path.isfile(file_):
230 raise util.Abort(_("revlog '%s' not found") % file_)
230 raise util.Abort(_("revlog '%s' not found") % file_)
231 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
231 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
232 file_[:-2] + ".i")
232 file_[:-2] + ".i")
233 return r
233 return r
234
234
235 def copy(ui, repo, pats, opts, rename=False):
235 def copy(ui, repo, pats, opts, rename=False):
236 # called with the repo lock held
236 # called with the repo lock held
237 #
237 #
238 # hgsep => pathname that uses "/" to separate directories
238 # hgsep => pathname that uses "/" to separate directories
239 # ossep => pathname that uses os.sep to separate directories
239 # ossep => pathname that uses os.sep to separate directories
240 cwd = repo.getcwd()
240 cwd = repo.getcwd()
241 targets = {}
241 targets = {}
242 after = opts.get("after")
242 after = opts.get("after")
243 dryrun = opts.get("dry_run")
243 dryrun = opts.get("dry_run")
244 wctx = repo[None]
244 wctx = repo[None]
245
245
246 def walkpat(pat):
246 def walkpat(pat):
247 srcs = []
247 srcs = []
248 badstates = after and '?' or '?r'
248 badstates = after and '?' or '?r'
249 m = scmutil.match(repo[None], [pat], opts, globbed=True)
249 m = scmutil.match(repo[None], [pat], opts, globbed=True)
250 for abs in repo.walk(m):
250 for abs in repo.walk(m):
251 state = repo.dirstate[abs]
251 state = repo.dirstate[abs]
252 rel = m.rel(abs)
252 rel = m.rel(abs)
253 exact = m.exact(abs)
253 exact = m.exact(abs)
254 if state in badstates:
254 if state in badstates:
255 if exact and state == '?':
255 if exact and state == '?':
256 ui.warn(_('%s: not copying - file is not managed\n') % rel)
256 ui.warn(_('%s: not copying - file is not managed\n') % rel)
257 if exact and state == 'r':
257 if exact and state == 'r':
258 ui.warn(_('%s: not copying - file has been marked for'
258 ui.warn(_('%s: not copying - file has been marked for'
259 ' remove\n') % rel)
259 ' remove\n') % rel)
260 continue
260 continue
261 # abs: hgsep
261 # abs: hgsep
262 # rel: ossep
262 # rel: ossep
263 srcs.append((abs, rel, exact))
263 srcs.append((abs, rel, exact))
264 return srcs
264 return srcs
265
265
266 # abssrc: hgsep
266 # abssrc: hgsep
267 # relsrc: ossep
267 # relsrc: ossep
268 # otarget: ossep
268 # otarget: ossep
269 def copyfile(abssrc, relsrc, otarget, exact):
269 def copyfile(abssrc, relsrc, otarget, exact):
270 abstarget = scmutil.canonpath(repo.root, cwd, otarget)
270 abstarget = scmutil.canonpath(repo.root, cwd, otarget)
271 if '/' in abstarget:
271 if '/' in abstarget:
272 # We cannot normalize abstarget itself, this would prevent
272 # We cannot normalize abstarget itself, this would prevent
273 # case only renames, like a => A.
273 # case only renames, like a => A.
274 abspath, absname = abstarget.rsplit('/', 1)
274 abspath, absname = abstarget.rsplit('/', 1)
275 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
275 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
276 reltarget = repo.pathto(abstarget, cwd)
276 reltarget = repo.pathto(abstarget, cwd)
277 target = repo.wjoin(abstarget)
277 target = repo.wjoin(abstarget)
278 src = repo.wjoin(abssrc)
278 src = repo.wjoin(abssrc)
279 state = repo.dirstate[abstarget]
279 state = repo.dirstate[abstarget]
280
280
281 scmutil.checkportable(ui, abstarget)
281 scmutil.checkportable(ui, abstarget)
282
282
283 # check for collisions
283 # check for collisions
284 prevsrc = targets.get(abstarget)
284 prevsrc = targets.get(abstarget)
285 if prevsrc is not None:
285 if prevsrc is not None:
286 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
286 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
287 (reltarget, repo.pathto(abssrc, cwd),
287 (reltarget, repo.pathto(abssrc, cwd),
288 repo.pathto(prevsrc, cwd)))
288 repo.pathto(prevsrc, cwd)))
289 return
289 return
290
290
291 # check for overwrites
291 # check for overwrites
292 exists = os.path.lexists(target)
292 exists = os.path.lexists(target)
293 samefile = False
293 samefile = False
294 if exists and abssrc != abstarget:
294 if exists and abssrc != abstarget:
295 if (repo.dirstate.normalize(abssrc) ==
295 if (repo.dirstate.normalize(abssrc) ==
296 repo.dirstate.normalize(abstarget)):
296 repo.dirstate.normalize(abstarget)):
297 if not rename:
297 if not rename:
298 ui.warn(_("%s: can't copy - same file\n") % reltarget)
298 ui.warn(_("%s: can't copy - same file\n") % reltarget)
299 return
299 return
300 exists = False
300 exists = False
301 samefile = True
301 samefile = True
302
302
303 if not after and exists or after and state in 'mn':
303 if not after and exists or after and state in 'mn':
304 if not opts['force']:
304 if not opts['force']:
305 ui.warn(_('%s: not overwriting - file exists\n') %
305 ui.warn(_('%s: not overwriting - file exists\n') %
306 reltarget)
306 reltarget)
307 return
307 return
308
308
309 if after:
309 if after:
310 if not exists:
310 if not exists:
311 if rename:
311 if rename:
312 ui.warn(_('%s: not recording move - %s does not exist\n') %
312 ui.warn(_('%s: not recording move - %s does not exist\n') %
313 (relsrc, reltarget))
313 (relsrc, reltarget))
314 else:
314 else:
315 ui.warn(_('%s: not recording copy - %s does not exist\n') %
315 ui.warn(_('%s: not recording copy - %s does not exist\n') %
316 (relsrc, reltarget))
316 (relsrc, reltarget))
317 return
317 return
318 elif not dryrun:
318 elif not dryrun:
319 try:
319 try:
320 if exists:
320 if exists:
321 os.unlink(target)
321 os.unlink(target)
322 targetdir = os.path.dirname(target) or '.'
322 targetdir = os.path.dirname(target) or '.'
323 if not os.path.isdir(targetdir):
323 if not os.path.isdir(targetdir):
324 os.makedirs(targetdir)
324 os.makedirs(targetdir)
325 if samefile:
325 if samefile:
326 tmp = target + "~hgrename"
326 tmp = target + "~hgrename"
327 os.rename(src, tmp)
327 os.rename(src, tmp)
328 os.rename(tmp, target)
328 os.rename(tmp, target)
329 else:
329 else:
330 util.copyfile(src, target)
330 util.copyfile(src, target)
331 srcexists = True
331 srcexists = True
332 except IOError, inst:
332 except IOError, inst:
333 if inst.errno == errno.ENOENT:
333 if inst.errno == errno.ENOENT:
334 ui.warn(_('%s: deleted in working copy\n') % relsrc)
334 ui.warn(_('%s: deleted in working copy\n') % relsrc)
335 srcexists = False
335 srcexists = False
336 else:
336 else:
337 ui.warn(_('%s: cannot copy - %s\n') %
337 ui.warn(_('%s: cannot copy - %s\n') %
338 (relsrc, inst.strerror))
338 (relsrc, inst.strerror))
339 return True # report a failure
339 return True # report a failure
340
340
341 if ui.verbose or not exact:
341 if ui.verbose or not exact:
342 if rename:
342 if rename:
343 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
343 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
344 else:
344 else:
345 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
345 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
346
346
347 targets[abstarget] = abssrc
347 targets[abstarget] = abssrc
348
348
349 # fix up dirstate
349 # fix up dirstate
350 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
350 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
351 dryrun=dryrun, cwd=cwd)
351 dryrun=dryrun, cwd=cwd)
352 if rename and not dryrun:
352 if rename and not dryrun:
353 if not after and srcexists and not samefile:
353 if not after and srcexists and not samefile:
354 util.unlinkpath(repo.wjoin(abssrc))
354 util.unlinkpath(repo.wjoin(abssrc))
355 wctx.forget([abssrc])
355 wctx.forget([abssrc])
356
356
357 # pat: ossep
357 # pat: ossep
358 # dest ossep
358 # dest ossep
359 # srcs: list of (hgsep, hgsep, ossep, bool)
359 # srcs: list of (hgsep, hgsep, ossep, bool)
360 # return: function that takes hgsep and returns ossep
360 # return: function that takes hgsep and returns ossep
361 def targetpathfn(pat, dest, srcs):
361 def targetpathfn(pat, dest, srcs):
362 if os.path.isdir(pat):
362 if os.path.isdir(pat):
363 abspfx = scmutil.canonpath(repo.root, cwd, pat)
363 abspfx = scmutil.canonpath(repo.root, cwd, pat)
364 abspfx = util.localpath(abspfx)
364 abspfx = util.localpath(abspfx)
365 if destdirexists:
365 if destdirexists:
366 striplen = len(os.path.split(abspfx)[0])
366 striplen = len(os.path.split(abspfx)[0])
367 else:
367 else:
368 striplen = len(abspfx)
368 striplen = len(abspfx)
369 if striplen:
369 if striplen:
370 striplen += len(os.sep)
370 striplen += len(os.sep)
371 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
371 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
372 elif destdirexists:
372 elif destdirexists:
373 res = lambda p: os.path.join(dest,
373 res = lambda p: os.path.join(dest,
374 os.path.basename(util.localpath(p)))
374 os.path.basename(util.localpath(p)))
375 else:
375 else:
376 res = lambda p: dest
376 res = lambda p: dest
377 return res
377 return res
378
378
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        '''Build the source -> target path mapping for copy/rename --after.

        Unlike targetpathfn, the sources no longer exist on disk, so the
        mapping is inferred from which target paths already exist.
        '''
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = scmutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources, stripped at 'striplen', name
                    # an existing target -- the better strip length wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    # both candidate strip points exist on disk; pick the
                    # one matching more already-present targets
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res
422
422
423
423
    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; everything before it is a source
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    # with --after the sources are gone, so path mapping is inferred from
    # existing targets (targetpathafterfn) rather than from the sources
    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            # copyfile returns a true value to report a failure
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    # truthy return signals that at least one copy failed
    return errors != 0
460
460
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.

    opts - option dict; the keys read here are 'daemon',
        'daemon_pipefds' and 'pid_file'.
    parentfn - if given, called in the parent with the daemon child's pid
        after a successful detach; its return value is returned.
    initfn - optional callable invoked before the service body runs.
    runfn - the service body; its return value is returned.
    logfile - optional path receiving the daemon's stdout/stderr
        (otherwise both are redirected to os.devnull).
    runargs - argument vector used to respawn hg for the daemon child;
        defaults to the current command line.
    appendpid - append to the pid file instead of overwriting it.
    '''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # First pass (parent): respawn ourselves detached and wait for
        # startup. Signal child process startup with file removal.
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    # separate-argument form: drop the flag and its value
                    del runargs[i:i + 2]
                    break
            def condfn():
                # the child signals readiness by unlinking lockpath
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                # ENOENT is expected: the child already removed the file
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if opts['pid_file']:
        mode = appendpid and 'a' or 'w'
        fp = open(opts['pid_file'], mode)
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # Second pass (daemon child): detach from the terminal, notify the
        # waiting parent, and redirect the standard streams.
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            # platform without setsid (e.g. Windows); hidewindow below
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        # stdin always reads the null device; stdout/stderr go to the log
        # file when one was given, the null device otherwise
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
532
532
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    repo - the repository to export from
    revs - revisions to export, one patch per revision
    template - output file name pattern passed to makefileobj; ignored
        when fp is given
    fp - optional file object all patches are written to
    switch_parent - diff against the second parent instead of the first
    opts - diff options forwarded to patch.diff
    '''

    total = len(revs)
    # NOTE(review): max() raises ValueError when revs is empty -- callers
    # appear to always pass a non-empty list; confirm before relying on it
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # write one revision as a patch; opens (and owns) its own file
        # object when fp is None
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        shouldclose = False
        if not fp:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='ab')
            if fp != template:
                # only close file objects we created ourselves
                shouldclose = True
        if fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        # patch header: metadata lines consumed by 'hg import'
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and branch != 'default':
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        for chunk in patch.diff(repo, prev, node, opts=opts):
            fp.write(chunk)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
580
580
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   listsubrepos=False):
    '''show diff or diffstat.

    Output goes to ui (with labels) unless fp is given, in which case raw
    chunks are written to fp. stat selects diffstat output; listsubrepos
    recurses into subrepositories of the compared contexts.
    '''
    if fp is None:
        write = ui.write
    else:
        # drop label keywords: plain file objects do not understand them
        def write(s, **kw):
            fp.write(s)

    if stat:
        # diffstat needs no context lines; width follows the terminal
        # unless plain output was requested
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
623
623
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, show() accumulates output per-rev in self.hunk
        # and flush() emits it later
        self.buffered = buffered
        self.patch = patch
        self.diffopts = diffopts
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        '''Emit buffered output for rev; return 1 if a hunk was written.'''
        if rev in self.header:
            h = self.header[rev]
            # suppress a header identical to the last one written
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        '''Write the footer, if any.'''
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        '''Show ctx, buffering the (labeled) output when self.buffered.'''
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        # quiet mode: just the rev:shortnode pair
        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)),
                          label='log.node')
            return

        log = self.repo.changelog
        date = util.datestr(ctx.date())

        # full hashes with --debug, short ones otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
                      label='log.changeset')

        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')
        for bookmark in self.repo.nodebookmarks(changenode):
            self.ui.write(_("bookmark: %s\n") % bookmark,
                          label='log.bookmark')
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag,
                          label='log.tag')
        if self.ui.debugflag and ctx.phase():
            self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
                          label='log.phase')
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent,
                          label='log.parent')

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # modified, added, removed relative to the first parent
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            # verbose: full description; otherwise only the first line
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        '''Write diffstat and/or diff for node if patch output is enabled.'''
        if not matchfn:
            matchfn = self.patch
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    # blank line between diffstat and diff
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
782
782
783
783
784 class changeset_templater(changeset_printer):
784 class changeset_templater(changeset_printer):
785 '''format changeset information.'''
785 '''format changeset information.'''
786
786
    def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
        # full node hashes with --debug, 12-char short form otherwise
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        # shared template cache, exposed to templates as props['cache']
        self.cache = {}
801
801
    def use_template(self, t):
        '''set template string to use'''
        # overrides the 'changeset' entry in the underlying templater cache
        self.t.cache['changeset'] = t
805
805
806 def _meaningful_parentrevs(self, ctx):
806 def _meaningful_parentrevs(self, ctx):
807 """Return list of meaningful (or all if debug) parentrevs for rev.
807 """Return list of meaningful (or all if debug) parentrevs for rev.
808 """
808 """
809 parents = ctx.parents()
809 parents = ctx.parents()
810 if len(parents) > 1:
810 if len(parents) > 1:
811 return parents
811 return parents
812 if self.ui.debugflag:
812 if self.ui.debugflag:
813 return [parents[0], self.repo['null']]
813 return [parents[0], self.repo['null']]
814 if parents[0].rev() >= ctx.rev() - 1:
814 if parents[0].rev() >= ctx.rev() - 1:
815 return []
815 return []
816 return parents
816 return parents
817
def _show(self, ctx, copies, matchfn, props):
    '''show a single changeset or file revision'''

    showlist = templatekw.showlist

    # showparents() behaviour depends on ui trace level which
    # causes unexpected behaviours at templating level and makes
    # it harder to extract it in a standalone function. Its
    # behaviour cannot be changed so leave it here for now.
    def showparents(**args):
        ctx = args['ctx']
        parents = [[('rev', p.rev()), ('node', p.hex())]
                   for p in self._meaningful_parentrevs(ctx)]
        return showlist('parent', parents, **args)

    # assemble the property dict handed to the template engine
    props = props.copy()
    props.update(templatekw.keywords)
    props['parents'] = showparents
    props['templ'] = self.t
    props['ctx'] = ctx
    props['repo'] = self.repo
    props['revcache'] = {'copies': copies}
    props['cache'] = self.cache

    # find correct templates for current mode
    # later entries win, so debug overrides quiet/verbose variants
    tmplmodes = [
        (True, None),
        (self.ui.verbose, 'verbose'),
        (self.ui.quiet, 'quiet'),
        (self.ui.debugflag, 'debug'),
    ]

    types = {'header': '', 'footer':'', 'changeset': 'changeset'}
    for mode, postfix in tmplmodes:
        for type in types:
            # e.g. 'changeset_debug' when defined, else the plain name
            cur = postfix and ('%s_%s' % (type, postfix)) or type
            if mode and cur in self.t:
                types[type] = cur

    try:

        # write header
        if types['header']:
            h = templater.stringify(self.t(types['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only emit a header when it differs from the previous one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = types['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx.node(), matchfn)

        if types['footer']:
            if not self.footer:
                self.footer = templater.stringify(self.t(types['footer'],
                                                  **props))

    except KeyError, inst:
        # a template referenced an unknown keyword
        msg = _("%s: no key named '%s'")
        raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
    except SyntaxError, inst:
        raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch') or opts.get('stat'):
        patch = scmutil.matchall(repo)

    tmpl = opts.get('template')
    style = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        style = opts.get('style')

    # ui settings
    if not (tmpl or style):
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.parsestring(tmpl)
            except SyntaxError:
                # tolerate templates given without surrounding quotes
                tmpl = templater.parsestring(tmpl, quoted=False)
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not (tmpl or style):
        # nothing configured anywhere: plain changeset_printer display
        return changeset_printer(ui, repo, patch, opts, buffered)

    mapfile = None
    if style and not tmpl:
        mapfile = style
        # a bare style name (no directory part) refers to a shipped
        # map-cmdline.* file looked up on the template path
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname

    try:
        t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])
    if tmpl:
        t.use_template(tmpl)
    return t
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matchall = scmutil.matchall(repo)
    matched = {}

    # record the date of every changeset whose timestamp satisfies the spec
    def prep(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            matched[ctx.rev()] = when

    # walkchangerevs yields newest-first; the first hit is the tipmost match
    for ctx in walkchangerevs(repo, matchall, {'rev': None}, prep):
        rev = ctx.rev()
        if rev not in matched:
            continue
        ui.status(_("found revision %s from %s\n") %
                  (rev, util.datestr(matched[rev])))
        return str(rev)

    raise util.Abort(_("revision matching date not found"))
def increasingwindows(start, end, windowsize=8, sizelimit=512):
    """Yield (position, length) windows walking from start towards end.

    Windows double in size after each step, capped once windowsize has
    reached sizelimit, so long walks use few, large windows.  The walk
    ascends when start < end and descends otherwise; an empty range
    yields nothing.
    """
    ascending = start < end
    while (start < end) if ascending else (start > end):
        if ascending:
            yield start, min(windowsize, end - start)
            start += windowsize
        else:
            # note the descending span is measured exclusive of `end`
            yield start, min(windowsize, start - end - 1)
            start -= windowsize
        if windowsize < sizelimit:
            windowsize *= 2
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')

    if not len(repo):
        # empty repository: nothing to walk
        return []

    if follow:
        defrange = '%s:0' % repo['.'].rev()
    else:
        defrange = '-1:0'
    revs = scmutil.revrange(repo, opts.get('rev') or [defrange])
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if not slowpath and not match.files():
        # No files, no patterns. Display all revs.
        wanted = set(revs)
    copies = []

    if not slowpath and match.files():
        # We only have to read through the filelog to find wanted revisions

        minrev, maxrev = min(revs), max(revs)
        def filerevgen(filelog, last):
            """
            Only files, no patterns. Check the history of each file.

            Examines filelog entries within minrev, maxrev linkrev range
            Returns an iterator yielding (linkrev, parentlinkrevs, copied)
            tuples in backwards order
            """
            cl_count = len(repo)
            revs = []
            for j in xrange(0, last + 1):
                linkrev = filelog.linkrev(j)
                if linkrev < minrev:
                    continue
                # only yield rev for which we have the changelog, it can
                # happen while doing "hg log" during a pull or commit
                if linkrev >= cl_count:
                    break

                parentlinkrevs = []
                for p in filelog.parentrevs(j):
                    if p != nullrev:
                        parentlinkrevs.append(filelog.linkrev(p))
                n = filelog.node(j)
                revs.append((linkrev, parentlinkrevs,
                             follow and filelog.renamed(n)))

            return reversed(revs)
        def iterfiles():
            # yield (filename, filenode) pairs for the explicit files,
            # then for any copy sources discovered while walking
            pctx = repo['.']
            for filename in match.files():
                if follow:
                    if filename not in pctx:
                        raise util.Abort(_('cannot follow file not in parent '
                                           'revision: "%s"') % filename)
                    yield filename, pctx[filename].filenode()
                else:
                    yield filename, None
            for filename_node in copies:
                yield filename_node
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            if not len(filelog):
                if node is None:
                    # A zero count may be a directory or deleted file, so
                    # try to find matching entries on the slow path.
                    if follow:
                        raise util.Abort(
                            _('cannot follow nonexistent file: "%s"') % file_)
                    slowpath = True
                    break
                else:
                    continue

            if node is None:
                last = len(filelog) - 1
            else:
                last = filelog.rev(node)

            # keep track of all ancestors of the file
            ancestors = set([filelog.linkrev(last)])

            # iterate from latest to oldest revision
            for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
                if not follow:
                    if rev > maxrev:
                        continue
                else:
                    # Note that last might not be the first interesting
                    # rev to us:
                    # if the file has been changed after maxrev, we'll
                    # have linkrev(last) > maxrev, and we still need
                    # to explore the file graph
                    if rev not in ancestors:
                        continue
                    # XXX insert 1327 fix here
                    if flparentlinkrevs:
                        ancestors.update(flparentlinkrevs)

                fncache.setdefault(rev, []).append(file_)
                wanted.add(rev)
                if copied:
                    copies.append(copied)
    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        for i in sorted(revs):
            ctx = change(i)
            matches = filter(match, ctx.files())
            if matches:
                fncache[i] = matches
                wanted.add(i)

    class followfilter(object):
        # Stateful predicate: match(rev) is True when rev is connected
        # (ancestor or descendant, fed in order) to the first rev seen.
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = set()
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                # first revision seen anchors the walk
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.add(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.add(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.update(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.update(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        # drop each prune rev and everything reachable below it
        rev = repo[rev].rev()
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted.discard(x)

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and not match.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasingwindows(0, len(revs)):
            nrevs = [rev for rev in revs[i:i + window] if want(rev)]
            # first pass: call prepare() in ascending order
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # second pass: yield in the caller's requested order
            for rev in nrevs:
                yield change(rev)
    return iterate()
def _makegraphfilematcher(repo, pats, followfirst):
    """Return rev -> matcher for --patch/--stat output under --follow.

    When displaying a revision with --patch --follow FILE, we have to
    know which file of the revision must be diffed.  With --follow we
    want the names of the ancestors of FILE in the revision, stored in
    "fcache".  "fcache" is populated by reproducing the graph traversal
    already done by the --follow revset and relating linkrevs to file
    names (which is not "correct" but good enough).
    """
    fcache = {}
    populated = [False]  # mutable flag so the closure can flip it
    pctx = repo['.']
    wctx = repo[None]

    def _fill():
        # walk each pattern's filectx plus all of its ancestors and
        # index their paths by linkrev
        for fn in pats:
            base = pctx[fn]
            for group in ((base,), base.ancestors(followfirst=followfirst)):
                for c in group:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not populated[0]:
            # lazy initialization on first use
            populated[0] = True
            _fill()
        return scmutil.match(wctx, fcache.get(rev, []), default='path')

    return filematcher
def _makegraphlogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # maps an option name to (revset template, join operator used when
    # the option value is a list)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # work on a private copy: synthetic '_xxx' options are added below
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    followfirst = opts.get('follow_first') and 1 or 0
    # --follow with FILE behaviour depends on revs...
    startrev = revs[0]
    followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfile() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    pctx = repo[None]
    match, pats = scmutil.matchandpats(pctx, pats, opts)
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in pctx:
                raise util.Abort(_('cannot follow file not in parent '
                                   'revision: "%s"') % f)
            filelog = repo.file(f)
            if not len(filelog):
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True
    if slowpath:
        # See walkchangerevs() slow path.
        #
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
    else:
        if follow:
            fpats = ('_patsfollow', '_patsfollowfirst')
            fnopats = (('_ancestors', '_fancestors'),
                       ('_descendants', '_fdescendants'))
            if pats:
                # follow() revset inteprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                opts[fnopats[followdescendants][followfirst]] = str(startrev)
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        if follow:
            filematcher = _makegraphfilematcher(repo, pats, followfirst)
        else:
            filematcher = lambda rev: match

    # translate every populated, known option into a revset fragment
    expr = []
    for op, val in opts.iteritems():
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                # list-valued options are joined with their own operator
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
1343
1343
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    def increasingrevs(repo, revs, matcher):
        # The sorted input rev sequence is chopped in sub-sequences
        # which are sorted in ascending order and passed to the
        # matcher. The filtered revs are sorted again as they were in
        # the original sub-sequence. This achieve several things:
        #
        # - getlogrevs() now returns a generator which behaviour is
        #   adapted to log need. First results come fast, last ones
        #   are batched for performances.
        #
        # - revset matchers often operate faster on revision in
        #   changelog order, because most filters deal with the
        #   changelog.
        #
        # - revset matchers can reorder revisions. "A or B" typically
        #   returns returns the revision matching A then the revision
        #   matching B. We want to hide this internal implementation
        #   detail from the caller, and sorting the filtered revision
        #   again achieves this.
        for i, window in increasingwindows(0, len(revs), windowsize=1):
            orevs = revs[i:i + window]
            nrevs = set(matcher(repo, sorted(orevs)))
            for rev in orevs:
                if rev in nrevs:
                    yield rev

    if not len(repo):
        # empty repository: nothing to show, no expression to build
        return iter([]), None, None
    # Default --rev value depends on --follow but --follow behaviour
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    else:
        if follow and len(repo) > 0:
            revs = scmutil.revrange(repo, ['.:0'])
        else:
            # default: all revisions, newest first
            revs = range(len(repo) - 1, -1, -1)
    if not revs:
        return iter([]), None, None
    expr, filematcher = _makegraphlogrevset(repo, pats, opts, revs)
    if expr:
        # filter revisions lazily, window by window, through the revset
        matcher = revset.match(repo.ui, expr)
        revs = increasingrevs(repo, revs, matcher)
    if not opts.get('hidden'):
        # --hidden is still experimental and not worth a dedicated revset
        # yet. Fortunately, filtering revision number is fast.
        revs = (r for r in revs if r not in repo.hiddenrevs)
    else:
        revs = iter(revs)
    return revs, expr, filematcher
1403
1403
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the changesets yielded by 'dag' as an ASCII graph.

    'dag' yields (rev, type, ctx, parents) tuples. Changesets whose
    node is in 'showparents' are drawn with '@', obsolete ones with
    'x', everything else with 'o'. 'edgefn' computes the graph edge
    data for each row. If given, 'getrenamed' reports copy sources per
    file and 'filematcher' maps a revision to a match object
    restricting which files 'displayer' details.
    """
    seen, state = [], graphmod.asciistate()
    for rev, type, ctx, parents in dag:
        char = 'o'
        if ctx.node() in showparents:
            char = '@'
        elif ctx.obsolete():
            char = 'x'
        copies = None
        # skip copy lookup for the null revision (ctx.rev() == 0 is falsy
        # only for rev 0; presumably intentional here — matches upstream)
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # the displayer is buffered: pull back the rendered text so it
        # can be interleaved with the graph drawing
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(rev)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
1432
1432
def graphlog(ui, repo, *pats, **opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to the log command ones. Revisions are
    shown newest first; --limit, --copies and --stat/--patch are
    honored through getgraphlogrevs()/displaygraph().
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    # use a real boolean instead of the historical integer 1
    revs = sorted(revs, reverse=True)
    limit = loglimit(opts)
    if limit is not None:
        revs = revs[:limit]
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            # only track copies up to the highest requested revision
            endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    displaygraph(ui, revdag, displayer, showparents,
                 graphmod.asciiedges, getrenamed, filematcher)
1452
1452
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph is set."""
    for name in ["newest_first"]:
        if opts.get(name):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % name.replace("_", "-"))
1458
1458
def graphrevs(repo, nodes, opts):
    """Return a graph dag iterator over 'nodes', newest first.

    Note: 'nodes' is reversed in place; --limit truncates the result.
    """
    maxcount = loglimit(opts)
    nodes.reverse()
    if maxcount is not None:
        nodes = nodes[:maxcount]
    return graphmod.nodes(repo, nodes)
1465
1465
def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
    """Schedule matched files (and subrepo files) for addition.

    Returns the list of matched files that could not be added. With
    'dryrun' nothing is actually added; 'explicitonly' limits the
    addition to files listed explicitly by the matcher. 'prefix' is
    prepended when reporting paths (subrepo recursion).
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    oldbad = match.bad
    # record files the matcher flags as bad while still delegating to
    # the original bad-file handler
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
    for f in repo.walk(match):
        exact = match.exact(f)
        if exact or not explicitonly and f not in repo.dirstate:
            if cca:
                # warn/abort on case-insensitive filename collisions
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(join(f)))

    # recurse into subrepositories; a missing one is skipped with a note
    for subpath in wctx.substate:
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            if listsubrepos:
                bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
                                   False))
            else:
                bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
                                   True))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not dryrun:
        rejected = wctx.add(names, prefix)
        # only report rejects for files the user named explicitly
        bad.extend(f for f in rejected if f in match.files())
    return bad
1504
1504
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking matched files without removing them from disk.

    Returns a (bad, forgot) pair: files that could not be forgotten
    and files that were. Subrepositories are recursed into; missing
    ones are skipped with a message. 'explicitonly' limits forgetting
    to files listed explicitly by the matcher.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    oldbad = match.bad
    # record files the matcher flags as bad while still delegating to
    # the original bad-file handler
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    wctx = repo[None]
    forgot = []
    s = repo.status(match=match, clean=True)
    # candidates: modified (s[0]) + added (s[1]) + deleted (s[3]) +
    # clean (s[6]) tracked files
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in wctx.substate:
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(ui, submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about named files that are not tracked (and not handled
        # above); directories are silently ignored
        for f in match.files():
            if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
                if f not in forgot:
                    if os.path.exists(match.rel(join(f))):
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(join(f)))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(join(f)))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(forget)
    return bad, forgot
1546
1546
def duplicatecopies(repo, rev, p1):
    "Reproduce copies found in the source revision in the dirstate for grafts"
    sourcectx = repo[rev]
    parentctx = repo[p1]
    copymap = copies.pathcopies(parentctx, sourcectx)
    for dst, src in copymap.iteritems():
        repo.dirstate.copy(src, dst)
1551
1551
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        scmutil.addremove(repo, pats, opts)

    matcher = scmutil.match(repo[None], pats, opts)
    return commitfunc(ui, repo, message, matcher, opts)
1566
1566
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Replace changeset 'old' with a new one folding in the working
    directory changes, committing via 'commitfunc'.

    Returns the node of the replacement changeset. Bookmarks pointing
    at 'old' are moved to it, and the superseded changesets are
    stripped with an 'amend-backup' bundle. Runs under the wlock (and
    the store lock for the strip).
    """
    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    wlock = repo.wlock()
    try:
        # First, do a regular commit to record all changes in the working
        # directory (if there are any)
        ui.callhooks = False
        try:
            node = commit(ui, repo, commitfunc, pats, opts)
        finally:
            ui.callhooks = True
        ctx = repo[node]

        # Participating changesets:
        #
        # node/ctx o - new (intermediate) commit that contains changes from
        #          |   working dir to go into amending commit (or a workingctx
        #          |   if there were no changes)
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - parent of amending changeset

        # Update extra dict from amended commit (e.g. to preserve graft source)
        extra.update(old.extra())

        # Also update it from the intermediate commit or from the wctx
        extra.update(ctx.extra())

        files = set(old.files())

        # Second, we use either the commit we just did, or if there were no
        # changes the parent of the working directory as the version of the
        # files in the final amend commit
        if node:
            ui.note(_('copying changeset %s to %s\n') % (ctx, base))

            user = ctx.user()
            date = ctx.date()
            message = ctx.description()
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, ctx)

            # Prune files which were reverted by the updates: if old introduced
            # file X and our intermediate commit, node, renamed that file, then
            # those two files are the same and we can discard X from our list
            # of files. Likewise if X was deleted, it's no longer relevant
            files.update(ctx.files())

            def samefile(f):
                # True when f has identical content and flags in ctx and
                # base, or is absent from both
                if f in ctx.manifest():
                    a = ctx.filectx(f)
                    if f in base.manifest():
                        b = base.filectx(f)
                        return (not a.cmp(b)
                                and a.flags() == b.flags())
                    else:
                        return False
                else:
                    return f not in base.manifest()
            files = [f for f in files if not samefile(f)]

            def filectxfn(repo, ctx_, path):
                # serve file contents from the intermediate commit;
                # memctx treats IOError as "file removed"
                try:
                    fctx = ctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    raise IOError
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    raise IOError

        # See if we got a message from -m or -l, if not, open the editor
        # with the message of the changeset to amend
        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()
        message = logmessage(ui, opts)
        if not message:
            cctx = context.workingctx(repo, old.description(), user, date,
                                      extra,
                                      repo.status(base.node(), old.node()))
            message = commitforceeditor(repo, cctx, [])

        new = context.memctx(repo,
                             parents=[base.node(), nullid],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra)
        newid = repo.commitctx(new)
        if newid != old.node():
            # Reroute the working copy parent to the new changeset
            repo.setparents(newid, nullid)

            # Move bookmarks from old parent to amend commit
            bms = repo.nodebookmarks(old.node())
            if bms:
                for bm in bms:
                    repo._bookmarks[bm] = newid
                bookmarks.write(repo)

        # Strip the intermediate commit (if there was one) and the amended
        # commit
        lock = repo.lock()
        try:
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
        finally:
            lock.release()
    finally:
        wlock.release()
    return newid
1696
1696
def commiteditor(repo, ctx, subs):
    """Return ctx's description, invoking the editor only when empty."""
    return ctx.description() or commitforceeditor(repo, ctx, subs)
1701
1701
def commitforceeditor(repo, ctx, subs):
    """Open the user's editor pre-filled with a commit message template
    built from 'ctx' (existing description, user, branch, file status)
    and return the edited text with 'HG:' helper lines stripped.

    Aborts if the resulting message is empty. The editor is run from
    the repository root; the previous working directory is restored
    afterwards.
    """
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(_("HG: Enter commit message."
                      " Lines beginning with 'HG:' are removed."))
    edittext.append(_("HG: Leave message empty to abort commit."))
    edittext.append("HG: --")
    edittext.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        edittext.append(_("HG: branch merge"))
    if ctx.branch():
        edittext.append(_("HG: branch '%s'") % ctx.branch())
    edittext.extend([_("HG: subrepo %s") % s for s in subs])
    edittext.extend([_("HG: added %s") % f for f in added])
    edittext.extend([_("HG: changed %s") % f for f in modified])
    edittext.extend([_("HG: removed %s") % f for f in removed])
    if not added and not modified and not removed:
        edittext.append(_("HG: no files changed"))
    edittext.append("")
    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    text = repo.ui.edit("\n".join(edittext), ctx.user())
    # strip the HG: helper lines from the edited result
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
1736
1736
1737 def revert(ui, repo, ctx, parents, *pats, **opts):
1737 def revert(ui, repo, ctx, parents, *pats, **opts):
1738 parent, p2 = parents
1738 parent, p2 = parents
1739 node = ctx.node()
1739 node = ctx.node()
1740
1740
1741 mf = ctx.manifest()
1741 mf = ctx.manifest()
1742 if node == parent:
1742 if node == parent:
1743 pmf = mf
1743 pmf = mf
1744 else:
1744 else:
1745 pmf = None
1745 pmf = None
1746
1746
1747 # need all matching names in dirstate and manifest of target rev,
1747 # need all matching names in dirstate and manifest of target rev,
1748 # so have to walk both. do not print errors if files exist in one
1748 # so have to walk both. do not print errors if files exist in one
1749 # but not other.
1749 # but not other.
1750
1750
1751 names = {}
1751 names = {}
1752
1752
1753 wlock = repo.wlock()
1753 wlock = repo.wlock()
1754 try:
1754 try:
1755 # walk dirstate.
1755 # walk dirstate.
1756
1756
1757 m = scmutil.match(repo[None], pats, opts)
1757 m = scmutil.match(repo[None], pats, opts)
1758 m.bad = lambda x, y: False
1758 m.bad = lambda x, y: False
1759 for abs in repo.walk(m):
1759 for abs in repo.walk(m):
1760 names[abs] = m.rel(abs), m.exact(abs)
1760 names[abs] = m.rel(abs), m.exact(abs)
1761
1761
1762 # walk target manifest.
1762 # walk target manifest.
1763
1763
1764 def badfn(path, msg):
1764 def badfn(path, msg):
1765 if path in names:
1765 if path in names:
1766 return
1766 return
1767 if path in ctx.substate:
1767 if path in ctx.substate:
1768 return
1768 return
1769 path_ = path + '/'
1769 path_ = path + '/'
1770 for f in names:
1770 for f in names:
1771 if f.startswith(path_):
1771 if f.startswith(path_):
1772 return
1772 return
1773 ui.warn("%s: %s\n" % (m.rel(path), msg))
1773 ui.warn("%s: %s\n" % (m.rel(path), msg))
1774
1774
1775 m = scmutil.match(ctx, pats, opts)
1775 m = scmutil.match(ctx, pats, opts)
1776 m.bad = badfn
1776 m.bad = badfn
1777 for abs in ctx.walk(m):
1777 for abs in ctx.walk(m):
1778 if abs not in names:
1778 if abs not in names:
1779 names[abs] = m.rel(abs), m.exact(abs)
1779 names[abs] = m.rel(abs), m.exact(abs)
1780
1780
1781 # get the list of subrepos that must be reverted
1781 # get the list of subrepos that must be reverted
1782 targetsubs = [s for s in ctx.substate if m(s)]
1782 targetsubs = [s for s in ctx.substate if m(s)]
1783 m = scmutil.matchfiles(repo, names)
1783 m = scmutil.matchfiles(repo, names)
1784 changes = repo.status(match=m)[:4]
1784 changes = repo.status(match=m)[:4]
1785 modified, added, removed, deleted = map(set, changes)
1785 modified, added, removed, deleted = map(set, changes)
1786
1786
1787 # if f is a rename, also revert the source
1787 # if f is a rename, also revert the source
1788 cwd = repo.getcwd()
1788 cwd = repo.getcwd()
1789 for f in added:
1789 for f in added:
1790 src = repo.dirstate.copied(f)
1790 src = repo.dirstate.copied(f)
1791 if src and src not in names and repo.dirstate[src] == 'r':
1791 if src and src not in names and repo.dirstate[src] == 'r':
1792 removed.add(src)
1792 removed.add(src)
1793 names[src] = (repo.pathto(src, cwd), True)
1793 names[src] = (repo.pathto(src, cwd), True)
1794
1794
1795 def removeforget(abs):
1795 def removeforget(abs):
1796 if repo.dirstate[abs] == 'a':
1796 if repo.dirstate[abs] == 'a':
1797 return _('forgetting %s\n')
1797 return _('forgetting %s\n')
1798 return _('removing %s\n')
1798 return _('removing %s\n')
1799
1799
1800 revert = ([], _('reverting %s\n'))
1800 revert = ([], _('reverting %s\n'))
1801 add = ([], _('adding %s\n'))
1801 add = ([], _('adding %s\n'))
1802 remove = ([], removeforget)
1802 remove = ([], removeforget)
1803 undelete = ([], _('undeleting %s\n'))
1803 undelete = ([], _('undeleting %s\n'))
1804
1804
1805 disptable = (
1805 disptable = (
1806 # dispatch table:
1806 # dispatch table:
1807 # file state
1807 # file state
1808 # action if in target manifest
1808 # action if in target manifest
1809 # action if not in target manifest
1809 # action if not in target manifest
1810 # make backup if in target manifest
1810 # make backup if in target manifest
1811 # make backup if not in target manifest
1811 # make backup if not in target manifest
1812 (modified, revert, remove, True, True),
1812 (modified, revert, remove, True, True),
1813 (added, revert, remove, True, False),
1813 (added, revert, remove, True, False),
1814 (removed, undelete, None, False, False),
1814 (removed, undelete, None, False, False),
1815 (deleted, revert, remove, False, False),
1815 (deleted, revert, remove, False, False),
1816 )
1816 )
1817
1817
1818 for abs, (rel, exact) in sorted(names.items()):
1818 for abs, (rel, exact) in sorted(names.items()):
1819 mfentry = mf.get(abs)
1819 mfentry = mf.get(abs)
1820 target = repo.wjoin(abs)
1820 target = repo.wjoin(abs)
1821 def handle(xlist, dobackup):
1821 def handle(xlist, dobackup):
1822 xlist[0].append(abs)
1822 xlist[0].append(abs)
1823 if (dobackup and not opts.get('no_backup') and
1823 if (dobackup and not opts.get('no_backup') and
1824 os.path.lexists(target)):
1824 os.path.lexists(target)):
1825 bakname = "%s.orig" % rel
1825 bakname = "%s.orig" % rel
1826 ui.note(_('saving current version of %s as %s\n') %
1826 ui.note(_('saving current version of %s as %s\n') %
1827 (rel, bakname))
1827 (rel, bakname))
1828 if not opts.get('dry_run'):
1828 if not opts.get('dry_run'):
1829 util.rename(target, bakname)
1829 util.rename(target, bakname)
1830 if ui.verbose or not exact:
1830 if ui.verbose or not exact:
1831 msg = xlist[1]
1831 msg = xlist[1]
1832 if not isinstance(msg, basestring):
1832 if not isinstance(msg, basestring):
1833 msg = msg(abs)
1833 msg = msg(abs)
1834 ui.status(msg % rel)
1834 ui.status(msg % rel)
1835 for table, hitlist, misslist, backuphit, backupmiss in disptable:
1835 for table, hitlist, misslist, backuphit, backupmiss in disptable:
1836 if abs not in table:
1836 if abs not in table:
1837 continue
1837 continue
1838 # file has changed in dirstate
1838 # file has changed in dirstate
1839 if mfentry:
1839 if mfentry:
1840 handle(hitlist, backuphit)
1840 handle(hitlist, backuphit)
1841 elif misslist is not None:
1841 elif misslist is not None:
1842 handle(misslist, backupmiss)
1842 handle(misslist, backupmiss)
1843 break
1843 break
1844 else:
1844 else:
1845 if abs not in repo.dirstate:
1845 if abs not in repo.dirstate:
1846 if mfentry:
1846 if mfentry:
1847 handle(add, True)
1847 handle(add, True)
1848 elif exact:
1848 elif exact:
1849 ui.warn(_('file not managed: %s\n') % rel)
1849 ui.warn(_('file not managed: %s\n') % rel)
1850 continue
1850 continue
1851 # file has not changed in dirstate
1851 # file has not changed in dirstate
1852 if node == parent:
1852 if node == parent:
1853 if exact:
1853 if exact:
1854 ui.warn(_('no changes needed to %s\n') % rel)
1854 ui.warn(_('no changes needed to %s\n') % rel)
1855 continue
1855 continue
1856 if pmf is None:
1856 if pmf is None:
1857 # only need parent manifest in this unlikely case,
1857 # only need parent manifest in this unlikely case,
1858 # so do not read by default
1858 # so do not read by default
1859 pmf = repo[parent].manifest()
1859 pmf = repo[parent].manifest()
1860 if abs in pmf and mfentry:
1860 if abs in pmf and mfentry:
1861 # if version of file is same in parent and target
1861 # if version of file is same in parent and target
1862 # manifests, do nothing
1862 # manifests, do nothing
1863 if (pmf[abs] != mfentry or
1863 if (pmf[abs] != mfentry or
1864 pmf.flags(abs) != mf.flags(abs)):
1864 pmf.flags(abs) != mf.flags(abs)):
1865 handle(revert, False)
1865 handle(revert, False)
1866 else:
1866 else:
1867 handle(remove, False)
1867 handle(remove, False)
1868
1868
1869 if not opts.get('dry_run'):
1869 if not opts.get('dry_run'):
1870 def checkout(f):
1870 def checkout(f):
1871 fc = ctx[f]
1871 fc = ctx[f]
1872 repo.wwrite(f, fc.data(), fc.flags())
1872 repo.wwrite(f, fc.data(), fc.flags())
1873
1873
1874 audit_path = scmutil.pathauditor(repo.root)
1874 audit_path = scmutil.pathauditor(repo.root)
1875 for f in remove[0]:
1875 for f in remove[0]:
1876 if repo.dirstate[f] == 'a':
1876 if repo.dirstate[f] == 'a':
1877 repo.dirstate.drop(f)
1877 repo.dirstate.drop(f)
1878 continue
1878 continue
1879 audit_path(f)
1879 audit_path(f)
1880 try:
1880 try:
1881 util.unlinkpath(repo.wjoin(f))
1881 util.unlinkpath(repo.wjoin(f))
1882 except OSError:
1882 except OSError:
1883 pass
1883 pass
1884 repo.dirstate.remove(f)
1884 repo.dirstate.remove(f)
1885
1885
1886 normal = None
1886 normal = None
1887 if node == parent:
1887 if node == parent:
1888 # We're reverting to our parent. If possible, we'd like status
1888 # We're reverting to our parent. If possible, we'd like status
1889 # to report the file as clean. We have to use normallookup for
1889 # to report the file as clean. We have to use normallookup for
1890 # merges to avoid losing information about merged/dirty files.
1890 # merges to avoid losing information about merged/dirty files.
1891 if p2 != nullid:
1891 if p2 != nullid:
1892 normal = repo.dirstate.normallookup
1892 normal = repo.dirstate.normallookup
1893 else:
1893 else:
1894 normal = repo.dirstate.normal
1894 normal = repo.dirstate.normal
1895 for f in revert[0]:
1895 for f in revert[0]:
1896 checkout(f)
1896 checkout(f)
1897 if normal:
1897 if normal:
1898 normal(f)
1898 normal(f)
1899
1899
1900 for f in add[0]:
1900 for f in add[0]:
1901 checkout(f)
1901 checkout(f)
1902 repo.dirstate.add(f)
1902 repo.dirstate.add(f)
1903
1903
1904 normal = repo.dirstate.normallookup
1904 normal = repo.dirstate.normallookup
1905 if node == parent and p2 == nullid:
1905 if node == parent and p2 == nullid:
1906 normal = repo.dirstate.normal
1906 normal = repo.dirstate.normal
1907 for f in undelete[0]:
1907 for f in undelete[0]:
1908 checkout(f)
1908 checkout(f)
1909 normal(f)
1909 normal(f)
1910
1910
1911 if targetsubs:
1911 if targetsubs:
1912 # Revert the subrepos on the revert list
1912 # Revert the subrepos on the revert list
1913 for sub in targetsubs:
1913 for sub in targetsubs:
1914 ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
1914 ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
1915 finally:
1915 finally:
1916 wlock.release()
1916 wlock.release()
1917
1917
1918 def command(table):
1918 def command(table):
1919 '''returns a function object bound to table which can be used as
1919 '''returns a function object bound to table which can be used as
1920 a decorator for populating table as a command table'''
1920 a decorator for populating table as a command table'''
1921
1921
1922 def cmd(name, options, synopsis=None):
1922 def cmd(name, options, synopsis=None):
1923 def decorator(func):
1923 def decorator(func):
1924 if synopsis:
1924 if synopsis:
1925 table[name] = func, options[:], synopsis
1925 table[name] = func, options[:], synopsis
1926 else:
1926 else:
1927 table[name] = func, options[:]
1927 table[name] = func, options[:]
1928 return func
1928 return func
1929 return decorator
1929 return decorator
1930
1930
1931 return cmd
1931 return cmd
@@ -1,470 +1,469
1 # posix.py - Posix utility function implementations for Mercurial
1 # posix.py - Posix utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import encoding
9 import encoding
10 import os, sys, errno, stat, getpass, pwd, grp, tempfile, unicodedata
10 import os, sys, errno, stat, getpass, pwd, grp, tempfile, unicodedata
11
11
12 posixfile = open
12 posixfile = open
13 nulldev = '/dev/null'
14 normpath = os.path.normpath
13 normpath = os.path.normpath
15 samestat = os.path.samestat
14 samestat = os.path.samestat
16 oslink = os.link
15 oslink = os.link
17 unlink = os.unlink
16 unlink = os.unlink
18 rename = os.rename
17 rename = os.rename
19 expandglobs = False
18 expandglobs = False
20
19
21 umask = os.umask(0)
20 umask = os.umask(0)
22 os.umask(umask)
21 os.umask(umask)
23
22
24 def openhardlinks():
23 def openhardlinks():
25 '''return true if it is safe to hold open file handles to hardlinks'''
24 '''return true if it is safe to hold open file handles to hardlinks'''
26 return True
25 return True
27
26
28 def nlinks(name):
27 def nlinks(name):
29 '''return number of hardlinks for the given file'''
28 '''return number of hardlinks for the given file'''
30 return os.lstat(name).st_nlink
29 return os.lstat(name).st_nlink
31
30
32 def parsepatchoutput(output_line):
31 def parsepatchoutput(output_line):
33 """parses the output produced by patch and returns the filename"""
32 """parses the output produced by patch and returns the filename"""
34 pf = output_line[14:]
33 pf = output_line[14:]
35 if os.sys.platform == 'OpenVMS':
34 if os.sys.platform == 'OpenVMS':
36 if pf[0] == '`':
35 if pf[0] == '`':
37 pf = pf[1:-1] # Remove the quotes
36 pf = pf[1:-1] # Remove the quotes
38 else:
37 else:
39 if pf.startswith("'") and pf.endswith("'") and " " in pf:
38 if pf.startswith("'") and pf.endswith("'") and " " in pf:
40 pf = pf[1:-1] # Remove the quotes
39 pf = pf[1:-1] # Remove the quotes
41 return pf
40 return pf
42
41
43 def sshargs(sshcmd, host, user, port):
42 def sshargs(sshcmd, host, user, port):
44 '''Build argument list for ssh'''
43 '''Build argument list for ssh'''
45 args = user and ("%s@%s" % (user, host)) or host
44 args = user and ("%s@%s" % (user, host)) or host
46 return port and ("%s -p %s" % (args, port)) or args
45 return port and ("%s -p %s" % (args, port)) or args
47
46
48 def isexec(f):
47 def isexec(f):
49 """check whether a file is executable"""
48 """check whether a file is executable"""
50 return (os.lstat(f).st_mode & 0100 != 0)
49 return (os.lstat(f).st_mode & 0100 != 0)
51
50
52 def setflags(f, l, x):
51 def setflags(f, l, x):
53 s = os.lstat(f).st_mode
52 s = os.lstat(f).st_mode
54 if l:
53 if l:
55 if not stat.S_ISLNK(s):
54 if not stat.S_ISLNK(s):
56 # switch file to link
55 # switch file to link
57 fp = open(f)
56 fp = open(f)
58 data = fp.read()
57 data = fp.read()
59 fp.close()
58 fp.close()
60 os.unlink(f)
59 os.unlink(f)
61 try:
60 try:
62 os.symlink(data, f)
61 os.symlink(data, f)
63 except OSError:
62 except OSError:
64 # failed to make a link, rewrite file
63 # failed to make a link, rewrite file
65 fp = open(f, "w")
64 fp = open(f, "w")
66 fp.write(data)
65 fp.write(data)
67 fp.close()
66 fp.close()
68 # no chmod needed at this point
67 # no chmod needed at this point
69 return
68 return
70 if stat.S_ISLNK(s):
69 if stat.S_ISLNK(s):
71 # switch link to file
70 # switch link to file
72 data = os.readlink(f)
71 data = os.readlink(f)
73 os.unlink(f)
72 os.unlink(f)
74 fp = open(f, "w")
73 fp = open(f, "w")
75 fp.write(data)
74 fp.write(data)
76 fp.close()
75 fp.close()
77 s = 0666 & ~umask # avoid restatting for chmod
76 s = 0666 & ~umask # avoid restatting for chmod
78
77
79 sx = s & 0100
78 sx = s & 0100
80 if x and not sx:
79 if x and not sx:
81 # Turn on +x for every +r bit when making a file executable
80 # Turn on +x for every +r bit when making a file executable
82 # and obey umask.
81 # and obey umask.
83 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
82 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
84 elif not x and sx:
83 elif not x and sx:
85 # Turn off all +x bits
84 # Turn off all +x bits
86 os.chmod(f, s & 0666)
85 os.chmod(f, s & 0666)
87
86
88 def copymode(src, dst, mode=None):
87 def copymode(src, dst, mode=None):
89 '''Copy the file mode from the file at path src to dst.
88 '''Copy the file mode from the file at path src to dst.
90 If src doesn't exist, we're using mode instead. If mode is None, we're
89 If src doesn't exist, we're using mode instead. If mode is None, we're
91 using umask.'''
90 using umask.'''
92 try:
91 try:
93 st_mode = os.lstat(src).st_mode & 0777
92 st_mode = os.lstat(src).st_mode & 0777
94 except OSError, inst:
93 except OSError, inst:
95 if inst.errno != errno.ENOENT:
94 if inst.errno != errno.ENOENT:
96 raise
95 raise
97 st_mode = mode
96 st_mode = mode
98 if st_mode is None:
97 if st_mode is None:
99 st_mode = ~umask
98 st_mode = ~umask
100 st_mode &= 0666
99 st_mode &= 0666
101 os.chmod(dst, st_mode)
100 os.chmod(dst, st_mode)
102
101
103 def checkexec(path):
102 def checkexec(path):
104 """
103 """
105 Check whether the given path is on a filesystem with UNIX-like exec flags
104 Check whether the given path is on a filesystem with UNIX-like exec flags
106
105
107 Requires a directory (like /foo/.hg)
106 Requires a directory (like /foo/.hg)
108 """
107 """
109
108
110 # VFAT on some Linux versions can flip mode but it doesn't persist
109 # VFAT on some Linux versions can flip mode but it doesn't persist
111 # a FS remount. Frequently we can detect it if files are created
110 # a FS remount. Frequently we can detect it if files are created
112 # with exec bit on.
111 # with exec bit on.
113
112
114 try:
113 try:
115 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
114 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
116 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
115 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
117 try:
116 try:
118 os.close(fh)
117 os.close(fh)
119 m = os.stat(fn).st_mode & 0777
118 m = os.stat(fn).st_mode & 0777
120 new_file_has_exec = m & EXECFLAGS
119 new_file_has_exec = m & EXECFLAGS
121 os.chmod(fn, m ^ EXECFLAGS)
120 os.chmod(fn, m ^ EXECFLAGS)
122 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
121 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
123 finally:
122 finally:
124 os.unlink(fn)
123 os.unlink(fn)
125 except (IOError, OSError):
124 except (IOError, OSError):
126 # we don't care, the user probably won't be able to commit anyway
125 # we don't care, the user probably won't be able to commit anyway
127 return False
126 return False
128 return not (new_file_has_exec or exec_flags_cannot_flip)
127 return not (new_file_has_exec or exec_flags_cannot_flip)
129
128
130 def checklink(path):
129 def checklink(path):
131 """check whether the given path is on a symlink-capable filesystem"""
130 """check whether the given path is on a symlink-capable filesystem"""
132 # mktemp is not racy because symlink creation will fail if the
131 # mktemp is not racy because symlink creation will fail if the
133 # file already exists
132 # file already exists
134 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
133 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
135 try:
134 try:
136 os.symlink(".", name)
135 os.symlink(".", name)
137 os.unlink(name)
136 os.unlink(name)
138 return True
137 return True
139 except (OSError, AttributeError):
138 except (OSError, AttributeError):
140 return False
139 return False
141
140
142 def checkosfilename(path):
141 def checkosfilename(path):
143 '''Check that the base-relative path is a valid filename on this platform.
142 '''Check that the base-relative path is a valid filename on this platform.
144 Returns None if the path is ok, or a UI string describing the problem.'''
143 Returns None if the path is ok, or a UI string describing the problem.'''
145 pass # on posix platforms, every path is ok
144 pass # on posix platforms, every path is ok
146
145
147 def setbinary(fd):
146 def setbinary(fd):
148 pass
147 pass
149
148
150 def pconvert(path):
149 def pconvert(path):
151 return path
150 return path
152
151
153 def localpath(path):
152 def localpath(path):
154 return path
153 return path
155
154
156 def samefile(fpath1, fpath2):
155 def samefile(fpath1, fpath2):
157 """Returns whether path1 and path2 refer to the same file. This is only
156 """Returns whether path1 and path2 refer to the same file. This is only
158 guaranteed to work for files, not directories."""
157 guaranteed to work for files, not directories."""
159 return os.path.samefile(fpath1, fpath2)
158 return os.path.samefile(fpath1, fpath2)
160
159
161 def samedevice(fpath1, fpath2):
160 def samedevice(fpath1, fpath2):
162 """Returns whether fpath1 and fpath2 are on the same device. This is only
161 """Returns whether fpath1 and fpath2 are on the same device. This is only
163 guaranteed to work for files, not directories."""
162 guaranteed to work for files, not directories."""
164 st1 = os.lstat(fpath1)
163 st1 = os.lstat(fpath1)
165 st2 = os.lstat(fpath2)
164 st2 = os.lstat(fpath2)
166 return st1.st_dev == st2.st_dev
165 return st1.st_dev == st2.st_dev
167
166
168 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
167 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
169 def normcase(path):
168 def normcase(path):
170 return path.lower()
169 return path.lower()
171
170
172 if sys.platform == 'darwin':
171 if sys.platform == 'darwin':
173 import fcntl # only needed on darwin, missing on jython
172 import fcntl # only needed on darwin, missing on jython
174
173
175 def normcase(path):
174 def normcase(path):
176 try:
175 try:
177 u = path.decode('utf-8')
176 u = path.decode('utf-8')
178 except UnicodeDecodeError:
177 except UnicodeDecodeError:
179 # percent-encode any characters that don't round-trip
178 # percent-encode any characters that don't round-trip
180 p2 = path.decode('utf-8', 'ignore').encode('utf-8')
179 p2 = path.decode('utf-8', 'ignore').encode('utf-8')
181 s = ""
180 s = ""
182 pos = 0
181 pos = 0
183 for c in path:
182 for c in path:
184 if p2[pos:pos + 1] == c:
183 if p2[pos:pos + 1] == c:
185 s += c
184 s += c
186 pos += 1
185 pos += 1
187 else:
186 else:
188 s += "%%%02X" % ord(c)
187 s += "%%%02X" % ord(c)
189 u = s.decode('utf-8')
188 u = s.decode('utf-8')
190
189
191 # Decompose then lowercase (HFS+ technote specifies lower)
190 # Decompose then lowercase (HFS+ technote specifies lower)
192 return unicodedata.normalize('NFD', u).lower().encode('utf-8')
191 return unicodedata.normalize('NFD', u).lower().encode('utf-8')
193
192
194 def realpath(path):
193 def realpath(path):
195 '''
194 '''
196 Returns the true, canonical file system path equivalent to the given
195 Returns the true, canonical file system path equivalent to the given
197 path.
196 path.
198
197
199 Equivalent means, in this case, resulting in the same, unique
198 Equivalent means, in this case, resulting in the same, unique
200 file system link to the path. Every file system entry, whether a file,
199 file system link to the path. Every file system entry, whether a file,
201 directory, hard link or symbolic link or special, will have a single
200 directory, hard link or symbolic link or special, will have a single
202 path preferred by the system, but may allow multiple, differing path
201 path preferred by the system, but may allow multiple, differing path
203 lookups to point to it.
202 lookups to point to it.
204
203
205 Most regular UNIX file systems only allow a file system entry to be
204 Most regular UNIX file systems only allow a file system entry to be
206 looked up by its distinct path. Obviously, this does not apply to case
205 looked up by its distinct path. Obviously, this does not apply to case
207 insensitive file systems, whether case preserving or not. The most
206 insensitive file systems, whether case preserving or not. The most
208 complex issue to deal with is file systems transparently reencoding the
207 complex issue to deal with is file systems transparently reencoding the
209 path, such as the non-standard Unicode normalisation required for HFS+
208 path, such as the non-standard Unicode normalisation required for HFS+
210 and HFSX.
209 and HFSX.
211 '''
210 '''
212 # Constants copied from /usr/include/sys/fcntl.h
211 # Constants copied from /usr/include/sys/fcntl.h
213 F_GETPATH = 50
212 F_GETPATH = 50
214 O_SYMLINK = 0x200000
213 O_SYMLINK = 0x200000
215
214
216 try:
215 try:
217 fd = os.open(path, O_SYMLINK)
216 fd = os.open(path, O_SYMLINK)
218 except OSError, err:
217 except OSError, err:
219 if err.errno == errno.ENOENT:
218 if err.errno == errno.ENOENT:
220 return path
219 return path
221 raise
220 raise
222
221
223 try:
222 try:
224 return fcntl.fcntl(fd, F_GETPATH, '\0' * 1024).rstrip('\0')
223 return fcntl.fcntl(fd, F_GETPATH, '\0' * 1024).rstrip('\0')
225 finally:
224 finally:
226 os.close(fd)
225 os.close(fd)
227 elif sys.version_info < (2, 4, 2, 'final'):
226 elif sys.version_info < (2, 4, 2, 'final'):
228 # Workaround for http://bugs.python.org/issue1213894 (os.path.realpath
227 # Workaround for http://bugs.python.org/issue1213894 (os.path.realpath
229 # didn't resolve symlinks that were the first component of the path.)
228 # didn't resolve symlinks that were the first component of the path.)
230 def realpath(path):
229 def realpath(path):
231 if os.path.isabs(path):
230 if os.path.isabs(path):
232 return os.path.realpath(path)
231 return os.path.realpath(path)
233 else:
232 else:
234 return os.path.realpath('./' + path)
233 return os.path.realpath('./' + path)
235 else:
234 else:
236 # Fallback to the likely inadequate Python builtin function.
235 # Fallback to the likely inadequate Python builtin function.
237 realpath = os.path.realpath
236 realpath = os.path.realpath
238
237
239 if sys.platform == 'cygwin':
238 if sys.platform == 'cygwin':
240 # workaround for cygwin, in which mount point part of path is
239 # workaround for cygwin, in which mount point part of path is
241 # treated as case sensitive, even though underlying NTFS is case
240 # treated as case sensitive, even though underlying NTFS is case
242 # insensitive.
241 # insensitive.
243
242
244 # default mount points
243 # default mount points
245 cygwinmountpoints = sorted([
244 cygwinmountpoints = sorted([
246 "/usr/bin",
245 "/usr/bin",
247 "/usr/lib",
246 "/usr/lib",
248 "/cygdrive",
247 "/cygdrive",
249 ], reverse=True)
248 ], reverse=True)
250
249
251 # use upper-ing as normcase as same as NTFS workaround
250 # use upper-ing as normcase as same as NTFS workaround
252 def normcase(path):
251 def normcase(path):
253 pathlen = len(path)
252 pathlen = len(path)
254 if (pathlen == 0) or (path[0] != os.sep):
253 if (pathlen == 0) or (path[0] != os.sep):
255 # treat as relative
254 # treat as relative
256 return encoding.upper(path)
255 return encoding.upper(path)
257
256
258 # to preserve case of mountpoint part
257 # to preserve case of mountpoint part
259 for mp in cygwinmountpoints:
258 for mp in cygwinmountpoints:
260 if not path.startswith(mp):
259 if not path.startswith(mp):
261 continue
260 continue
262
261
263 mplen = len(mp)
262 mplen = len(mp)
264 if mplen == pathlen: # mount point itself
263 if mplen == pathlen: # mount point itself
265 return mp
264 return mp
266 if path[mplen] == os.sep:
265 if path[mplen] == os.sep:
267 return mp + encoding.upper(path[mplen:])
266 return mp + encoding.upper(path[mplen:])
268
267
269 return encoding.upper(path)
268 return encoding.upper(path)
270
269
271 # Cygwin translates native ACLs to POSIX permissions,
270 # Cygwin translates native ACLs to POSIX permissions,
272 # but these translations are not supported by native
271 # but these translations are not supported by native
273 # tools, so the exec bit tends to be set erroneously.
272 # tools, so the exec bit tends to be set erroneously.
274 # Therefore, disable executable bit access on Cygwin.
273 # Therefore, disable executable bit access on Cygwin.
275 def checkexec(path):
274 def checkexec(path):
276 return False
275 return False
277
276
278 # Similarly, Cygwin's symlink emulation is likely to create
277 # Similarly, Cygwin's symlink emulation is likely to create
279 # problems when Mercurial is used from both Cygwin and native
278 # problems when Mercurial is used from both Cygwin and native
280 # Windows, with other native tools, or on shared volumes
279 # Windows, with other native tools, or on shared volumes
281 def checklink(path):
280 def checklink(path):
282 return False
281 return False
283
282
284 def shellquote(s):
283 def shellquote(s):
285 if os.sys.platform == 'OpenVMS':
284 if os.sys.platform == 'OpenVMS':
286 return '"%s"' % s
285 return '"%s"' % s
287 else:
286 else:
288 return "'%s'" % s.replace("'", "'\\''")
287 return "'%s'" % s.replace("'", "'\\''")
289
288
290 def quotecommand(cmd):
289 def quotecommand(cmd):
291 return cmd
290 return cmd
292
291
293 def popen(command, mode='r'):
292 def popen(command, mode='r'):
294 return os.popen(command, mode)
293 return os.popen(command, mode)
295
294
296 def testpid(pid):
295 def testpid(pid):
297 '''return False if pid dead, True if running or not sure'''
296 '''return False if pid dead, True if running or not sure'''
298 if os.sys.platform == 'OpenVMS':
297 if os.sys.platform == 'OpenVMS':
299 return True
298 return True
300 try:
299 try:
301 os.kill(pid, 0)
300 os.kill(pid, 0)
302 return True
301 return True
303 except OSError, inst:
302 except OSError, inst:
304 return inst.errno != errno.ESRCH
303 return inst.errno != errno.ESRCH
305
304
306 def explainexit(code):
305 def explainexit(code):
307 """return a 2-tuple (desc, code) describing a subprocess status
306 """return a 2-tuple (desc, code) describing a subprocess status
308 (codes from kill are negative - not os.system/wait encoding)"""
307 (codes from kill are negative - not os.system/wait encoding)"""
309 if code >= 0:
308 if code >= 0:
310 return _("exited with status %d") % code, code
309 return _("exited with status %d") % code, code
311 return _("killed by signal %d") % -code, -code
310 return _("killed by signal %d") % -code, -code
312
311
313 def isowner(st):
312 def isowner(st):
314 """Return True if the stat object st is from the current user."""
313 """Return True if the stat object st is from the current user."""
315 return st.st_uid == os.getuid()
314 return st.st_uid == os.getuid()
316
315
317 def findexe(command):
316 def findexe(command):
318 '''Find executable for command searching like which does.
317 '''Find executable for command searching like which does.
319 If command is a basename then PATH is searched for command.
318 If command is a basename then PATH is searched for command.
320 PATH isn't searched if command is an absolute or relative path.
319 PATH isn't searched if command is an absolute or relative path.
321 If command isn't found None is returned.'''
320 If command isn't found None is returned.'''
322 if sys.platform == 'OpenVMS':
321 if sys.platform == 'OpenVMS':
323 return command
322 return command
324
323
325 def findexisting(executable):
324 def findexisting(executable):
326 'Will return executable if existing file'
325 'Will return executable if existing file'
327 if os.path.isfile(executable) and os.access(executable, os.X_OK):
326 if os.path.isfile(executable) and os.access(executable, os.X_OK):
328 return executable
327 return executable
329 return None
328 return None
330
329
331 if os.sep in command:
330 if os.sep in command:
332 return findexisting(command)
331 return findexisting(command)
333
332
334 if sys.platform == 'plan9':
333 if sys.platform == 'plan9':
335 return findexisting(os.path.join('/bin', command))
334 return findexisting(os.path.join('/bin', command))
336
335
337 for path in os.environ.get('PATH', '').split(os.pathsep):
336 for path in os.environ.get('PATH', '').split(os.pathsep):
338 executable = findexisting(os.path.join(path, command))
337 executable = findexisting(os.path.join(path, command))
339 if executable is not None:
338 if executable is not None:
340 return executable
339 return executable
341 return None
340 return None
342
341
343 def setsignalhandler():
342 def setsignalhandler():
344 pass
343 pass
345
344
346 def statfiles(files):
345 def statfiles(files):
347 'Stat each file in files and yield stat or None if file does not exist.'
346 'Stat each file in files and yield stat or None if file does not exist.'
348 lstat = os.lstat
347 lstat = os.lstat
349 for nf in files:
348 for nf in files:
350 try:
349 try:
351 st = lstat(nf)
350 st = lstat(nf)
352 except OSError, err:
351 except OSError, err:
353 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
352 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
354 raise
353 raise
355 st = None
354 st = None
356 yield st
355 yield st
357
356
358 def getuser():
357 def getuser():
359 '''return name of current user'''
358 '''return name of current user'''
360 return getpass.getuser()
359 return getpass.getuser()
361
360
362 def username(uid=None):
361 def username(uid=None):
363 """Return the name of the user with the given uid.
362 """Return the name of the user with the given uid.
364
363
365 If uid is None, return the name of the current user."""
364 If uid is None, return the name of the current user."""
366
365
367 if uid is None:
366 if uid is None:
368 uid = os.getuid()
367 uid = os.getuid()
369 try:
368 try:
370 return pwd.getpwuid(uid)[0]
369 return pwd.getpwuid(uid)[0]
371 except KeyError:
370 except KeyError:
372 return str(uid)
371 return str(uid)
373
372
374 def groupname(gid=None):
373 def groupname(gid=None):
375 """Return the name of the group with the given gid.
374 """Return the name of the group with the given gid.
376
375
377 If gid is None, return the name of the current group."""
376 If gid is None, return the name of the current group."""
378
377
379 if gid is None:
378 if gid is None:
380 gid = os.getgid()
379 gid = os.getgid()
381 try:
380 try:
382 return grp.getgrgid(gid)[0]
381 return grp.getgrgid(gid)[0]
383 except KeyError:
382 except KeyError:
384 return str(gid)
383 return str(gid)
385
384
386 def groupmembers(name):
385 def groupmembers(name):
387 """Return the list of members of the group with the given
386 """Return the list of members of the group with the given
388 name, KeyError if the group does not exist.
387 name, KeyError if the group does not exist.
389 """
388 """
390 return list(grp.getgrnam(name).gr_mem)
389 return list(grp.getgrnam(name).gr_mem)
391
390
392 def spawndetached(args):
391 def spawndetached(args):
393 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
392 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
394 args[0], args)
393 args[0], args)
395
394
396 def gethgcmd():
395 def gethgcmd():
397 return sys.argv[:1]
396 return sys.argv[:1]
398
397
399 def termwidth():
398 def termwidth():
400 try:
399 try:
401 import termios, array, fcntl
400 import termios, array, fcntl
402 for dev in (sys.stderr, sys.stdout, sys.stdin):
401 for dev in (sys.stderr, sys.stdout, sys.stdin):
403 try:
402 try:
404 try:
403 try:
405 fd = dev.fileno()
404 fd = dev.fileno()
406 except AttributeError:
405 except AttributeError:
407 continue
406 continue
408 if not os.isatty(fd):
407 if not os.isatty(fd):
409 continue
408 continue
410 try:
409 try:
411 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
410 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
412 width = array.array('h', arri)[1]
411 width = array.array('h', arri)[1]
413 if width > 0:
412 if width > 0:
414 return width
413 return width
415 except AttributeError:
414 except AttributeError:
416 pass
415 pass
417 except ValueError:
416 except ValueError:
418 pass
417 pass
419 except IOError, e:
418 except IOError, e:
420 if e[0] == errno.EINVAL:
419 if e[0] == errno.EINVAL:
421 pass
420 pass
422 else:
421 else:
423 raise
422 raise
424 except ImportError:
423 except ImportError:
425 pass
424 pass
426 return 80
425 return 80
427
426
428 def makedir(path, notindexed):
427 def makedir(path, notindexed):
429 os.mkdir(path)
428 os.mkdir(path)
430
429
431 def unlinkpath(f):
430 def unlinkpath(f):
432 """unlink and remove the directory if it is empty"""
431 """unlink and remove the directory if it is empty"""
433 os.unlink(f)
432 os.unlink(f)
434 # try removing directories that might now be empty
433 # try removing directories that might now be empty
435 try:
434 try:
436 os.removedirs(os.path.dirname(f))
435 os.removedirs(os.path.dirname(f))
437 except OSError:
436 except OSError:
438 pass
437 pass
439
438
440 def lookupreg(key, name=None, scope=None):
439 def lookupreg(key, name=None, scope=None):
441 return None
440 return None
442
441
443 def hidewindow():
442 def hidewindow():
444 """Hide current shell window.
443 """Hide current shell window.
445
444
446 Used to hide the window opened when starting asynchronous
445 Used to hide the window opened when starting asynchronous
447 child process under Windows, unneeded on other systems.
446 child process under Windows, unneeded on other systems.
448 """
447 """
449 pass
448 pass
450
449
451 class cachestat(object):
450 class cachestat(object):
452 def __init__(self, path):
451 def __init__(self, path):
453 self.stat = os.stat(path)
452 self.stat = os.stat(path)
454
453
455 def cacheable(self):
454 def cacheable(self):
456 return bool(self.stat.st_ino)
455 return bool(self.stat.st_ino)
457
456
458 __hash__ = object.__hash__
457 __hash__ = object.__hash__
459
458
460 def __eq__(self, other):
459 def __eq__(self, other):
461 try:
460 try:
462 return self.stat == other.stat
461 return self.stat == other.stat
463 except AttributeError:
462 except AttributeError:
464 return False
463 return False
465
464
466 def __ne__(self, other):
465 def __ne__(self, other):
467 return not self == other
466 return not self == other
468
467
469 def executablepath():
468 def executablepath():
470 return None # available on Windows only
469 return None # available on Windows only
# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from i18n import _
import error, osutil, encoding, collections
import errno, re, shutil, sys, tempfile, traceback
import os, time, datetime, calendar, textwrap, signal
import imp, socket, urllib

# Pick the platform backend once at import time; everything below
# re-exports its API so callers use util.<name> regardless of the OS.
if os.name == 'nt':
    import windows as platform
else:
    import posix as platform

# NOTE(review): 'nulldev' is intentionally absent from this list — it was
# replaced by os.devnull.
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
realpath = platform.realpath
rename = platform.rename
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
sshargs = platform.sshargs
statfiles = platform.statfiles
termwidth = platform.termwidth
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
unlinkpath = platform.unlinkpath
username = platform.username
74
73
# Python compatibility

# Unique sentinel: lets us tell "attribute missing" apart from an
# attribute whose value is None (or any other real object).
_notset = object()

def safehasattr(thing, attr):
    """hasattr() variant implemented via getattr() with a sentinel default."""
    return getattr(thing, attr, _notset) is not _notset
81
80
def sha1(s=''):
    '''
    Low-overhead wrapper around Python's SHA support

    >>> f = _fastsha1
    >>> a = sha1()
    >>> a = f()
    >>> a.hexdigest()
    'da39a3ee5e6b4b0d3255bfef95601890afd80709'
    '''

    return _fastsha1(s)

def _fastsha1(s=''):
    # On first use, locate a sha1 implementation — hashlib on Python
    # >= 2.5, the legacy sha module otherwise — then rebind both
    # module-level names to it so subsequent calls skip this dispatch.
    if sys.version_info >= (2, 5):
        from hashlib import sha1 as _sha1
    else:
        from sha import sha as _sha1
    global _fastsha1, sha1
    _fastsha1 = sha1 = _sha1
    return _sha1(s)
106
105
try:
    buffer = buffer  # Python 2: keep the builtin
except NameError:
    # No builtin buffer(): emulate it. Slicing copies on Python 2,
    # memoryview gives a zero-copy view on Python 3.
    if sys.version_info[0] < 3:
        def buffer(sliceable, offset=0):
            return sliceable[offset:]
    else:
        def buffer(sliceable, offset=0):
            return memoryview(sliceable)[offset:]
116
115
import subprocess
# On POSIX, close inherited file descriptors in child processes.
closefds = os.name == 'posix'

def popen2(cmd, env=None, newlines=False):
    """Run cmd through a shell; return its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         universal_newlines=newlines,
                         env=env)
    return p.stdin, p.stdout
130
129
def popen3(cmd, env=None, newlines=False):
    """Run cmd through a shell; return its (stdin, stdout, stderr) pipes."""
    # bufsize=-1: system-chosen buffering (see popen2 for rationale).
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=newlines,
                         env=env)
    return p.stdin, p.stdout, p.stderr
139
138
def version():
    """Return version information if available."""
    # The __version__ module is generated at build time; a source
    # checkout run without building reports 'unknown'.
    try:
        import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
147
146
148 # used by parsedate
147 # used by parsedate
149 defaultdateformats = (
148 defaultdateformats = (
150 '%Y-%m-%d %H:%M:%S',
149 '%Y-%m-%d %H:%M:%S',
151 '%Y-%m-%d %I:%M:%S%p',
150 '%Y-%m-%d %I:%M:%S%p',
152 '%Y-%m-%d %H:%M',
151 '%Y-%m-%d %H:%M',
153 '%Y-%m-%d %I:%M%p',
152 '%Y-%m-%d %I:%M%p',
154 '%Y-%m-%d',
153 '%Y-%m-%d',
155 '%m-%d',
154 '%m-%d',
156 '%m/%d',
155 '%m/%d',
157 '%m/%d/%y',
156 '%m/%d/%y',
158 '%m/%d/%Y',
157 '%m/%d/%Y',
159 '%a %b %d %H:%M:%S %Y',
158 '%a %b %d %H:%M:%S %Y',
160 '%a %b %d %I:%M:%S%p %Y',
159 '%a %b %d %I:%M:%S%p %Y',
161 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
160 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
162 '%b %d %H:%M:%S %Y',
161 '%b %d %H:%M:%S %Y',
163 '%b %d %I:%M:%S%p %Y',
162 '%b %d %I:%M:%S%p %Y',
164 '%b %d %H:%M:%S',
163 '%b %d %H:%M:%S',
165 '%b %d %I:%M:%S%p',
164 '%b %d %I:%M:%S%p',
166 '%b %d %H:%M',
165 '%b %d %H:%M',
167 '%b %d %I:%M%p',
166 '%b %d %I:%M%p',
168 '%b %d %Y',
167 '%b %d %Y',
169 '%b %d',
168 '%b %d',
170 '%H:%M:%S',
169 '%H:%M:%S',
171 '%I:%M:%S%p',
170 '%I:%M:%S%p',
172 '%H:%M',
171 '%H:%M',
173 '%I:%M%p',
172 '%I:%M%p',
174 )
173 )
175
174
176 extendeddateformats = defaultdateformats + (
175 extendeddateformats = defaultdateformats + (
177 "%Y",
176 "%Y",
178 "%Y-%m",
177 "%Y-%m",
179 "%b",
178 "%b",
180 "%b %Y",
179 "%b %Y",
181 )
180 )
182
181
def cachefunc(func):
    '''cache the result of function calls

    Memoization is unbounded: every distinct argument combination is
    kept for the lifetime of the returned wrapper, and arguments must
    be hashable.
    '''
    # XXX doesn't handle keywords args
    cache = {}
    # func.func_code is the Python 2 spelling of func.__code__
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
201
200
try:
    collections.deque.remove
    deque = collections.deque
except AttributeError:
    # python 2.4 lacks deque.remove
    class deque(collections.deque):
        def remove(self, val):
            # Delete the first element equal to val, scanning left to right.
            for idx, item in enumerate(self):
                if item == val:
                    del self[idx]
                    break
213
212
def lrucachefunc(func):
    '''cache most recent results of function calls

    Keeps at most ~20 entries; the least recently used one is evicted
    when a new key arrives and the cache is full.
    '''
    cache = {}
    order = deque()  # keys ordered from least to most recently used
    # func.func_code is the Python 2 spelling of func.__code__
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                # cache hit: move the key to the most-recent end
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f
240
239
class propertycache(object):
    """Descriptor that computes a value once and caches it on the instance.

    The first attribute access runs *func* and stores the result in the
    instance's __dict__ under the same name, so later accesses bypass
    the descriptor entirely.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        result = self.func(obj)
        setattr(obj, self.name, result)
        return result
249
248
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    # stderr is not piped, so perr is None and the child writes errors
    # directly to our stderr.
    return pout
256
255
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write S to a temp input file for the command to read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        # pre-create the output file; the command overwrites it
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # OpenVMS encodes success in the low status bit
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        fp = open(outname, 'rb')
        r = fp.read()
        fp.close()
        return r
    finally:
        # best-effort cleanup of both temporary files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
293
292
# maps a command prefix to the strategy used to run the filter command
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on an explicit "tempfile:"/"pipe:" prefix; default to a pipe
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)
305
304
def binary(s):
    """return true if a string is binary data"""
    # Heuristic: any NUL byte marks the data as binary; empty or None
    # input is considered text.
    if not s:
        return False
    return '\0' in s
309
308
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # floor(log2(x)); defined as 0 for x == 0
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    pending = []   # chunks accumulated since the last yield
    size = 0       # total length of the pending chunks
    for chunk in source:
        pending.append(chunk)
        size += len(chunk)
        if size >= min:
            if min < max:
                # grow the threshold: at least double it, or jump to the
                # largest power of two not exceeding what we just saw,
                # capped at max
                min = min << 1
                nmin = 1 << log2(size)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(pending)
            size = 0
            pending = []
    if pending:
        # final partial chunk, may be smaller than min
        yield ''.join(pending)
340
339
# Re-exported for convenience: util.Abort is the user-facing error type
# raised throughout this module.
Abort = error.Abort
342
341
def always(fn):
    """Predicate that is true for any argument."""
    return True
345
344
def never(fn):
    """Predicate that is false for any argument."""
    return False
348
347
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # different drives (Windows): no relative path exists, so anchor
        # n2 directly under root
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    # strip the common leading components, then climb out of what is
    # left of n1 ('..' per remaining component) and descend into n2
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'
374
373
# cache for hgexecutable(); None until the first lookup
_hgexecutable = None
376
375
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze
386
385
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.

    The result is computed once and cached in _hgexecutable.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        mainmod = sys.modules['__main__']
        if hg:
            # explicit override via the HG environment variable
            _sethgexecutable(hg)
        elif mainfrozen():
            # frozen binary: the interpreter *is* hg
            _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            # running from the 'hg' script itself
            _sethgexecutable(mainmod.__file__)
        else:
            # fall back to searching PATH
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
405
404
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path
410
409
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # note: the mutable default for environ is safe because it is only read
    # flush our stdout so the child's output interleaves correctly
    try:
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9':
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or out == sys.__stdout__:
            # child inherits our stdout/stderr directly
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture stdout+stderr and forward them to 'out'
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in proc.stdout:
                out.write(line)
            proc.wait()
            rc = proc.returncode
        if sys.platform == 'OpenVMS' and rc & 1:
            # OpenVMS encodes success in the low status bit
            rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            # ui-like objects get a warning; anything else is raised
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc
467
466
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # A traceback of depth 1 means the TypeError came from the
            # call frame itself (bad signature); deeper ones are real
            # errors inside func and must propagate unchanged.
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
479
478
def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    # NOTE(review): despite the docstring, atime/mtime are not copied
    # here — shutil.copymode only transfers permission bits; confirm
    # whether callers rely on timestamps.
    if os.path.islink(src):
        # recreate the symlink itself rather than copying its target
        try:
            os.unlink(dest)
        except OSError:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copymode(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))
494
493
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible

    Returns (hardlink, num): whether hardlinking is still in effect and
    how many files were copied/linked.
    """

    if hardlink is None:
        # default: hardlink only when src and dst are on the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    num = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            hardlink, n = copyfiles(srcname, dstname, hardlink)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # hardlinking failed; fall back to copying for the
                # remainder of the tree
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1

    return hardlink, num
522
521
# names and characters that Windows refuses in filenames
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    '''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    '''
    # examine each path component separately; both separators count
    for part in path.replace('\\', '/').split('/'):
        if not part:
            continue
        for ch in part:
            if ch in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % ch
            if ord(ch) <= 31:
                # control characters are invalid in Windows filenames
                return _("filename contains %r, which is invalid "
                         "on Windows") % ch
        # only the part before the first dot is checked against the
        # reserved device names (e.g. 'con.xml' is rejected)
        base = part.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        tail = part[-1]
        # NOTE: 'part not in ".."' is a substring test, deliberately
        # exempting the '.' and '..' components
        if tail in '. ' and part not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % tail
565
564
# Select the filename validity checker for the current OS: Windows gets the
# checkwinfilename rules defined above; other platforms defer to the
# per-platform implementation in the platform module.
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
570
569
def makelock(info, pathname):
    """Create a lock at pathname whose payload is the string info.

    Preferred form is a symlink with info as its target.  EEXIST is
    re-raised so callers can detect a held lock; any other OSError from
    os.symlink (e.g. a filesystem without symlink support) falls through
    to the plain-file path below, as does AttributeError on platforms
    whose os module lacks symlink entirely.
    """
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    # fallback: exclusive creation of a regular file holding info
    # (O_EXCL makes this raise EEXIST too if the lock already exists)
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
583
582
def readlock(pathname):
    """Return the payload of the lock at pathname (see makelock).

    Tries os.readlink first (locks are symlinks where supported).
    EINVAL (not a symlink) and ENOSYS (no symlink support) fall back to
    reading a regular file, as does AttributeError when os has no
    readlink; any other OSError is propagated.
    """
    try:
        return os.readlink(pathname)
    except OSError, why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
596
595
def fstat(fp):
    """Stat a file object.

    Uses os.fstat on the object's descriptor when it has a fileno()
    method; otherwise falls back to stat-by-name via fp.name.
    """
    fileno = getattr(fp, 'fileno', None)
    if fileno is None:
        return os.stat(fp.name)
    return os.fstat(fileno())
603
602
604 # File system features
603 # File system features
605
604
def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st = os.stat(path)
    dirname, base = os.path.split(path)
    folded = base.upper()
    if folded == base:
        folded = base.lower()
        if folded == base:
            # final component has no letters to fold either way
            return True # no evidence against case sensitivity
    try:
        st2 = os.stat(os.path.join(dirname, folded))
    except OSError:
        # the case-swapped name does not exist: sensitive
        return True
    # identical stat result means both spellings name the same entry
    return st2 != st
628
627
# Optional re2 regexp-engine bindings.  _re2 is tri-state: False when the
# re2 module is unavailable, None when it imported but has not yet been
# probed for usability, True once compilere() has verified it works.
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
634
633
def compilere(pat):
    """Compile regexp pat, preferring the re2 engine when it is usable.

    re2 supports only a subset of regexp syntax, so incompatible
    patterns (and environments without a working re2) fall back to the
    stdlib re module.
    """
    global _re2
    if _re2 is None:
        # first call since import: probe whether the re2 binding really
        # works (lazy importers can defer the ImportError to this point)
        try:
            re2.compile
        except ImportError:
            _re2 = False
        else:
            _re2 = True
    if _re2:
        try:
            return re2.compile(pat)
        except re2.error:
            pass
    return re.compile(pat)
652
651
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def find(p, contents):
        # return the real-cased directory entry whose normcase form is p
        for n in contents:
            if normcase(n) == p:
                return n
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # str.replace returns a new string, so the result must be assigned back;
    # otherwise a Windows '\\' separator ends up unescaped inside the regex
    # character classes built below.
    seps = seps.replace('\\', '\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # runs of separators are copied through unchanged
            result.append(sep)
            continue

        # per-directory listing cache, shared across calls
        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        found = find(part, contents)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            contents = os.listdir(dir)
            _fspathcache[dir] = contents
            found = find(part, contents)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
699
698
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        try:
            oslink(f1, f2)
        except OSError:
            # hardlinks not supported here
            return False

        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    finally:
        # always remove both probe files; cleanup errors are ignored
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass

    # not reached: the try block above always returns or raises
    return False
735
734
def endswithsep(path):
    """Does path end with a separator (os.sep, or os.altsep if set)?

    Note: when path does not end with os.sep and os.altsep is unset,
    the falsy result may be None rather than False.
    """
    if path.endswith(os.sep):
        return True
    return os.altsep and path.endswith(os.altsep)
739
738
def splitpath(path):
    """Split path on os.sep only (os.altsep is deliberately ignored).

    Exposes exactly the semantics of path.split(os.sep); callers that
    may hold redundant separators should run os.path.normpath() first.
    """
    return path.split(os.sep)
747
746
def gui():
    '''Are we running in a GUI?'''
    if sys.platform == 'darwin':
        if 'SSH_CONNECTION' in os.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        # Windows always counts as a GUI; elsewhere require a display.
        # Note: this branch may return the $DISPLAY string (truthy) rather
        # than a strict bool.
        return os.name == "nt" or os.environ.get("DISPLAY")
762
761
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    # create the temp file in the same directory so a later rename over
    # name stays on one filesystem
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # no original to copy: the empty temp file is the result
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # don't leave a stale temp file behind on failure
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
801
800
class atomictempfile(object):
    '''writeable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        # 'w' mode means the caller will overwrite everything, so the
        # copy of the original contents can be skipped (emptyok)
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # delegated methods
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # commit: move the temporary file over the permanent name
        if not self._fp.closed:
            self._fp.close()
            rename(self._tempname, localpath(self.__name))

    def discard(self):
        # abort: delete the temporary file, leaving the original untouched
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        # discard-by-default on garbage collection
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
839
838
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance

    When mode is not None it is applied (os.chmod) to name and, through
    the recursion, to every parent directory this call creates.
    """
    try:
        os.mkdir(name)
    except OSError, err:
        if err.errno == errno.EEXIST:
            # already there: nothing to do (and no chmod)
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # missing parent: create it first, then retry this directory
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success
            raise
        makedirs(parent, mode)
        os.mkdir(name)
    if mode is not None:
        os.chmod(name, mode)
856
855
def readfile(path):
    """Return the full contents of the file at path, read in binary mode."""
    with open(path, 'rb') as fp:
        return fp.read()
863
862
def writefile(path, text):
    """Replace the contents of the file at path with text (binary mode)."""
    with open(path, 'wb') as fp:
        fp.write(text)
870
869
def appendfile(path, text):
    """Append text to the file at path (binary append mode)."""
    with open(path, 'ab') as fp:
        fp.write(text)
877
876
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter yields the input chunks.  Oversized chunks (> 1M) are
        split into 256k pieces so the internal queue stays fine-grained."""
        def splitbig(chunks):
            for piece in chunks:
                if len(piece) > 2**20:
                    offset = 0
                    total = len(piece)
                    while offset < total:
                        yield piece[offset:offset + 2**18]
                        offset += 2**18
                else:
                    yield piece
        self.iter = splitbig(in_iter)
        self._queue = deque()

    def read(self, l):
        """Read l bytes of data from the iterator of chunks of data.
        Returns less than l bytes if the iterator runs dry."""
        remaining = l
        buf = ''
        queue = self._queue
        while remaining > 0:
            if not queue:
                # pull roughly 256k ahead from the source iterator
                budget = 2**18
                for piece in self.iter:
                    queue.append(piece)
                    budget -= len(piece)
                    if budget <= 0:
                        break
                if not queue:
                    # source exhausted: return what we have
                    break

            piece = queue.popleft()
            remaining -= len(piece)
            if remaining < 0:
                # piece overshoots: push the surplus tail back
                queue.appendleft(piece[remaining:])
                buf += piece[:remaining]
            else:
                buf += piece

        return buf
925
924
def filechunkiter(f, size=65536, limit=None):
    """Yield successive reads from file object f.

    Each read asks for at most size bytes (default 65536); when limit is
    given, at most limit bytes are produced in total.  Iteration ends at
    the first empty read, so sources that return short reads (sockets,
    pipes) simply yield short chunks along the way.
    """
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # nbytes == 0 short-circuits: never issue a zero-length read
        data = nbytes and f.read(nbytes)
        if not data:
            return
        if limit:
            limit -= len(data)
        yield data
946
945
def makedate():
    """Return the current time as a (unixtime, tzoffset) pair.

    tzoffset is the local timezone's offset from UTC in seconds, with
    positive values west of UTC (i.e. unixtime = localtime + tzoffset).
    """
    stamp = time.time()
    if stamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % stamp, hint=hint)
    drift = (datetime.datetime.utcfromtimestamp(stamp) -
             datetime.datetime.fromtimestamp(stamp))
    return stamp, drift.days * 86400 + drift.seconds
956
955
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """Render a (unixtime, offset) pair as a localized time string.

    unixtime is seconds since the epoch; offset is the timezone's
    distance from UTC in seconds.  In format, "%1" and "%2" expand to
    the signed hour and the minute parts of the offset (e.g. +0530);
    all other directives go through time.strftime.  With no date, the
    current makedate() is used.
    """
    when, offset = date or makedate()
    if when < 0:
        # time.gmtime(lt) fails on Windows for lt < -43200
        when = 0
        offset = 0
    if "%1" in format or "%2" in format:
        sign = (offset > 0) and "-" or "+"
        minutes = abs(offset) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    try:
        expanded = time.gmtime(float(when) - offset)
    except ValueError:
        # time was out of range
        expanded = time.gmtime(sys.maxint)
    return time.strftime(format, expanded)
978
977
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into an ISO 8601 YYYY-MM-DD date."""
    return datestr(date, format='%Y-%m-%d')
982
981
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    defaults maps format parts to (biased, today) fallback strings used
    for time elements that format does not mention.
    """
    def gettz(text):
        # UTC offset in seconds encoded by text's final token ("+HHMM",
        # "-HHMM", "GMT" or "UTC"); None when there is no such token
        token = text.split()[-1]
        if token[0] in "+-" and len(token) == 5 and token[1:].isdigit():
            minutes = int(token[1:3]) * 60 + int(token[3:5])
            if token[0] == "+":
                return -minutes * 60
            return minutes * 60
        if token in ("GMT", "UTC"):
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset = gettz(string)
    date = string
    if offset is not None:
        # strip the timezone token before strptime sees the string
        date = " ".join(string.split()[:-1])

    # append defaults for every time element the format omits
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        present = [True for p in part if ("%" + p) in format]
        if present:
            # a more specific element was given, so less specific ones
            # are taken relative to today
            usenow = True
        else:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # no explicit zone: derive the offset from the local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1023
1022
def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    bias maps format parts (e.g. "HI", "mb") to preferred default
    strings; it is only read here, so the mutable default is safe.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        # fast path: "unixtime offset"
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1077
1076
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # round unspecified fields down (month 1, day 1) for a lower bound
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # round unspecified fields up for an upper bound; try shrinking
        # day counts until parsedate accepts one for the month in question
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on or before
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on or after
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # explicit inclusive range
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # single date: match anywhere within its span of accuracy
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1153
1152
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Strip, in this order: the mail domain, a "Name <addr>" prefix,
    # anything after the first space, and anything after the first dot.
    for sep, keepafter in (('@', False), ('<', True), (' ', False),
                           ('.', False)):
        idx = user.find(sep)
        if idx >= 0:
            user = user[idx + 1:] if keepafter else user[:idx]
    return user
1169
1168
def emailuser(user):
    """Return the user portion of an email address."""
    # Drop the domain part, then a leading "Name <" prefix if present.
    at = user.find('@')
    if at != -1:
        user = user[:at]
    lt = user.find('<')
    if lt != -1:
        user = user[lt + 1:]
    return user
1179
1178
def email(author):
    '''get email of author.'''
    # Take the text between '<' and '>'; with no brackets this degrades
    # to returning the whole string (find() yields -1 and None bounds).
    end = author.find('>')
    if end == -1:
        end = None
    return author[author.find('<') + 1:end]
1186
1185
1187 def _ellipsis(text, maxlength):
1186 def _ellipsis(text, maxlength):
1188 if len(text) <= maxlength:
1187 if len(text) <= maxlength:
1189 return text, False
1188 return text, False
1190 else:
1189 else:
1191 return "%s..." % (text[:maxlength - 3]), True
1190 return "%s..." % (text[:maxlength - 3]), True
1192
1191
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # Decode to unicode first so truncation never splits a
        # multi-byte sequence in the local encoding.
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if truncated:
            return utext.encode(encoding.encoding)
        return text
    except (UnicodeDecodeError, UnicodeEncodeError):
        # Undecodable input: fall back to byte-wise truncation.
        return _ellipsis(text, maxlength)[0]
1204
1203
# (threshold multiplier, divisor, format) triples ordered from the largest
# unit down to bytes; bytecount() uses the first entry whose threshold the
# value reaches, so bigger values get fewer decimals.
_byteunits = (
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1217
1216
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''
    # Pick the first unit whose threshold is reached; the final
    # '%.0f bytes' entry always matches as a fallback.
    for multiplier, divisor, fmt in _byteunits:
        if nbytes >= divisor * multiplier:
            return fmt % (nbytes / float(divisor))
    return _byteunits[-1][2] % nbytes
1225
1224
def uirepr(s):
    # repr() doubles backslashes, which makes Windows paths unreadable
    # in user-facing output; collapse them back to single backslashes.
    return repr(s).replace('\\\\', '\\')
1229
1228
# delay import of textwrap: the first call builds the wrapper class and
# rebinds this name to it, so the class body is only evaluated once.
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in east asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

            # 'drop_whitespace' appeared in Python 2.6; emulate it on 2.4
            # so _wrap_chunks below can rely on the attribute.
            if getattr(self, 'drop_whitespace', None) is None:
                self.drop_whitespace = kwargs.get('drop_whitespace', True)

        def _cutdown(self, ucstr, space_left):
            # Split ucstr at the last character boundary that still fits
            # into space_left terminal columns.
            consumed = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                consumed += colwidth(ucstr[i])
                if space_left < consumed:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len,
                              width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, remainder = self._cutdown(reversed_chunks[-1],
                                               space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = remainder
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)"
                                 % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chunks.
            chunks.reverse()

            while chunks:
                # Start the list of chunks that will make up the current
                # line; cur_len is the total column width of cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (ie. no lines started).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    chunkcols = colwidth(chunks[-1])

                    if cur_len + chunkcols <= width:
                        # Can still squeeze this chunk onto the current line.
                        cur_line.append(chunks.pop())
                        cur_len += chunkcols
                    else:
                        # Nope, this line is full.
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in the
                # list of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1340
1339
def wrap(line, width, initindent='', hangindent=''):
    # Wrap 'line' to 'width' terminal columns, honoring first-line and
    # hanging indents; all work is done in unicode and re-encoded at exit.
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    decode = lambda s: s.decode(encoding.encoding, encoding.encodingmode)
    line = decode(line)
    initindent = decode(initindent)
    hangindent = decode(hangindent)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(encoding.encoding)
1353
1352
def iterlines(iterator):
    """Yield individual lines from an iterable of multi-line chunks."""
    for chunk in iterator:
        for splitline in chunk.splitlines():
            yield splitline
1358
1357
def expandpath(path):
    """Expand environment variables, then '~' constructs, in path."""
    return os.path.expanduser(os.path.expandvars(path))
1361
1360
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    return [sys.executable] if mainfrozen() else gethgcmd()
1372
1371
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    reaped = set()
    def onsigchld(signum, frame):
        reaped.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, onsigchld)
    try:
        pid = spawndetached(args)
        while not condfn():
            # Re-check condfn after detecting death to avoid a race
            # where the child succeeded just before exiting.
            if ((pid in reaped or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1407
1406
# any() and all() are builtins from Python 2.5 on; provide pure-Python
# fallbacks for 2.4 (the rebinding in the 'try' is a no-op on 2.5+).
try:
    any, all = any, all
except NameError:
    def any(iterable):
        for element in iterable:
            if element:
                return True
        return False

    def all(iterable):
        for element in iterable:
            if not element:
                return False
        return True
1422
1421
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # Let a doubled prefix stand for a literal prefix character.
        patterns += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = re.compile(r'%s(%s)' % (prefix, patterns))
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
1447
1446
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # Not numeric: treat it as a service name.
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
1464
1463
# Recognized spellings of booleans in config values, lower-cased.
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
1475
1474
1476 _hexdig = '0123456789ABCDEFabcdef'
1475 _hexdig = '0123456789ABCDEFabcdef'
1477 _hextochr = dict((a + b, chr(int(a + b, 16)))
1476 _hextochr = dict((a + b, chr(int(a + b, 16)))
1478 for a in _hexdig for b in _hexdig)
1477 for a in _hexdig for b in _hexdig)
1479
1478
1480 def _urlunquote(s):
1479 def _urlunquote(s):
1481 """unquote('abc%20def') -> 'abc def'."""
1480 """unquote('abc%20def') -> 'abc def'."""
1482 res = s.split('%')
1481 res = s.split('%')
1483 # fastpath
1482 # fastpath
1484 if len(res) == 1:
1483 if len(res) == 1:
1485 return s
1484 return s
1486 s = res[0]
1485 s = res[0]
1487 for item in res[1:]:
1486 for item in res[1:]:
1488 try:
1487 try:
1489 s += _hextochr[item[:2]] + item[2:]
1488 s += _hextochr[item[:2]] + item[2:]
1490 except KeyError:
1489 except KeyError:
1491 s += '%' + item
1490 s += '%' + item
1492 except UnicodeDecodeError:
1491 except UnicodeDecodeError:
1493 s += unichr(int(item[:2], 16)) + item[2:]
1492 s += unichr(int(item[:2], 16)) + item[2:]
1494 return s
1493 return s
1495
1494
1496 class url(object):
1495 class url(object):
1497 r"""Reliable URL parser.
1496 r"""Reliable URL parser.
1498
1497
1499 This parses URLs and provides attributes for the following
1498 This parses URLs and provides attributes for the following
1500 components:
1499 components:
1501
1500
1502 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1501 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1503
1502
1504 Missing components are set to None. The only exception is
1503 Missing components are set to None. The only exception is
1505 fragment, which is set to '' if present but empty.
1504 fragment, which is set to '' if present but empty.
1506
1505
1507 If parsefragment is False, fragment is included in query. If
1506 If parsefragment is False, fragment is included in query. If
1508 parsequery is False, query is included in path. If both are
1507 parsequery is False, query is included in path. If both are
1509 False, both fragment and query are included in path.
1508 False, both fragment and query are included in path.
1510
1509
1511 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1510 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1512
1511
1513 Note that for backward compatibility reasons, bundle URLs do not
1512 Note that for backward compatibility reasons, bundle URLs do not
1514 take host names. That means 'bundle://../' has a path of '../'.
1513 take host names. That means 'bundle://../' has a path of '../'.
1515
1514
1516 Examples:
1515 Examples:
1517
1516
1518 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1517 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1519 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1518 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1520 >>> url('ssh://[::1]:2200//home/joe/repo')
1519 >>> url('ssh://[::1]:2200//home/joe/repo')
1521 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1520 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1522 >>> url('file:///home/joe/repo')
1521 >>> url('file:///home/joe/repo')
1523 <url scheme: 'file', path: '/home/joe/repo'>
1522 <url scheme: 'file', path: '/home/joe/repo'>
1524 >>> url('file:///c:/temp/foo/')
1523 >>> url('file:///c:/temp/foo/')
1525 <url scheme: 'file', path: 'c:/temp/foo/'>
1524 <url scheme: 'file', path: 'c:/temp/foo/'>
1526 >>> url('bundle:foo')
1525 >>> url('bundle:foo')
1527 <url scheme: 'bundle', path: 'foo'>
1526 <url scheme: 'bundle', path: 'foo'>
1528 >>> url('bundle://../foo')
1527 >>> url('bundle://../foo')
1529 <url scheme: 'bundle', path: '../foo'>
1528 <url scheme: 'bundle', path: '../foo'>
1530 >>> url(r'c:\foo\bar')
1529 >>> url(r'c:\foo\bar')
1531 <url path: 'c:\\foo\\bar'>
1530 <url path: 'c:\\foo\\bar'>
1532 >>> url(r'\\blah\blah\blah')
1531 >>> url(r'\\blah\blah\blah')
1533 <url path: '\\\\blah\\blah\\blah'>
1532 <url path: '\\\\blah\\blah\\blah'>
1534 >>> url(r'\\blah\blah\blah#baz')
1533 >>> url(r'\\blah\blah\blah#baz')
1535 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1534 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1536
1535
1537 Authentication credentials:
1536 Authentication credentials:
1538
1537
1539 >>> url('ssh://joe:xyz@x/repo')
1538 >>> url('ssh://joe:xyz@x/repo')
1540 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1539 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1541 >>> url('ssh://joe@x/repo')
1540 >>> url('ssh://joe@x/repo')
1542 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1541 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1543
1542
1544 Query strings and fragments:
1543 Query strings and fragments:
1545
1544
1546 >>> url('http://host/a?b#c')
1545 >>> url('http://host/a?b#c')
1547 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1546 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1548 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1547 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1549 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1548 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1550 """
1549 """
1551
1550
1552 _safechars = "!~*'()+"
1551 _safechars = "!~*'()+"
1553 _safepchars = "/!~*'()+:"
1552 _safepchars = "/!~*'()+:"
1554 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1553 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1555
1554
1556 def __init__(self, path, parsequery=True, parsefragment=True):
1555 def __init__(self, path, parsequery=True, parsefragment=True):
1557 # We slowly chomp away at path until we have only the path left
1556 # We slowly chomp away at path until we have only the path left
1558 self.scheme = self.user = self.passwd = self.host = None
1557 self.scheme = self.user = self.passwd = self.host = None
1559 self.port = self.path = self.query = self.fragment = None
1558 self.port = self.path = self.query = self.fragment = None
1560 self._localpath = True
1559 self._localpath = True
1561 self._hostport = ''
1560 self._hostport = ''
1562 self._origpath = path
1561 self._origpath = path
1563
1562
1564 if parsefragment and '#' in path:
1563 if parsefragment and '#' in path:
1565 path, self.fragment = path.split('#', 1)
1564 path, self.fragment = path.split('#', 1)
1566 if not path:
1565 if not path:
1567 path = None
1566 path = None
1568
1567
1569 # special case for Windows drive letters and UNC paths
1568 # special case for Windows drive letters and UNC paths
1570 if hasdriveletter(path) or path.startswith(r'\\'):
1569 if hasdriveletter(path) or path.startswith(r'\\'):
1571 self.path = path
1570 self.path = path
1572 return
1571 return
1573
1572
1574 # For compatibility reasons, we can't handle bundle paths as
1573 # For compatibility reasons, we can't handle bundle paths as
1575 # normal URLS
1574 # normal URLS
1576 if path.startswith('bundle:'):
1575 if path.startswith('bundle:'):
1577 self.scheme = 'bundle'
1576 self.scheme = 'bundle'
1578 path = path[7:]
1577 path = path[7:]
1579 if path.startswith('//'):
1578 if path.startswith('//'):
1580 path = path[2:]
1579 path = path[2:]
1581 self.path = path
1580 self.path = path
1582 return
1581 return
1583
1582
1584 if self._matchscheme(path):
1583 if self._matchscheme(path):
1585 parts = path.split(':', 1)
1584 parts = path.split(':', 1)
1586 if parts[0]:
1585 if parts[0]:
1587 self.scheme, path = parts
1586 self.scheme, path = parts
1588 self._localpath = False
1587 self._localpath = False
1589
1588
1590 if not path:
1589 if not path:
1591 path = None
1590 path = None
1592 if self._localpath:
1591 if self._localpath:
1593 self.path = ''
1592 self.path = ''
1594 return
1593 return
1595 else:
1594 else:
1596 if self._localpath:
1595 if self._localpath:
1597 self.path = path
1596 self.path = path
1598 return
1597 return
1599
1598
1600 if parsequery and '?' in path:
1599 if parsequery and '?' in path:
1601 path, self.query = path.split('?', 1)
1600 path, self.query = path.split('?', 1)
1602 if not path:
1601 if not path:
1603 path = None
1602 path = None
1604 if not self.query:
1603 if not self.query:
1605 self.query = None
1604 self.query = None
1606
1605
1607 # // is required to specify a host/authority
1606 # // is required to specify a host/authority
1608 if path and path.startswith('//'):
1607 if path and path.startswith('//'):
1609 parts = path[2:].split('/', 1)
1608 parts = path[2:].split('/', 1)
1610 if len(parts) > 1:
1609 if len(parts) > 1:
1611 self.host, path = parts
1610 self.host, path = parts
1612 path = path
1611 path = path
1613 else:
1612 else:
1614 self.host = parts[0]
1613 self.host = parts[0]
1615 path = None
1614 path = None
1616 if not self.host:
1615 if not self.host:
1617 self.host = None
1616 self.host = None
1618 # path of file:///d is /d
1617 # path of file:///d is /d
1619 # path of file:///d:/ is d:/, not /d:/
1618 # path of file:///d:/ is d:/, not /d:/
1620 if path and not hasdriveletter(path):
1619 if path and not hasdriveletter(path):
1621 path = '/' + path
1620 path = '/' + path
1622
1621
1623 if self.host and '@' in self.host:
1622 if self.host and '@' in self.host:
1624 self.user, self.host = self.host.rsplit('@', 1)
1623 self.user, self.host = self.host.rsplit('@', 1)
1625 if ':' in self.user:
1624 if ':' in self.user:
1626 self.user, self.passwd = self.user.split(':', 1)
1625 self.user, self.passwd = self.user.split(':', 1)
1627 if not self.host:
1626 if not self.host:
1628 self.host = None
1627 self.host = None
1629
1628
1630 # Don't split on colons in IPv6 addresses without ports
1629 # Don't split on colons in IPv6 addresses without ports
1631 if (self.host and ':' in self.host and
1630 if (self.host and ':' in self.host and
1632 not (self.host.startswith('[') and self.host.endswith(']'))):
1631 not (self.host.startswith('[') and self.host.endswith(']'))):
1633 self._hostport = self.host
1632 self._hostport = self.host
1634 self.host, self.port = self.host.rsplit(':', 1)
1633 self.host, self.port = self.host.rsplit(':', 1)
1635 if not self.host:
1634 if not self.host:
1636 self.host = None
1635 self.host = None
1637
1636
1638 if (self.host and self.scheme == 'file' and
1637 if (self.host and self.scheme == 'file' and
1639 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1638 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1640 raise Abort(_('file:// URLs can only refer to localhost'))
1639 raise Abort(_('file:// URLs can only refer to localhost'))
1641
1640
1642 self.path = path
1641 self.path = path
1643
1642
1644 # leave the query string escaped
1643 # leave the query string escaped
1645 for a in ('user', 'passwd', 'host', 'port',
1644 for a in ('user', 'passwd', 'host', 'port',
1646 'path', 'fragment'):
1645 'path', 'fragment'):
1647 v = getattr(self, a)
1646 v = getattr(self, a)
1648 if v is not None:
1647 if v is not None:
1649 setattr(self, a, _urlunquote(v))
1648 setattr(self, a, _urlunquote(v))
1650
1649
1651 def __repr__(self):
1650 def __repr__(self):
1652 attrs = []
1651 attrs = []
1653 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1652 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1654 'query', 'fragment'):
1653 'query', 'fragment'):
1655 v = getattr(self, a)
1654 v = getattr(self, a)
1656 if v is not None:
1655 if v is not None:
1657 attrs.append('%s: %r' % (a, v))
1656 attrs.append('%s: %r' % (a, v))
1658 return '<url %s>' % ', '.join(attrs)
1657 return '<url %s>' % ', '.join(attrs)
1659
1658
    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        """
        # Local paths (and bundle: URLs) were never split into components,
        # so rebuild them from the raw path plus the optional fragment.
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            # scheme with an absolute (or drive-letter) path still needs the
            # authority marker, e.g. file:///tmp and file:///c:/tmp
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urllib.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urllib.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals must not be percent-quoted
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urllib.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urllib.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urllib.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
        return s
1734
1733
1735 def authinfo(self):
1734 def authinfo(self):
1736 user, passwd = self.user, self.passwd
1735 user, passwd = self.user, self.passwd
1737 try:
1736 try:
1738 self.user, self.passwd = None, None
1737 self.user, self.passwd = None, None
1739 s = str(self)
1738 s = str(self)
1740 finally:
1739 finally:
1741 self.user, self.passwd = user, passwd
1740 self.user, self.passwd = user, passwd
1742 if not self.user:
1741 if not self.user:
1743 return (s, None)
1742 return (s, None)
1744 # authinfo[1] is passed to urllib2 password manager, and its
1743 # authinfo[1] is passed to urllib2 password manager, and its
1745 # URIs must not contain credentials. The host is passed in the
1744 # URIs must not contain credentials. The host is passed in the
1746 # URIs list because Python < 2.4.3 uses only that to search for
1745 # URIs list because Python < 2.4.3 uses only that to search for
1747 # a password.
1746 # a password.
1748 return (s, (None, (s, self.host),
1747 return (s, (None, (s, self.host),
1749 self.user, self.passwd or ''))
1748 self.user, self.passwd or ''))
1750
1749
1751 def isabs(self):
1750 def isabs(self):
1752 if self.scheme and self.scheme != 'file':
1751 if self.scheme and self.scheme != 'file':
1753 return True # remote URL
1752 return True # remote URL
1754 if hasdriveletter(self.path):
1753 if hasdriveletter(self.path):
1755 return True # absolute for our purposes - can't be joined()
1754 return True # absolute for our purposes - can't be joined()
1756 if self.path.startswith(r'\\'):
1755 if self.path.startswith(r'\\'):
1757 return True # Windows UNC path
1756 return True # Windows UNC path
1758 if self.path.startswith('/'):
1757 if self.path.startswith('/'):
1759 return True # POSIX-style
1758 return True # POSIX-style
1760 return False
1759 return False
1761
1760
    def localpath(self):
        """Return the local filesystem path this URL refers to.

        For non-file, non-bundle URLs the original unparsed string is
        returned unchanged.
        """
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                # NOTE(review): _hostport is set during parsing (outside
                # this view) when the host part contained a ':' -- confirm
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                # a host component means the path was authority-relative;
                # re-anchor it at the root
                path = '/' + path
            return path
        return self._origpath
1774
1773
def hasscheme(path):
    """Return True if path parses as a URL with a scheme prefix."""
    return bool(url(path).scheme)
1777
1776
def hasdriveletter(path):
    """Return a true value when path begins with a Windows drive letter.

    A falsy path is returned unchanged (preserving '' / None), matching
    the short-circuit behaviour of the original ``and`` chain.
    """
    if not path:
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
1780
1779
def urllocalpath(path):
    """Return the local filesystem path for a URL string.

    Query and fragment parsing are disabled so '?' and '#' stay part of
    the path.
    """
    return url(path, parsequery=False, parsefragment=False).localpath()
1783
1782
def hidepassword(u):
    '''hide user credential in a url string'''
    u = url(u)
    if u.passwd:
        # keep the username visible but mask the password
        u.passwd = '***'
    return str(u)
1790
1789
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    # dropping both user and password also drops the '@' separator
    u.user = u.passwd = None
    return str(u)
1796
1795
def isatty(fd):
    """Return True when fd is attached to a terminal.

    Objects without an isatty() method are treated as non-terminals.
    """
    isattyfn = getattr(fd, 'isatty', None)
    if isattyfn is None:
        return False
    return isattyfn()
@@ -1,329 +1,328
1 # windows.py - Windows utility function implementations for Mercurial
1 # windows.py - Windows utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil, encoding
9 import osutil, encoding
10 import errno, msvcrt, os, re, sys, _winreg
10 import errno, msvcrt, os, re, sys, _winreg
11
11
import win32

# Re-export the win32 module's helpers under the names the rest of
# Mercurial expects from a platform module.
executablepath = win32.executablepath
getuser = win32.getuser
hidewindow = win32.hidewindow
makedir = win32.makedir
nlinks = win32.nlinks
oslink = win32.oslink
samedevice = win32.samedevice
samefile = win32.samefile
setsignalhandler = win32.setsignalhandler
spawndetached = win32.spawndetached
termwidth = win32.termwidth
testpid = win32.testpid
unlink = win32.unlink

# NOTE(review): Windows has no POSIX umask; presumably this constant only
# keeps the platform modules' interfaces parallel -- confirm against posix.py.
umask = 0022
29
28
30 # wrap osutil.posixfile to provide friendlier exceptions
29 # wrap osutil.posixfile to provide friendlier exceptions
# wrap osutil.posixfile to provide friendlier exceptions
def posixfile(name, mode='r', buffering=-1):
    try:
        return osutil.posixfile(name, mode, buffering)
    except WindowsError, err:
        # translate into an IOError that mentions the file name
        raise IOError(err.errno, '%s: %s' % (name, err.strerror))
posixfile.__doc__ = osutil.posixfile.__doc__
37
36
class winstdout(object):
    '''stdout on windows misbehaves if sent through a pipe'''

    def __init__(self, fp):
        # the wrapped file object (normally the real sys.stdout)
        self.fp = fp

    def __getattr__(self, key):
        # delegate everything we don't override to the wrapped file
        return getattr(self.fp, key)

    def close(self):
        try:
            self.fp.close()
        except IOError:
            # closing a broken pipe raises; treat it as already closed
            pass

    def write(self, s):
        try:
            # This is workaround for "Not enough space" error on
            # writing large size of data to console.
            limit = 16000
            l = len(s)
            start = 0
            self.softspace = 0
            while start < l:
                end = start + limit
                self.fp.write(s[start:end])
                start = end
        except IOError, inst:
            # NOTE(review): errno 0 apparently signals a closed pipe on
            # Windows consoles -- confirm; other errors are re-raised
            if inst.errno != 0:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')

    def flush(self):
        try:
            return self.fp.flush()
        except IOError, inst:
            if inst.errno != errno.EINVAL:
                raise
            # EINVAL on flush means the pipe went away
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')

sys.__stdout__ = sys.stdout = winstdout(sys.stdout)
81
80
82 def _is_win_9x():
81 def _is_win_9x():
83 '''return true if run on windows 95, 98 or me.'''
82 '''return true if run on windows 95, 98 or me.'''
84 try:
83 try:
85 return sys.getwindowsversion()[3] == 1
84 return sys.getwindowsversion()[3] == 1
86 except AttributeError:
85 except AttributeError:
87 return 'command' in os.environ.get('comspec', '')
86 return 'command' in os.environ.get('comspec', '')
88
87
def openhardlinks():
    """Return True if hardlinks to open files work (everything but Win9x)."""
    return not _is_win_9x()
91
90
def parsepatchoutput(output_line):
    """parses the output produced by patch and returns the filename

    Strips the fixed "patching file " prefix and, when patch quoted the
    name, the surrounding backquote/quote pair.
    """
    pf = output_line[14:]
    # slice compare instead of pf[0] so an unexpectedly short line
    # cannot raise IndexError
    if pf[0:1] == '`':
        pf = pf[1:-1] # Remove the quotes
    return pf
98
97
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh or Plink'''
    # Plink spells the port option -P, OpenSSH spells it -p
    pflag = '-P' if 'plink' in sshcmd.lower() else '-p'
    target = '%s@%s' % (user, host) if user else host
    if port:
        return '%s %s %s' % (target, pflag, port)
    return target
104
103
def setflags(f, l, x):
    # no symlink (l) or executable (x) flags on Windows: nothing to do
    pass
107
106
def copymode(src, dst, mode=None):
    # permission bits are not meaningful on Windows: nothing to copy
    pass
110
109
def checkexec(path):
    """Windows filesystems never track an executable bit."""
    return False
113
112
def checklink(path):
    """Symlink support is not available on this platform."""
    return False
116
115
def setbinary(fd):
    """Put fd into binary mode so CRLF translation does not corrupt data."""
    # When run without console, pipes may expose invalid
    # fileno(), usually set to -1.
    fno = getattr(fd, 'fileno', None)
    if fno is not None and fno() >= 0:
        msvcrt.setmode(fno(), os.O_BINARY)
123
122
def pconvert(path):
    """Convert an os-native path to one using '/' separators."""
    return '/'.join(path.split(os.sep))
126
125
def localpath(path):
    """Convert a '/'-separated path to Windows-native backslashes."""
    return '\\'.join(path.split('/'))
129
128
def normpath(path):
    """Normalize path lexically, returning it with '/' separators."""
    # os.path.normpath emits os-native separators; convert back to '/'
    return os.path.normpath(path).replace(os.sep, '/')
132
131
def normcase(path):
    """Fold path to a canonical case (upper, matching NTFS semantics)."""
    return encoding.upper(path)
135
134
def realpath(path):
    '''
    Returns the true, canonical file system path equivalent to the given
    path.
    '''
    # TODO: There may be a more clever way to do this that also handles other,
    # less common file systems.
    return os.path.normpath(normcase(os.path.realpath(path)))
144
143
def samestat(s1, s2):
    # stat results cannot be compared for identity on this platform
    return False
147
146
# A sequence of backslashes is special iff it precedes a double quote:
# - if there's an even number of backslashes, the double quote is not
#   quoted (i.e. it ends the quoted region)
# - if there's an odd number of backslashes, the double quote is quoted
# - in both cases, every pair of backslashes is unquoted into a single
#   backslash
# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
# So, to quote a string, we must surround it in double quotes, double
# the number of backslashes that preceed double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
_quotere = None
def shellquote(s):
    """Quote s for use on a Windows command line (cmd.exe rules)."""
    global _quotere
    if _quotere is None:
        # compiled lazily and cached at module level
        _quotere = re.compile(r'(\\*)("|\\$)')
    escaped = _quotere.sub(r'\1\1\\\2', s)
    return '"%s"' % escaped
165
164
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    if sys.version_info >= (2, 7, 1):
        # Python versions since 2.7.1 do this extra quoting themselves
        return cmd
    return '"%s"' % cmd
172
171
def popen(command, mode='r'):
    """Run command via os.popen with stderr redirected to the null device."""
    # Work around "popen spawned process may not write to stdout
    # under windows"
    # http://bugs.python.org/issue1366
    command += " 2> %s" % os.devnull
    return os.popen(quotecommand(command), mode)
179
178
def explainexit(code):
    """Return (localized message, code) describing a process exit status."""
    message = _("exited with status %d") % code
    return message, code
182
181
# if you change this stub into a real check, please try to implement the
# username and groupname functions above, too.
def isowner(st):
    """Stub: assume the current user owns everything on Windows."""
    return True
187
186
def findexe(command):
    '''Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned.'''
    pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathexts = pathext.lower().split(os.pathsep)
    if os.path.splitext(command)[1].lower() in pathexts:
        # command already carries a recognized extension: try it as-is only
        pathexts = ['']

    def findexisting(pathcommand):
        'Will append extension (if needed) and return existing file'
        for ext in pathexts:
            candidate = pathcommand + ext
            if os.path.exists(candidate):
                return candidate
        return None

    if os.sep in command:
        # explicit path: do not consult PATH
        return findexisting(command)

    for searchdir in os.environ.get('PATH', '').split(os.pathsep):
        found = findexisting(os.path.join(searchdir, command))
        if found is not None:
            return found
    # last resort: expand ~ and environment variables in the command itself
    return findexisting(os.path.expanduser(os.path.expandvars(command)))
215
214
def statfiles(files):
    '''Stat each file in files and yield stat or None if file does not exist.
    Cluster and cache stat per directory to minimize number of OS stat calls.'''
    dircache = {} # dirname -> filename -> status | None if file does not exist
    for nf in files:
        # case-fold so lookups match the case-insensitive filesystem
        nf = normcase(nf)
        dir, base = os.path.split(nf)
        if not dir:
            dir = '.'
        cache = dircache.get(dir, None)
        if cache is None:
            try:
                # one listdir with stat info replaces a stat per file
                dmap = dict([(normcase(n), s)
                             for n, k, s in osutil.listdir(dir, True)])
            except OSError, err:
                # handle directory not found in Python version prior to 2.5
                # Python <= 2.4 returns native Windows code 3 in errno
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
                if err.errno not in (3, errno.ENOENT, errno.EINVAL,
                                     errno.ENOTDIR):
                    raise
                dmap = {}
            cache = dircache.setdefault(dir, dmap)
        yield cache.get(base, None)
241
240
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    # not implemented on Windows (see the note above isowner)
    return None
247
246
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    # not implemented on Windows (see the note above isowner)
    return None
253
252
def _removedirs(name):
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    # osutil.listdir (unlike os.listdir) is used so reparse points are
    # inspected rather than followed
    if osutil.listdir(name):
        return
    os.rmdir(name)
    head, tail = os.path.split(name)
    if not tail:
        head, tail = os.path.split(head)
    # walk up, removing each now-empty parent until one is non-empty
    while head and tail:
        try:
            if osutil.listdir(head):
                return
            os.rmdir(head)
        except (ValueError, OSError):
            break
        head, tail = os.path.split(head)
271
270
def unlinkpath(f):
    """unlink and remove the directory if it is empty"""
    unlink(f)
    # try removing directories that might now be empty
    try:
        _removedirs(os.path.dirname(f))
    except OSError:
        # best effort only; a non-empty or locked directory is fine
        pass
280
279
def rename(src, dst):
    '''atomically rename file src to dst, replacing dst if it exists'''
    try:
        os.rename(src, dst)
    except OSError, e:
        # on Windows, rename to existing file is not allowed, so we
        # must delete destination first. But if a file is open, unlink
        # schedules it for delete but does not delete it. Rename
        # happens immediately even for open files, so we rename
        # destination out of the way first (via win32.unlink's rename
        # trick) and then retry.
        if e.errno != errno.EEXIST:
            raise
        unlink(dst)
        os.rename(src, dst)
290
289
def gethgcmd():
    """Return the command list used to re-invoke this hg process."""
    cmd = [sys.executable]
    cmd.extend(sys.argv[:1])
    return cmd
293
292
def groupmembers(name):
    """Raise KeyError: group membership is unsupported on this platform."""
    # Don't support groups on Windows for now
    raise KeyError
297
296
def isexec(f):
    """No executable bit exists on Windows filesystems."""
    return False
300
299
class cachestat(object):
    """Stub stat-cache entry; validity caching is unsupported on Windows."""

    def __init__(self, path):
        # nothing useful to record on this platform
        pass

    def cacheable(self):
        """Report whether this stat data can validate a cache (never)."""
        return False
307
306
def lookupreg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).

    Returns the value as a locally-encoded string, or None when the key
    is missing in every scope.
    '''
    if scope is None:
        scope = (_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE)
    elif not isinstance(scope, (list, tuple)):
        scope = (scope,)
    for s in scope:
        try:
            val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            # missing key/value in this scope; try the next one
            pass
328
327
# cmd.exe does not expand wildcard arguments, so Mercurial must expand
# glob patterns itself on this platform.
expandglobs = True
General Comments 0
You need to be logged in to leave comments. Login now