##// END OF EJS Templates
py3: remove a couple of superfluous calls to pycompat.rapply()...
Matt Harbison -
r39868:f1d60214 default
parent child Browse files
Show More
@@ -1,553 +1,553 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import base64
9 import base64
10 import datetime
10 import datetime
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14 import shlex
14 import shlex
15 import subprocess
15 import subprocess
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 encoding,
19 encoding,
20 error,
20 error,
21 phases,
21 phases,
22 pycompat,
22 pycompat,
23 util,
23 util,
24 )
24 )
25 from mercurial.utils import (
25 from mercurial.utils import (
26 procutil,
26 procutil,
27 )
27 )
28
28
# Convenience re-exports from mercurial.util so the rest of this module
# (and the per-VCS converter modules) can use short names.
pickle = util.pickle
propertycache = util.propertycache
31
31
32 def _encodeornone(d):
32 def _encodeornone(d):
33 if d is None:
33 if d is None:
34 return
34 return
35 return d.encode('latin1')
35 return d.encode('latin1')
36
36
37 class _shlexpy3proxy(object):
37 class _shlexpy3proxy(object):
38
38
39 def __init__(self, l):
39 def __init__(self, l):
40 self._l = l
40 self._l = l
41
41
42 def __iter__(self):
42 def __iter__(self):
43 return (_encodeornone(v) for v in self._l)
43 return (_encodeornone(v) for v in self._l)
44
44
45 def get_token(self):
45 def get_token(self):
46 return _encodeornone(self._l.get_token())
46 return _encodeornone(self._l.get_token())
47
47
48 @property
48 @property
49 def infile(self):
49 def infile(self):
50 return self._l.infile or '<unknown>'
50 return self._l.infile or '<unknown>'
51
51
52 @property
52 @property
53 def lineno(self):
53 def lineno(self):
54 return self._l.lineno
54 return self._l.lineno
55
55
def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
    """Build a posix-mode shlex lexer over *data* or the file at *filepath*.

    Exactly one of *data*/*filepath* may be supplied.  On Python 3 the
    result is wrapped in _shlexpy3proxy so callers keep getting latin1
    bytes tokens from the str-based stdlib lexer, and the optional extra
    *whitespace*/*wordchars* characters are decoded to match.
    """
    if data is None:
        # latin1 round-trips every byte value, keeping file bytes intact
        if pycompat.ispy3:
            data = open(filepath, 'r', encoding=r'latin1')
        else:
            data = open(filepath, 'r')
    else:
        if filepath is not None:
            raise error.ProgrammingError(
                'shlexer only accepts data or filepath, not both')
        if pycompat.ispy3:
            data = data.decode('latin1')
    l = shlex.shlex(data, infile=filepath, posix=True)
    if whitespace is not None:
        # whitespace_split is only enabled when extra whitespace
        # characters are requested, matching historical behavior
        l.whitespace_split = True
        extra = whitespace.decode('latin1') if pycompat.ispy3 else whitespace
        l.whitespace += extra
    if wordchars is not None:
        extra = wordchars.decode('latin1') if pycompat.ispy3 else wordchars
        l.wordchars += extra
    return _shlexpy3proxy(l) if pycompat.ispy3 else l
83
83
def encodeargs(args):
    """Pickle *args* and return the payload base64-encoded on one line.

    The newlines that base64 inserts every 76 characters are stripped so
    the result can travel as a single token (e.g. on a command line);
    decodeargs() performs the inverse.
    """
    def encodearg(s):
        lines = base64.encodestring(s)
        # Join the base64 lines into one newline-free string.  The old
        # form iterated the encoded value character by character
        # (`[l.splitlines()[0] for l in lines]`), which breaks on
        # Python 3 where iterating bytes yields ints without a
        # splitlines() method; splitting the whole payload once is
        # equivalent on Python 2 and works on both.
        return ''.join(lines.splitlines())

    s = pickle.dumps(args)
    return encodearg(s)
92
92
def decodeargs(s):
    """Reverse encodeargs(): base64-decode *s* and unpickle the result.

    NOTE(review): pickle.loads is only safe here because the payload is
    produced by our own encodeargs() in the same process family — never
    feed this untrusted data.
    """
    return pickle.loads(base64.decodestring(s))
96
96
class MissingTool(Exception):
    """Raised when a required external conversion tool is unavailable."""
99
99
def checktool(exe, name=None, abort=True):
    """Verify that the external program *exe* is on the search path.

    When it is missing, raise error.Abort (or MissingTool if *abort* is
    False), naming the tool as *name* (defaults to *exe*).
    """
    name = name or exe
    if not procutil.findexe(exe):
        exc = error.Abort if abort else MissingTool
        raise exc(_('cannot find required "%s" tool') % name)
108
108
class NoRepo(Exception):
    """Raised when a path does not contain a repository of the expected kind."""
111
111
# Revision identifier recorded in the revmap for source commits that were
# deliberately skipped during conversion.
SKIPREV = 'SKIP'
113
113
class commit(object):
    """Value object describing a single source revision to be converted.

    All fields are plain attributes; missing author/date fall back to
    'unknown' and '0 0' respectively.
    """

    def __init__(self, author, date, desc, parents, branch=None, rev=None,
                 extra=None, sortkey=None, saverev=True, phase=phases.draft,
                 optparents=None):
        self.author = author or 'unknown'
        self.date = date or '0 0'
        self.desc = desc
        # parents that will be converted and used as parents
        self.parents = parents
        # parents that will be used if they are already converted
        self.optparents = optparents or []
        self.branch = branch
        self.rev = rev
        self.extra = extra or {}
        self.sortkey = sortkey
        self.saverev = saverev
        self.phase = phase
129
129
class converter_source(object):
    """Conversion source interface"""

    def __init__(self, ui, repotype, path=None, revs=None):
        """Initialize conversion source (or raise NoRepo("message")
        exception if path is not a valid repository)"""
        self.ui = ui
        self.path = path
        self.revs = revs
        self.repotype = repotype

        self.encoding = 'utf-8'

    def checkhexformat(self, revstr, mapname='splicemap'):
        """Abort unless *revstr* is a 40-character hex string — the
        revision identifier format used by both mercurial and git.
        """
        if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
            raise error.Abort(_('%s entry %s is not a valid revision'
                                ' identifier') % (mapname, revstr))

    def before(self):
        """Hook called before conversion starts; default is a no-op."""

    def after(self):
        """Hook called after conversion finishes; default is a no-op."""

    def targetfilebelongstosource(self, targetfilename):
        """Returns true if the given targetfile belongs to the source repo. This
        is useful when only a subdirectory of the target belongs to the source
        repo."""
        # For normal full repo converts, this is always True.
        return True

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def getfile(self, name, rev):
        """Return a pair (data, mode) where data is the file content
        as a string and mode one of '', 'x' or 'l'. rev is the
        identifier returned by a previous call to getchanges().
        Data is None if file is missing/deleted in rev.
        """
        raise NotImplementedError

    def getchanges(self, version, full):
        """Returns a tuple of (files, copies, cleanp2).

        files is a sorted list of (filename, id) tuples for all files
        changed between version and its first parent returned by
        getcommit(). If full, all files in that revision is returned.
        id is the source revision id of the file.

        copies is a dictionary of dest: source

        cleanp2 is the set of files filenames that are clean against p2.
        (Files that are clean against p1 are already not in files (unless
        full). This makes it possible to handle p2 clean files similarly.)
        """
        raise NotImplementedError

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError

    def numcommits(self):
        """Return the number of commits in this source, or None if unknown."""
        return None

    def gettags(self):
        """Return the tags as a dictionary of name: revision

        Tag names must be UTF-8 strings.
        """
        raise NotImplementedError

    def recode(self, s, encoding=None):
        """Re-encode *s* to UTF-8 bytes, trying *encoding* (default:
        self.encoding), then latin-1, then *encoding* with replacement.
        """
        if not encoding:
            encoding = self.encoding or 'utf-8'

        if isinstance(s, pycompat.unicode):
            # already decoded text: just encode
            return s.encode("utf-8")
        try:
            return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
        except UnicodeError:
            try:
                return s.decode("latin-1").encode("utf-8")
            except UnicodeError:
                # last resort: lossy decode with replacement characters
                return s.decode(pycompat.sysstr(encoding),
                                "replace").encode("utf-8")

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev. The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError

    def converted(self, rev, sinkrev):
        '''Notify the source that a revision has been converted.'''

    def hasnativeorder(self):
        """Return true if this source has a meaningful, native revision
        order. For instance, Mercurial revisions are store sequentially
        while there is no such global ordering with Darcs.
        """
        return False

    def hasnativeclose(self):
        """Return true if this source has ability to close branch."""
        return False

    def lookuprev(self, rev):
        """If rev is a meaningful revision reference in source, return
        the referenced identifier in the same format used by getcommit().
        return None otherwise.
        """
        return None

    def getbookmarks(self):
        """Return the bookmarks as a dictionary of name: revision

        Bookmark names are to be UTF-8 strings.
        """
        return {}

    def checkrevformat(self, revstr, mapname='splicemap'):
        """revstr is a string that describes a revision in the given
        source control system. Return true if revstr has correct
        format.
        """
        return True
276
276
class converter_sink(object):
    """Conversion sink (target) interface"""

    def __init__(self, ui, repotype, path):
        """Initialize conversion sink (or raise NoRepo("message")
        exception if path is not a valid repository)

        created is a list of paths to remove if a fatal error occurs
        later"""
        self.ui = ui
        self.path = path
        self.created = []
        self.repotype = repotype

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent authors identifiers for each system."""
        return None

    def putcommit(self, files, copies, parents, commit, source, revmap, full,
                  cleanp2):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object
        containing at a minimum the author, date, and message for this
        changeset. 'files' is a list of (path, version) tuples,
        'copies' is a dictionary mapping destinations to sources,
        'source' is the source repository, and 'revmap' is a mapfile
        of source revisions to converted revisions. Only getfile() and
        lookuprev() should be called on 'source'. 'full' means that 'files'
        is complete and all other files should be removed.
        'cleanp2' is a set of the filenames that are unchanged from p2
        (only in the common merge case where there two parents).

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError

    def puttags(self, tags):
        """Put tags into sink.

        tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
        Return a pair (tag_revision, tag_parent_revision), or (None, None)
        if nothing was changed.
        """
        raise NotImplementedError

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.
        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change. This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """

    def before(self):
        """Hook called before any commit is written; default is a no-op."""

    def after(self):
        """Hook called after the conversion completes; default is a no-op."""

    def putbookmarks(self, bookmarks):
        """Put bookmarks into sink.

        bookmarks: {bookmarkname: sink_rev_id, ...}
        where bookmarkname is an UTF-8 string.
        """

    def hascommitfrommap(self, rev):
        """Return False if a rev mentioned in a filemap is known to not be
        present."""
        raise NotImplementedError

    def hascommitforsplicemap(self, rev):
        """This method is for the special needs for splicemap handling and not
        for general use. Returns True if the sink contains rev, aborts on some
        special cases."""
        raise NotImplementedError
370
370
class commandline(object):
    """Helper for driving an external version-control command line tool.

    Subclasses supply the tool via *command* and use the run*()/xargs()
    helpers to build, execute and check invocations of it.
    """

    def __init__(self, ui, command):
        self.ui = ui
        self.command = command

    def prerun(self):
        """Hook invoked right before each command execution."""

    def postrun(self):
        """Hook invoked after each command execution, even on failure."""

    def _cmdline(self, cmd, *args, **kwargs):
        """Assemble a shell-quoted command string for *cmd*.

        Keyword arguments become options: single-letter keys produce
        '-k value' pairs, longer keys '--long-key=value'.  A value of
        None yields a bare flag — the TypeError raised by concatenating
        None is deliberately swallowed.
        """
        kwargs = pycompat.byteskwargs(kwargs)
        parts = [self.command, cmd] + list(args)
        for opt, val in kwargs.iteritems():
            short = len(opt) == 1
            if short:
                parts.append('-' + opt)
            else:
                parts.append('--' + opt.replace('_', '-'))
            try:
                if short:
                    parts.append('' + val)
                else:
                    parts[-1] += '=' + val
            except TypeError:
                # val is None: keep the option as a bare flag
                pass
        parts = [procutil.shellquote(arg) for arg in parts]
        if not self.ui.debugflag:
            # silence the tool's stderr unless --debug was requested
            parts += ['2>', pycompat.bytestr(os.devnull)]
        return ' '.join(parts)

    def _run(self, cmd, *args, **kwargs):
        """Execute *cmd*, capturing stdout; return the Popen object."""
        def popen(cmdline):
            return subprocess.Popen(procutil.tonativestr(cmdline),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE)
        return self._dorun(popen, cmd, *args, **kwargs)

    def _run2(self, cmd, *args, **kwargs):
        return self._dorun(procutil.popen2, cmd, *args, **kwargs)

    def _run3(self, cmd, *args, **kwargs):
        return self._dorun(procutil.popen3, cmd, *args, **kwargs)

    def _dorun(self, openfunc, cmd, *args, **kwargs):
        """Build the command line and launch it via *openfunc*,
        bracketing the launch with the prerun()/postrun() hooks."""
        cmdline = self._cmdline(cmd, *args, **kwargs)
        self.ui.debug('running: %s\n' % (cmdline,))
        self.prerun()
        try:
            return openfunc(cmdline)
        finally:
            self.postrun()

    def run(self, cmd, *args, **kwargs):
        """Run *cmd* to completion; return (stdout, returncode)."""
        p = self._run(cmd, *args, **kwargs)
        output = p.communicate()[0]
        self.ui.debug(output)
        return output, p.returncode

    def runlines(self, cmd, *args, **kwargs):
        """Run *cmd*; return (stdout as a list of lines, returncode)."""
        p = self._run(cmd, *args, **kwargs)
        output = p.stdout.readlines()
        p.wait()
        self.ui.debug(''.join(output))
        return output, p.returncode

    def checkexit(self, status, output=''):
        """Raise error.Abort (showing *output* first) if *status* is
        non-zero; do nothing otherwise."""
        if not status:
            return
        if output:
            self.ui.warn(_('%s error:\n') % self.command)
            self.ui.warn(output)
        msg = procutil.explainexit(status)
        raise error.Abort('%s %s' % (self.command, msg))

    def run0(self, cmd, *args, **kwargs):
        """Like run(), but abort on a non-zero exit; return stdout."""
        output, status = self.run(cmd, *args, **kwargs)
        self.checkexit(status, output)
        return output

    def runlines0(self, cmd, *args, **kwargs):
        """Like runlines(), but abort on a non-zero exit; return lines."""
        output, status = self.runlines(cmd, *args, **kwargs)
        self.checkexit(status, ''.join(output))
        return output

    @propertycache
    def argmax(self):
        """Conservative upper bound on command line length we may build."""
        # POSIX requires at least 4096 bytes for ARG_MAX
        argmax = 4096
        try:
            argmax = os.sysconf(r"SC_ARG_MAX")
        except (AttributeError, ValueError):
            pass

        # Windows shells impose their own limits on command line length,
        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
        # details about cmd.exe limitations.

        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).
        return argmax // 2 - 1

    def _limit_arglist(self, arglist, cmd, *args, **kwargs):
        """Yield chunks of *arglist* that keep the full command line
        within self.argmax (each chunk holds at least one entry)."""
        limit = self.argmax - len(self._cmdline(cmd, *args, **kwargs))
        chunk, chunklen = [], 0
        for fn in arglist:
            # 3 extra bytes account for quoting and the separating space
            b = len(fn) + 3
            if chunklen + b >= limit and chunk:
                yield chunk
                chunk, chunklen = [], 0
            chunk.append(fn)
            chunklen += b
        if chunk:
            yield chunk

    def xargs(self, arglist, cmd, *args, **kwargs):
        """Run *cmd* over *arglist*, batching to respect length limits."""
        for batch in self._limit_arglist(arglist, cmd, *args, **kwargs):
            self.run0(cmd, *(list(args) + batch), **kwargs)
496
496
class mapfile(dict):
    """Persistent key->value map backed by a text file of 'key value' lines.

    Assignments are appended to the backing file immediately; the order
    in which keys were first seen is tracked in self.order.  A falsy
    *path* gives a purely in-memory (and unwritable) map.
    """

    def __init__(self, ui, path):
        super(mapfile, self).__init__()
        self.ui = ui
        self.path = path
        self.fp = None     # append handle, opened lazily on first write
        self.order = []    # keys in first-seen order
        self._read()

    def _read(self):
        """Load any existing entries from self.path."""
        if not self.path:
            return
        try:
            fp = open(self.path, 'rb')
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # a missing file simply means an empty map
            return
        for lineno, line in enumerate(util.iterfile(fp)):
            line = line.splitlines()[0].rstrip()
            if not line:
                # Ignore blank lines
                continue
            try:
                # values may contain spaces; the key is everything before
                # the last space
                key, value = line.rsplit(' ', 1)
            except ValueError:
                raise error.Abort(
                    _('syntax error in %s(%d): key/value pair expected')
                    % (self.path, lineno + 1))
            if key not in self:
                self.order.append(key)
            super(mapfile, self).__setitem__(key, value)
        fp.close()

    def __setitem__(self, key, value):
        """Record key->value in memory and append it to the backing file."""
        if self.fp is None:
            try:
                self.fp = open(self.path, 'ab')
            except IOError as err:
                raise error.Abort(
                    _('could not open map file %r: %s') %
                    (self.path, encoding.strtolocal(err.strerror)))
        self.fp.write(util.tonativeeol('%s %s\n' % (key, value)))
        self.fp.flush()
        super(mapfile, self).__setitem__(key, value)

    def close(self):
        """Close the backing file handle, if one was opened."""
        if self.fp:
            self.fp.close()
            self.fp = None
547
547
def makedatetimestamp(t):
    """Like dateutil.makedate() but for time t instead of current time"""
    # the UTC-vs-local difference at time t is the timezone offset to record
    utc = datetime.datetime.utcfromtimestamp(t)
    local = datetime.datetime.fromtimestamp(t)
    offset = utc - local
    return t, offset.days * 86400 + offset.seconds
@@ -1,615 +1,615 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:fileset=set:**.cpp or **.hpp
18 clang-format:fileset=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. If there is any output on
22 fixed file content is expected on standard output. If there is any output on
23 standard error, the file will not be affected. Some values may be substituted
23 standard error, the file will not be affected. Some values may be substituted
24 into the command::
24 into the command::
25
25
26 {rootpath} The path of the file being fixed, relative to the repo root
26 {rootpath} The path of the file being fixed, relative to the repo root
27 {basename} The name of the file being fixed, without the directory path
27 {basename} The name of the file being fixed, without the directory path
28
28
29 If the :linerange suboption is set, the tool will only be run if there are
29 If the :linerange suboption is set, the tool will only be run if there are
30 changed lines in a file. The value of this suboption is appended to the shell
30 changed lines in a file. The value of this suboption is appended to the shell
31 command once for every range of changed lines in the file. Some values may be
31 command once for every range of changed lines in the file. Some values may be
32 substituted into the command::
32 substituted into the command::
33
33
34 {first} The 1-based line number of the first line in the modified range
34 {first} The 1-based line number of the first line in the modified range
35 {last} The 1-based line number of the last line in the modified range
35 {last} The 1-based line number of the last line in the modified range
36
36
37 The :fileset suboption determines which files will be passed through each
37 The :fileset suboption determines which files will be passed through each
38 configured tool. See :hg:`help fileset` for possible values. If there are file
38 configured tool. See :hg:`help fileset` for possible values. If there are file
39 arguments to :hg:`fix`, the intersection of these filesets is used.
39 arguments to :hg:`fix`, the intersection of these filesets is used.
40
40
41 There is also a configurable limit for the maximum size of file that will be
41 There is also a configurable limit for the maximum size of file that will be
42 processed by :hg:`fix`::
42 processed by :hg:`fix`::
43
43
44 [fix]
44 [fix]
45 maxfilesize=2MB
45 maxfilesize=2MB
46
46
47 """
47 """
48
48
49 from __future__ import absolute_import
49 from __future__ import absolute_import
50
50
51 import collections
51 import collections
52 import itertools
52 import itertools
53 import os
53 import os
54 import re
54 import re
55 import subprocess
55 import subprocess
56
56
57 from mercurial.i18n import _
57 from mercurial.i18n import _
58 from mercurial.node import nullrev
58 from mercurial.node import nullrev
59 from mercurial.node import wdirrev
59 from mercurial.node import wdirrev
60
60
61 from mercurial.utils import (
61 from mercurial.utils import (
62 procutil,
62 procutil,
63 )
63 )
64
64
65 from mercurial import (
65 from mercurial import (
66 cmdutil,
66 cmdutil,
67 context,
67 context,
68 copies,
68 copies,
69 error,
69 error,
70 mdiff,
70 mdiff,
71 merge,
71 merge,
72 obsolete,
72 obsolete,
73 pycompat,
73 pycompat,
74 registrar,
74 registrar,
75 scmutil,
75 scmutil,
76 util,
76 util,
77 worker,
77 worker,
78 )
78 )
79
79
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Register the suboptions allowed for each configured fixer.
FIXER_ATTRS = ('command', 'linerange', 'fileset')

for key in FIXER_ATTRS:
    configitem('fix', '.*(:%s)?' % key, default=None, generic=True)

# A good default size allows most source code files to be fixed, but avoids
# letting fixer tools choke on huge inputs, which could be surprising to the
# user.
configitem('fix', 'maxfilesize', default='2MB')

# Command-line flag tuples for the fix command, declared once so the
# @command registration can reuse them.
allopt = ('', 'all', False, _('fix all non-public non-obsolete revisions'))
baseopt = ('', 'base', [], _('revisions to diff against (overrides automatic '
                             'selection, and applies to every revision being '
                             'fixed)'), _('REV'))
revopt = ('r', 'rev', [], _('revisions to fix'), _('REV'))
wdiropt = ('w', 'working-dir', False, _('fix the working directory'))
wholeopt = ('', 'whole', False, _('always fix every line of a file'))
usage = _('[OPTION]... [FILE]...')
111
@command('fix', [allopt, baseopt, revopt, wdiropt, wholeopt], usage)
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If revisions are specified with --rev, those revisions will be checked, and
    they may be replaced with new revisions that have fixed file content. It is
    desirable to specify all descendants of each specified revision, so that the
    fixes propagate to the descendants. If all descendants are fixed at the same
    time, no merging, rebasing, or evolution will be required.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. If the checked-out revision is also fixed, the working
    directory will update to the replacement revision.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    # The rest of this module deals in bytes keys, so convert the kwargs dict.
    opts = pycompat.byteskwargs(opts)
    if opts['all']:
        # --all expands to every non-public, non-obsolete revision plus the
        # working directory, and is mutually exclusive with --rev.
        if opts['rev']:
            raise error.Abort(_('cannot specify both "--rev" and "--all"'))
        opts['rev'] = ['not public() and not obsolete()']
        opts['working_dir'] = True
    # All replacements happen under both repo locks and one 'fix' transaction.
    with repo.wlock(), repo.lock(), repo.transaction('fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
                                           basectxs)
        fixers = getfixers(ui)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (rev, path, newdata if newdata != olddata else None)
        results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue)

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(topic=_('fixing'), unit=_('files'),
                             total=sum(numitems.values())) as progress:
            for rev, path, newdata in results:
                progress.increment(item=path)
                if newdata is not None:
                    filedata[rev][path] = newdata
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
193
193
def cleanup(repo, replacements, wdirwritten):
    """Calls scmutil.cleanupnodes() with the given replacements.

    "replacements" is a dict from nodeid to nodeid, with one key and one value
    for every revision that was affected by fixing. This is slightly different
    from cleanupnodes().

    "wdirwritten" is a bool which tells whether the working copy was affected by
    fixing, since it has no entry in "replacements".

    Useful as a hook point for extending "hg fix" with output summarizing the
    effects of the command, though we choose not to output anything here.
    """
    # dict.iteritems() does not exist on Python 3; items() behaves the same on
    # both Python versions (on py2 it materializes a list, which is fine here).
    replacements = {prec: [succ] for prec, succ in replacements.items()}
    scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
209
209
def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
    """Constructs the list of files to be fixed at specific revisions

    It is up to the caller how to consume the work items, and the only
    dependence between them is that replacement revisions must be committed in
    topological order. Each work item represents a file in the working copy or
    in some revision that should be fixed and written back to the working copy
    or into a replacement revision.

    Work items for the same revision are grouped together, so that a worker
    pool starting with the first N items in parallel is likely to finish the
    first revision's work before other revisions. This can allow us to write
    the result to disk and reduce memory footprint. At time of writing, the
    partition strategy in worker.py seems favorable to this. We also sort the
    items by ascending revision number to match the order in which we commit
    the fixes later.
    """
    workqueue = []
    numitems = collections.defaultdict(int)
    maxfilesize = ui.configbytes('fix', 'maxfilesize')
    for rev in sorted(revstofix):
        fixctx = repo[rev]
        match = scmutil.match(fixctx, pats, opts)
        for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
                               fixctx):
            if path not in fixctx:
                continue
            fctx = fixctx[path]
            # Skip symlinks; only regular file content is fixed.
            if fctx.islink():
                continue
            if fctx.size() > maxfilesize:
                ui.warn(_('ignoring file larger than %s: %s\n') %
                        (util.bytecount(maxfilesize), path))
                continue
            workqueue.append((rev, path))
            numitems[rev] += 1
    return workqueue, numitems
247
247
def getrevstofix(ui, repo, opts):
    """Returns the set of revision numbers that should be fixed"""
    targets = set(scmutil.revrange(repo, opts['rev']))
    for revnum in targets:
        checkfixablectx(ui, repo, repo[revnum])
    if targets:
        # Don't replace commits while some other operation is unfinished.
        cmdutil.checkunfinished(repo)
        checknodescendants(repo, targets)
    if opts.get('working_dir'):
        targets.add(wdirrev)
        if list(merge.mergestate.read(repo).unresolved()):
            raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
    if not targets:
        raise error.Abort(
            'no changesets specified', hint='use --rev or --working-dir')
    return targets
264
264
def checknodescendants(repo, revs):
    """Abort when some descendant of revs is not itself in revs.

    Skipped entirely when the repo allows unstable changesets.
    """
    if obsolete.isenabled(repo, obsolete.allowunstableopt):
        return
    if repo.revs('(%ld::) - (%ld)', revs, revs):
        raise error.Abort(_('can only fix a changeset together '
                            'with all its descendants'))
270
270
def checkfixablectx(ui, repo, ctx):
    """Aborts if the revision shouldn't be replaced with a fixed one."""
    if not ctx.mutable():
        raise error.Abort("can't fix immutable changeset %s" %
                          (scmutil.formatchangeid(ctx),))
    if ctx.obsolete():
        # It would be better to actually check if the revision has a successor.
        if not ui.configbool('experimental', 'evolution.allowdivergence'):
            raise error.Abort('fixing obsolete revision could cause divergence')
282
282
def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed.
    """
    changed = set()
    listextra = bool(pats)
    for base in basectxs:
        stat = base.status(fixctx, match=match, listclean=listextra,
                           listunknown=listextra)
        for group in (stat.added, stat.modified, stat.clean, stat.unknown):
            changed.update(group)
    return changed
298
298
def lineranges(opts, path, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that have
    changed versus any of the base contexts, and whether the file has been
    renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get('whole'):
        # Return a range containing all lines. Rely on the diff implementation's
        # idea of how many lines are in the file, instead of reimplementing it.
        return difflineranges('', content2)

    allranges = []
    for basectx in basectxs:
        # Follow renames so we diff against the right path in the base context.
        basepath = copies.pathcopies(basectx, fixctx).get(path, path)
        content1 = basectx[basepath].data() if basepath in basectx else ''
        allranges.extend(difflineranges(content1, content2))
    return unionranges(allranges)
325
325
def unionranges(rangeslist):
    """Return the union of some closed intervals

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    merged = []
    # Sorting unique intervals by start lets us merge in one left-to-right pass.
    for start, end in sorted(set(rangeslist)):
        if merged and start <= merged[-1][1] + 1:
            # Overlapping or adjacent: extend the previous interval.
            prevstart, prevend = merged[-1]
            merged[-1] = (prevstart, max(end, prevend))
        else:
            merged.append((start, end))
    return merged
359
359
def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    ranges = []
    for block, blocktype in mdiff.allblocks(content1, content2):
        # block[2:4] are the half-open [start, stop) line indices in content2.
        start, stop = block[2], block[3]
        # Only changed ('!') blocks that are non-empty on the content2 side
        # produce a range; pure deletions have start == stop.
        if blocktype == '!' and start != stop:
            ranges.append((start + 1, stop))
    return ranges
405
405
def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    userbase = opts.get('base')
    if userbase:
        baserevs = set(scmutil.revrange(repo, userbase)) or {nullrev}
        fixedbases = {repo[r] for r in baserevs}
        return {rev: fixedbases for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        for parent in repo[rev].parents():
            prev = parent.rev()
            if prev in basectxs:
                # The parent is itself being fixed; inherit its bases.
                basectxs[rev].update(basectxs[prev])
            else:
                basectxs[rev].add(parent)
    return basectxs
434
434
435 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
435 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
436 """Run any configured fixers that should affect the file in this context
436 """Run any configured fixers that should affect the file in this context
437
437
438 Returns the file content that results from applying the fixers in some order
438 Returns the file content that results from applying the fixers in some order
439 starting with the file's content in the fixctx. Fixers that support line
439 starting with the file's content in the fixctx. Fixers that support line
440 ranges will affect lines that have changed relative to any of the basectxs
440 ranges will affect lines that have changed relative to any of the basectxs
441 (i.e. they will only avoid lines that are common to all basectxs).
441 (i.e. they will only avoid lines that are common to all basectxs).
442
442
443 A fixer tool's stdout will become the file's new content if and only if it
443 A fixer tool's stdout will become the file's new content if and only if it
444 exits with code zero.
444 exits with code zero.
445 """
445 """
446 newdata = fixctx[path].data()
446 newdata = fixctx[path].data()
447 for fixername, fixer in fixers.iteritems():
447 for fixername, fixer in fixers.iteritems():
448 if fixer.affects(opts, fixctx, path):
448 if fixer.affects(opts, fixctx, path):
449 rangesfn = lambda: lineranges(opts, path, basectxs, fixctx, newdata)
449 rangesfn = lambda: lineranges(opts, path, basectxs, fixctx, newdata)
450 command = fixer.command(ui, path, rangesfn)
450 command = fixer.command(ui, path, rangesfn)
451 if command is None:
451 if command is None:
452 continue
452 continue
453 ui.debug('subprocess: %s\n' % (command,))
453 ui.debug('subprocess: %s\n' % (command,))
454 proc = subprocess.Popen(
454 proc = subprocess.Popen(
455 pycompat.rapply(procutil.tonativestr, command),
455 procutil.tonativestr(command),
456 shell=True,
456 shell=True,
457 cwd=procutil.tonativestr(b'/'),
457 cwd=procutil.tonativestr(b'/'),
458 stdin=subprocess.PIPE,
458 stdin=subprocess.PIPE,
459 stdout=subprocess.PIPE,
459 stdout=subprocess.PIPE,
460 stderr=subprocess.PIPE)
460 stderr=subprocess.PIPE)
461 newerdata, stderr = proc.communicate(newdata)
461 newerdata, stderr = proc.communicate(newdata)
462 if stderr:
462 if stderr:
463 showstderr(ui, fixctx.rev(), fixername, stderr)
463 showstderr(ui, fixctx.rev(), fixername, stderr)
464 if proc.returncode == 0:
464 if proc.returncode == 0:
465 newdata = newerdata
465 newdata = newerdata
466 elif not stderr:
466 elif not stderr:
467 showstderr(ui, fixctx.rev(), fixername,
467 showstderr(ui, fixctx.rev(), fixername,
468 _('exited with status %d\n') % (proc.returncode,))
468 _('exited with status %d\n') % (proc.returncode,))
469 return newdata
469 return newdata
470
470
471 def showstderr(ui, rev, fixername, stderr):
471 def showstderr(ui, rev, fixername, stderr):
472 """Writes the lines of the stderr string as warnings on the ui
472 """Writes the lines of the stderr string as warnings on the ui
473
473
474 Uses the revision number and fixername to give more context to each line of
474 Uses the revision number and fixername to give more context to each line of
475 the error message. Doesn't include file names, since those take up a lot of
475 the error message. Doesn't include file names, since those take up a lot of
476 space and would tend to be included in the error message if they were
476 space and would tend to be included in the error message if they were
477 relevant.
477 relevant.
478 """
478 """
479 for line in re.split('[\r\n]+', stderr):
479 for line in re.split('[\r\n]+', stderr):
480 if line:
480 if line:
481 ui.warn(('['))
481 ui.warn(('['))
482 if rev is None:
482 if rev is None:
483 ui.warn(_('wdir'), label='evolve.rev')
483 ui.warn(_('wdir'), label='evolve.rev')
484 else:
484 else:
485 ui.warn((str(rev)), label='evolve.rev')
485 ui.warn((str(rev)), label='evolve.rev')
486 ui.warn(('] %s: %s\n') % (fixername, line))
486 ui.warn(('] %s: %s\n') % (fixername, line))
487
487
488 def writeworkingdir(repo, ctx, filedata, replacements):
488 def writeworkingdir(repo, ctx, filedata, replacements):
489 """Write new content to the working copy and check out the new p1 if any
489 """Write new content to the working copy and check out the new p1 if any
490
490
491 We check out a new revision if and only if we fixed something in both the
491 We check out a new revision if and only if we fixed something in both the
492 working directory and its parent revision. This avoids the need for a full
492 working directory and its parent revision. This avoids the need for a full
493 update/merge, and means that the working directory simply isn't affected
493 update/merge, and means that the working directory simply isn't affected
494 unless the --working-dir flag is given.
494 unless the --working-dir flag is given.
495
495
496 Directly updates the dirstate for the affected files.
496 Directly updates the dirstate for the affected files.
497 """
497 """
498 for path, data in filedata.iteritems():
498 for path, data in filedata.iteritems():
499 fctx = ctx[path]
499 fctx = ctx[path]
500 fctx.write(data, fctx.flags())
500 fctx.write(data, fctx.flags())
501 if repo.dirstate[path] == 'n':
501 if repo.dirstate[path] == 'n':
502 repo.dirstate.normallookup(path)
502 repo.dirstate.normallookup(path)
503
503
504 oldparentnodes = repo.dirstate.parents()
504 oldparentnodes = repo.dirstate.parents()
505 newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
505 newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
506 if newparentnodes != oldparentnodes:
506 if newparentnodes != oldparentnodes:
507 repo.setparents(*newparentnodes)
507 repo.setparents(*newparentnodes)
508
508
509 def replacerev(ui, repo, ctx, filedata, replacements):
509 def replacerev(ui, repo, ctx, filedata, replacements):
510 """Commit a new revision like the given one, but with file content changes
510 """Commit a new revision like the given one, but with file content changes
511
511
512 "ctx" is the original revision to be replaced by a modified one.
512 "ctx" is the original revision to be replaced by a modified one.
513
513
514 "filedata" is a dict that maps paths to their new file content. All other
514 "filedata" is a dict that maps paths to their new file content. All other
515 paths will be recreated from the original revision without changes.
515 paths will be recreated from the original revision without changes.
516 "filedata" may contain paths that didn't exist in the original revision;
516 "filedata" may contain paths that didn't exist in the original revision;
517 they will be added.
517 they will be added.
518
518
519 "replacements" is a dict that maps a single node to a single node, and it is
519 "replacements" is a dict that maps a single node to a single node, and it is
520 updated to indicate the original revision is replaced by the newly created
520 updated to indicate the original revision is replaced by the newly created
521 one. No entry is added if the replacement's node already exists.
521 one. No entry is added if the replacement's node already exists.
522
522
523 The new revision has the same parents as the old one, unless those parents
523 The new revision has the same parents as the old one, unless those parents
524 have already been replaced, in which case those replacements are the parents
524 have already been replaced, in which case those replacements are the parents
525 of this new revision. Thus, if revisions are replaced in topological order,
525 of this new revision. Thus, if revisions are replaced in topological order,
526 there is no need to rebase them into the original topology later.
526 there is no need to rebase them into the original topology later.
527 """
527 """
528
528
529 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
529 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
530 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
530 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
531 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
531 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
532 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
532 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
533
533
534 def filectxfn(repo, memctx, path):
534 def filectxfn(repo, memctx, path):
535 if path not in ctx:
535 if path not in ctx:
536 return None
536 return None
537 fctx = ctx[path]
537 fctx = ctx[path]
538 copied = fctx.renamed()
538 copied = fctx.renamed()
539 if copied:
539 if copied:
540 copied = copied[0]
540 copied = copied[0]
541 return context.memfilectx(
541 return context.memfilectx(
542 repo,
542 repo,
543 memctx,
543 memctx,
544 path=fctx.path(),
544 path=fctx.path(),
545 data=filedata.get(path, fctx.data()),
545 data=filedata.get(path, fctx.data()),
546 islink=fctx.islink(),
546 islink=fctx.islink(),
547 isexec=fctx.isexec(),
547 isexec=fctx.isexec(),
548 copied=copied)
548 copied=copied)
549
549
550 memctx = context.memctx(
550 memctx = context.memctx(
551 repo,
551 repo,
552 parents=(newp1node, newp2node),
552 parents=(newp1node, newp2node),
553 text=ctx.description(),
553 text=ctx.description(),
554 files=set(ctx.files()) | set(filedata.keys()),
554 files=set(ctx.files()) | set(filedata.keys()),
555 filectxfn=filectxfn,
555 filectxfn=filectxfn,
556 user=ctx.user(),
556 user=ctx.user(),
557 date=ctx.date(),
557 date=ctx.date(),
558 extra=ctx.extra(),
558 extra=ctx.extra(),
559 branch=ctx.branch(),
559 branch=ctx.branch(),
560 editor=None)
560 editor=None)
561 sucnode = memctx.commit()
561 sucnode = memctx.commit()
562 prenode = ctx.node()
562 prenode = ctx.node()
563 if prenode == sucnode:
563 if prenode == sucnode:
564 ui.debug('node %s already existed\n' % (ctx.hex()))
564 ui.debug('node %s already existed\n' % (ctx.hex()))
565 else:
565 else:
566 replacements[ctx.node()] = sucnode
566 replacements[ctx.node()] = sucnode
567
567
568 def getfixers(ui):
568 def getfixers(ui):
569 """Returns a map of configured fixer tools indexed by their names
569 """Returns a map of configured fixer tools indexed by their names
570
570
571 Each value is a Fixer object with methods that implement the behavior of the
571 Each value is a Fixer object with methods that implement the behavior of the
572 fixer's config suboptions. Does not validate the config values.
572 fixer's config suboptions. Does not validate the config values.
573 """
573 """
574 result = {}
574 result = {}
575 for name in fixernames(ui):
575 for name in fixernames(ui):
576 result[name] = Fixer()
576 result[name] = Fixer()
577 attrs = ui.configsuboptions('fix', name)[1]
577 attrs = ui.configsuboptions('fix', name)[1]
578 for key in FIXER_ATTRS:
578 for key in FIXER_ATTRS:
579 setattr(result[name], pycompat.sysstr('_' + key),
579 setattr(result[name], pycompat.sysstr('_' + key),
580 attrs.get(key, ''))
580 attrs.get(key, ''))
581 return result
581 return result
582
582
583 def fixernames(ui):
583 def fixernames(ui):
584 """Returns the names of [fix] config options that have suboptions"""
584 """Returns the names of [fix] config options that have suboptions"""
585 names = set()
585 names = set()
586 for k, v in ui.configitems('fix'):
586 for k, v in ui.configitems('fix'):
587 if ':' in k:
587 if ':' in k:
588 names.add(k.split(':', 1)[0])
588 names.add(k.split(':', 1)[0])
589 return names
589 return names
590
590
591 class Fixer(object):
591 class Fixer(object):
592 """Wraps the raw config values for a fixer with methods"""
592 """Wraps the raw config values for a fixer with methods"""
593
593
594 def affects(self, opts, fixctx, path):
594 def affects(self, opts, fixctx, path):
595 """Should this fixer run on the file at the given path and context?"""
595 """Should this fixer run on the file at the given path and context?"""
596 return scmutil.match(fixctx, [self._fileset], opts)(path)
596 return scmutil.match(fixctx, [self._fileset], opts)(path)
597
597
598 def command(self, ui, path, rangesfn):
598 def command(self, ui, path, rangesfn):
599 """A shell command to use to invoke this fixer on the given file/lines
599 """A shell command to use to invoke this fixer on the given file/lines
600
600
601 May return None if there is no appropriate command to run for the given
601 May return None if there is no appropriate command to run for the given
602 parameters.
602 parameters.
603 """
603 """
604 expand = cmdutil.rendercommandtemplate
604 expand = cmdutil.rendercommandtemplate
605 parts = [expand(ui, self._command,
605 parts = [expand(ui, self._command,
606 {'rootpath': path, 'basename': os.path.basename(path)})]
606 {'rootpath': path, 'basename': os.path.basename(path)})]
607 if self._linerange:
607 if self._linerange:
608 ranges = rangesfn()
608 ranges = rangesfn()
609 if not ranges:
609 if not ranges:
610 # No line ranges to fix, so don't run the fixer.
610 # No line ranges to fix, so don't run the fixer.
611 return None
611 return None
612 for first, last in ranges:
612 for first, last in ranges:
613 parts.append(expand(ui, self._linerange,
613 parts.append(expand(ui, self._linerange,
614 {'first': first, 'last': last}))
614 {'first': first, 'last': last}))
615 return ' '.join(parts)
615 return ' '.join(parts)
@@ -1,138 +1,138 b''
1 # logtoprocess.py - send ui.log() data to a subprocess
1 # logtoprocess.py - send ui.log() data to a subprocess
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """send ui.log() data to a subprocess (EXPERIMENTAL)
7 """send ui.log() data to a subprocess (EXPERIMENTAL)
8
8
9 This extension lets you specify a shell command per ui.log() event,
9 This extension lets you specify a shell command per ui.log() event,
10 sending all remaining arguments to as environment variables to that command.
10 sending all remaining arguments to as environment variables to that command.
11
11
12 Each positional argument to the method results in a `MSG[N]` key in the
12 Each positional argument to the method results in a `MSG[N]` key in the
13 environment, starting at 1 (so `MSG1`, `MSG2`, etc.). Each keyword argument
13 environment, starting at 1 (so `MSG1`, `MSG2`, etc.). Each keyword argument
14 is set as a `OPT_UPPERCASE_KEY` variable (so the key is uppercased, and
14 is set as a `OPT_UPPERCASE_KEY` variable (so the key is uppercased, and
15 prefixed with `OPT_`). The original event name is passed in the `EVENT`
15 prefixed with `OPT_`). The original event name is passed in the `EVENT`
16 environment variable, and the process ID of mercurial is given in `HGPID`.
16 environment variable, and the process ID of mercurial is given in `HGPID`.
17
17
18 So given a call `ui.log('foo', 'bar', 'baz', spam='eggs'), a script configured
18 So given a call `ui.log('foo', 'bar', 'baz', spam='eggs'), a script configured
19 for the `foo` event can expect an environment with `MSG1=bar`, `MSG2=baz`, and
19 for the `foo` event can expect an environment with `MSG1=bar`, `MSG2=baz`, and
20 `OPT_SPAM=eggs`.
20 `OPT_SPAM=eggs`.
21
21
22 Scripts are configured in the `[logtoprocess]` section, each key an event name.
22 Scripts are configured in the `[logtoprocess]` section, each key an event name.
23 For example::
23 For example::
24
24
25 [logtoprocess]
25 [logtoprocess]
26 commandexception = echo "$MSG2$MSG3" > /var/log/mercurial_exceptions.log
26 commandexception = echo "$MSG2$MSG3" > /var/log/mercurial_exceptions.log
27
27
28 would log the warning message and traceback of any failed command dispatch.
28 would log the warning message and traceback of any failed command dispatch.
29
29
30 Scripts are run asynchronously as detached daemon processes; mercurial will
30 Scripts are run asynchronously as detached daemon processes; mercurial will
31 not ensure that they exit cleanly.
31 not ensure that they exit cleanly.
32
32
33 """
33 """
34
34
35 from __future__ import absolute_import
35 from __future__ import absolute_import
36
36
37 import itertools
37 import itertools
38 import os
38 import os
39 import subprocess
39 import subprocess
40 import sys
40 import sys
41
41
42 from mercurial import (
42 from mercurial import (
43 encoding,
43 encoding,
44 pycompat,
44 pycompat,
45 )
45 )
46
46
47 from mercurial.utils import (
47 from mercurial.utils import (
48 procutil,
48 procutil,
49 )
49 )
50
50
51 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
51 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
52 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
52 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
53 # be specifying the version(s) of Mercurial they are tested with, or
53 # be specifying the version(s) of Mercurial they are tested with, or
54 # leave the attribute unspecified.
54 # leave the attribute unspecified.
55 testedwith = 'ships-with-hg-core'
55 testedwith = 'ships-with-hg-core'
56
56
57 def uisetup(ui):
57 def uisetup(ui):
58 if pycompat.iswindows:
58 if pycompat.iswindows:
59 # no fork on Windows, but we can create a detached process
59 # no fork on Windows, but we can create a detached process
60 # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863.aspx
60 # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863.aspx
61 # No stdlib constant exists for this value
61 # No stdlib constant exists for this value
62 DETACHED_PROCESS = 0x00000008
62 DETACHED_PROCESS = 0x00000008
63 _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP
63 _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP
64
64
65 def runshellcommand(script, env):
65 def runshellcommand(script, env):
66 # we can't use close_fds *and* redirect stdin. I'm not sure that we
66 # we can't use close_fds *and* redirect stdin. I'm not sure that we
67 # need to because the detached process has no console connection.
67 # need to because the detached process has no console connection.
68 subprocess.Popen(
68 subprocess.Popen(
69 pycompat.rapply(procutil.tonativestr, script),
69 procutil.tonativestr(script),
70 shell=True, env=procutil.tonativeenv(env), close_fds=True,
70 shell=True, env=procutil.tonativeenv(env), close_fds=True,
71 creationflags=_creationflags)
71 creationflags=_creationflags)
72 else:
72 else:
73 def runshellcommand(script, env):
73 def runshellcommand(script, env):
74 # double-fork to completely detach from the parent process
74 # double-fork to completely detach from the parent process
75 # based on http://code.activestate.com/recipes/278731
75 # based on http://code.activestate.com/recipes/278731
76 pid = os.fork()
76 pid = os.fork()
77 if pid:
77 if pid:
78 # parent
78 # parent
79 return
79 return
80 # subprocess.Popen() forks again, all we need to add is
80 # subprocess.Popen() forks again, all we need to add is
81 # flag the new process as a new session.
81 # flag the new process as a new session.
82 if sys.version_info < (3, 2):
82 if sys.version_info < (3, 2):
83 newsession = {'preexec_fn': os.setsid}
83 newsession = {'preexec_fn': os.setsid}
84 else:
84 else:
85 newsession = {'start_new_session': True}
85 newsession = {'start_new_session': True}
86 try:
86 try:
87 # connect stdin to devnull to make sure the subprocess can't
87 # connect stdin to devnull to make sure the subprocess can't
88 # muck up that stream for mercurial.
88 # muck up that stream for mercurial.
89 subprocess.Popen(
89 subprocess.Popen(
90 pycompat.rapply(procutil.tonativestr, script),
90 procutil.tonativestr(script),
91 shell=True, stdin=open(os.devnull, 'r'),
91 shell=True, stdin=open(os.devnull, 'r'),
92 env=procutil.tonativeenv(env),
92 env=procutil.tonativeenv(env),
93 close_fds=True, **newsession)
93 close_fds=True, **newsession)
94 finally:
94 finally:
95 # mission accomplished, this child needs to exit and not
95 # mission accomplished, this child needs to exit and not
96 # continue the hg process here.
96 # continue the hg process here.
97 os._exit(0)
97 os._exit(0)
98
98
99 class logtoprocessui(ui.__class__):
99 class logtoprocessui(ui.__class__):
100 def log(self, event, *msg, **opts):
100 def log(self, event, *msg, **opts):
101 """Map log events to external commands
101 """Map log events to external commands
102
102
103 Arguments are passed on as environment variables.
103 Arguments are passed on as environment variables.
104
104
105 """
105 """
106 script = self.config('logtoprocess', event)
106 script = self.config('logtoprocess', event)
107 if script:
107 if script:
108 if msg:
108 if msg:
109 # try to format the log message given the remaining
109 # try to format the log message given the remaining
110 # arguments
110 # arguments
111 try:
111 try:
112 # Python string formatting with % either uses a
112 # Python string formatting with % either uses a
113 # dictionary *or* tuple, but not both. If we have
113 # dictionary *or* tuple, but not both. If we have
114 # keyword options, assume we need a mapping.
114 # keyword options, assume we need a mapping.
115 formatted = msg[0] % (opts or msg[1:])
115 formatted = msg[0] % (opts or msg[1:])
116 except (TypeError, KeyError):
116 except (TypeError, KeyError):
117 # Failed to apply the arguments, ignore
117 # Failed to apply the arguments, ignore
118 formatted = msg[0]
118 formatted = msg[0]
119 messages = (formatted,) + msg[1:]
119 messages = (formatted,) + msg[1:]
120 else:
120 else:
121 messages = msg
121 messages = msg
122 # positional arguments are listed as MSG[N] keys in the
122 # positional arguments are listed as MSG[N] keys in the
123 # environment
123 # environment
124 msgpairs = (
124 msgpairs = (
125 ('MSG{0:d}'.format(i), str(m))
125 ('MSG{0:d}'.format(i), str(m))
126 for i, m in enumerate(messages, 1))
126 for i, m in enumerate(messages, 1))
127 # keyword arguments get prefixed with OPT_ and uppercased
127 # keyword arguments get prefixed with OPT_ and uppercased
128 optpairs = (
128 optpairs = (
129 ('OPT_{0}'.format(key.upper()), str(value))
129 ('OPT_{0}'.format(key.upper()), str(value))
130 for key, value in opts.iteritems())
130 for key, value in opts.iteritems())
131 env = dict(itertools.chain(encoding.environ.items(),
131 env = dict(itertools.chain(encoding.environ.items(),
132 msgpairs, optpairs),
132 msgpairs, optpairs),
133 EVENT=event, HGPID=str(os.getpid()))
133 EVENT=event, HGPID=str(os.getpid()))
134 runshellcommand(script, env)
134 runshellcommand(script, env)
135 return super(logtoprocessui, self).log(event, *msg, **opts)
135 return super(logtoprocessui, self).log(event, *msg, **opts)
136
136
137 # Replace the class for this instance and all clones created from it:
137 # Replace the class for this instance and all clones created from it:
138 ui.__class__ = logtoprocessui
138 ui.__class__ = logtoprocessui
@@ -1,1774 +1,1774 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 policy,
37 policy,
38 pycompat,
38 pycompat,
39 revsetlang,
39 revsetlang,
40 similar,
40 similar,
41 url,
41 url,
42 util,
42 util,
43 vfs,
43 vfs,
44 )
44 )
45
45
46 from .utils import (
46 from .utils import (
47 procutil,
47 procutil,
48 stringutil,
48 stringutil,
49 )
49 )
50
50
51 if pycompat.iswindows:
51 if pycompat.iswindows:
52 from . import scmwindows as scmplatform
52 from . import scmwindows as scmplatform
53 else:
53 else:
54 from . import scmposix as scmplatform
54 from . import scmposix as scmplatform
55
55
56 parsers = policy.importmod(r'parsers')
56 parsers = policy.importmod(r'parsers')
57
57
58 termsize = scmplatform.termsize
58 termsize = scmplatform.termsize
59
59
60 class status(tuple):
60 class status(tuple):
61 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
61 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
62 and 'ignored' properties are only relevant to the working copy.
62 and 'ignored' properties are only relevant to the working copy.
63 '''
63 '''
64
64
65 __slots__ = ()
65 __slots__ = ()
66
66
67 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
67 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
68 clean):
68 clean):
69 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
69 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
70 ignored, clean))
70 ignored, clean))
71
71
72 @property
72 @property
73 def modified(self):
73 def modified(self):
74 '''files that have been modified'''
74 '''files that have been modified'''
75 return self[0]
75 return self[0]
76
76
77 @property
77 @property
78 def added(self):
78 def added(self):
79 '''files that have been added'''
79 '''files that have been added'''
80 return self[1]
80 return self[1]
81
81
82 @property
82 @property
83 def removed(self):
83 def removed(self):
84 '''files that have been removed'''
84 '''files that have been removed'''
85 return self[2]
85 return self[2]
86
86
87 @property
87 @property
88 def deleted(self):
88 def deleted(self):
89 '''files that are in the dirstate, but have been deleted from the
89 '''files that are in the dirstate, but have been deleted from the
90 working copy (aka "missing")
90 working copy (aka "missing")
91 '''
91 '''
92 return self[3]
92 return self[3]
93
93
94 @property
94 @property
95 def unknown(self):
95 def unknown(self):
96 '''files not in the dirstate that are not ignored'''
96 '''files not in the dirstate that are not ignored'''
97 return self[4]
97 return self[4]
98
98
99 @property
99 @property
100 def ignored(self):
100 def ignored(self):
101 '''files not in the dirstate that are ignored (by _dirignore())'''
101 '''files not in the dirstate that are ignored (by _dirignore())'''
102 return self[5]
102 return self[5]
103
103
104 @property
104 @property
105 def clean(self):
105 def clean(self):
106 '''files that have not been modified'''
106 '''files that have not been modified'''
107 return self[6]
107 return self[6]
108
108
109 def __repr__(self, *args, **kwargs):
109 def __repr__(self, *args, **kwargs):
110 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
110 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
111 r'unknown=%s, ignored=%s, clean=%s>') %
111 r'unknown=%s, ignored=%s, clean=%s>') %
112 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
112 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
113
113
114 def itersubrepos(ctx1, ctx2):
114 def itersubrepos(ctx1, ctx2):
115 """find subrepos in ctx1 or ctx2"""
115 """find subrepos in ctx1 or ctx2"""
116 # Create a (subpath, ctx) mapping where we prefer subpaths from
116 # Create a (subpath, ctx) mapping where we prefer subpaths from
117 # ctx1. The subpaths from ctx2 are important when the .hgsub file
117 # ctx1. The subpaths from ctx2 are important when the .hgsub file
118 # has been modified (in ctx2) but not yet committed (in ctx1).
118 # has been modified (in ctx2) but not yet committed (in ctx1).
119 subpaths = dict.fromkeys(ctx2.substate, ctx2)
119 subpaths = dict.fromkeys(ctx2.substate, ctx2)
120 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
120 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
121
121
122 missing = set()
122 missing = set()
123
123
124 for subpath in ctx2.substate:
124 for subpath in ctx2.substate:
125 if subpath not in ctx1.substate:
125 if subpath not in ctx1.substate:
126 del subpaths[subpath]
126 del subpaths[subpath]
127 missing.add(subpath)
127 missing.add(subpath)
128
128
129 for subpath, ctx in sorted(subpaths.iteritems()):
129 for subpath, ctx in sorted(subpaths.iteritems()):
130 yield subpath, ctx.sub(subpath)
130 yield subpath, ctx.sub(subpath)
131
131
132 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
132 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
133 # status and diff will have an accurate result when it does
133 # status and diff will have an accurate result when it does
134 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
134 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
135 # against itself.
135 # against itself.
136 for subpath in missing:
136 for subpath in missing:
137 yield subpath, ctx2.nullsub(subpath, ctx1)
137 yield subpath, ctx2.nullsub(subpath, ctx1)
138
138
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret-phase and not extinct; those are
    # the ones the user may be surprised were skipped.
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
155
155
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Log the traceback (when ui.traceback is enabled) before the
            # outer handlers turn the exception into a message + exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode, bytes, or any other object; render
        # each case without blowing up.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe is an expected way for the consumer to go away;
            # stay silent.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
272
272
def checknewlabel(repo, lbl, kind):
    '''Validate that lbl is usable as a new label (bookmark/branch/tag name).

    Raises error.Abort for reserved names, forbidden characters, purely
    numeric names, and leading/trailing whitespace. Returns None on success.
    '''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        # A purely numeric label would be ambiguous with a revision number.
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
289
289
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt the dirstate/manifest,
    # which are newline-delimited.
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))
295
295
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        # Only pay for the Windows-portability check when the user asked
        # for a warning or an abort.
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
307
307
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) boolean pair. Raises error.ConfigError when
    ui.portablefilenames holds an unrecognized value.
    '''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    # On Windows non-portable names cannot be created at all, so abort.
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
320
320
class casecollisionauditor(object):
    '''Warn or abort when a new filename differs from a tracked one only
    by case (a problem on case-insensitive filesystems).'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked path once up front; joining and
        # re-splitting on NUL lowers the whole set in a single call.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
344
344
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    # Only revisions at or below maxrev matter for a cache keyed at maxrev.
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
368
368
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; failures deeper
        # in the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Track visited directories by stat identity so symlink cycles
            # are broken; returns True when dirname was not seen before.
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat we cannot detect symlink loops safely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
412
412
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        # Working-directory contexts have no real node; use the sentinel.
        return wdirid
    return node
419
419
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        # Working-directory contexts have no revision number; use the
        # sentinel so comparisons still work.
        return wdirrev
    return rev
427
427
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
433
433
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full 40-char hash in debug mode, short form otherwise.
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))
441
441
def resolvehexnodeidprefix(repo, prefix):
    '''Resolve a (possibly abbreviated) hex nodeid prefix to a binary node.

    Returns None when nothing matches; re-raises AmbiguousPrefixLookupError
    when the prefix is ambiguous and cannot be disambiguated by the
    configured revset.
    '''
    if (prefix.startswith('x') and
            repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        # An 'x' marker forces hash interpretation (vs. a revision number).
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
470
470
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev
        if prefix[0:1] == b'0' or i >= len(repo):
            return False
        return True
    except ValueError:
        # Not numeric at all, so it cannot be a revision number.
        return False
483
483
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                # Prefix 'x' marks this as a hash, not a revision number.
                return 'x' + prefix
            else:
                return prefix

        # Without the 'x' marker, grow the prefix until it can no longer
        # be mistaken for a revision number.
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                # Fast path: the native nodetree computes the shortest
                # unique length directly.
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # Slow path: linear scan of the disambiguation revset per
            # candidate length.
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
552
552
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False
564
564
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # Try a plain (possibly negative) revision number next.
        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # A full 40-character string may be a binary-exact hex nodeid.
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Finally, treat the symbol as an abbreviated hex nodeid prefix.
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
625
625
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)
650
650
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve ``revspec`` to a single changectx.

    Falls back to ``repo[default]`` for an empty spec and raises Abort when
    the revset evaluates to nothing.
    """
    # 0 is a valid revision number, so only fall back to the default for
    # genuinely empty specs (None, '', ...).
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
659
659
def _pairspec(revspec):
    """Report whether the top level of ``revspec`` is a range expression."""
    tree = revsetlang.parse(revspec)
    if not tree:
        # preserve the falsy parse result as-is
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
663
663
def revpair(repo, revs):
    """Resolve user revision specs to an ordered (first, second) ctx pair.

    With no specs, the pair is the working directory parent and the working
    directory itself.
    """
    if not revs:
        return repo['.'], repo[None]

    matched = revrange(repo, revs)

    # Pick endpoints without forcing a full sort of the smartset.
    if not matched:
        first = second = None
    elif matched.isascending():
        first, second = matched.min(), matched.max()
    elif matched.isdescending():
        first, second = matched.max(), matched.min()
    else:
        first, second = matched.first(), matched.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
693
693
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are revision numbers: wrap them in rev() so the revset
    # parser accepts them.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
721
721
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    # A real merge: both parents matter.
    if len(parents) > 1:
        return parents
    # Debug output always shows the single parent, padded with null.
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    # The sole parent is just the previous revision: not worth showing.
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents
737
737
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicitly-kinded patterns are passed through untouched.
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            # glob can choke on patterns that look like broken regexes;
            # treat the pattern as a literal in that case.
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # Nothing matched: keep the original pattern verbatim.
            expanded.append(kindpat)
    return expanded
756
756
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # On Windows the shell does not expand globs for us.
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is bound below; the matcher only invokes this callback after
        # it exists.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # A match-everything matcher means the patterns were not constraining.
    if m.always():
        pats = []
    return m, pats
781
781
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # Same as matchandpats(), but callers here only want the matcher.
    m, _discarded = matchandpats(ctx, pats, opts, globbed, default,
                                 badfn=badfn)
    return m
786
786
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
790
790
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
794
794
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    # A plain path (no glob/re kind) can be canonicalized directly.
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # Otherwise the pattern must resolve to exactly one file at `rev`.
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
808
808
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    repopath = util.normpath(os.path.relpath(filepath, start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(repopath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(repopath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # A directory (but not a symlink) in the way of the backup file itself
    # must go too.
    if origvfs.isdir(repopath) and not origvfs.islink(repopath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(repopath))
        origvfs.rmtree(repopath, forcibly=True)

    return origvfs.join(repopath)
844
844
845 class _containsnode(object):
845 class _containsnode(object):
846 """proxy __contains__(node) to container.__contains__ which accepts revs"""
846 """proxy __contains__(node) to container.__contains__ which accepts revs"""
847
847
848 def __init__(self, repo, revcontainer):
848 def __init__(self, repo, revcontainer):
849 self._torev = repo.changelog.rev
849 self._torev = repo.changelog.rev
850 self._revcontains = revcontainer.__contains__
850 self._revcontains = revcontainer.__contains__
851
851
852 def __contains__(self, node):
852 def __contains__(self, node):
853 return self._revcontains(self._torev(node))
853 return self._revcontains(self._torev(node))
854
854
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # caller-supplied move wins over the computed one
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # Map each new node back to the old nodes it replaces so we can
        # carry phases forward.
        precursors = {}
        for oldnode, newnodes in replacements.items():
            for newnode in newnodes:
                precursors.setdefault(newnode, []).append(oldnode)

        # Process in topological (rev) order so parent phases are final
        # before children are examined.
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
983
983
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and remove missing ones, guessing renames by similarity.

    Returns 1 if any file was rejected or a subrepo addremove failed,
    otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # Recurse into any matched subrepositories first.
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn for explicitly-listed files, but always record
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # Report what will be added/removed (verbose shows even exact matches).
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
1045
1045
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # The lambda closes over 'rejected' by name, so bind it first.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
1074
1074
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # Classify each walked path by its dirstate entry and on-disk presence
    # (st is the stat result, or falsy when the file is gone).
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1103
1103
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping each added path to the removed path it was
    most likely renamed from, for pairs whose similarity score meets
    the ``similarity`` threshold.  A similarity of 0 disables rename
    detection entirely.
    '''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            # report the match unless both endpoints were named explicitly
            # by the user (in which case it stays quiet unless --verbose)
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
1118
1118
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.

    ``renames`` maps new path -> old path, as returned by _findrenames().
    All dirstate mutations happen under the repository's wlock.
    '''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
1128
1128
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.

    ``cwd`` is only used to make the warning message paths relative.
    Single-character dirstate states are compared below: 'a' added,
    'r' removed, '?' unknown, 'm'/'n' merged/normal (same convention as
    the dirstate walk above).
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # the copy cancels out: just make sure dst is tracked normally
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed, so there is no
            # revision to record copy metadata against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1147
1147
def writerequires(opener, requirements):
    """Persist repository requirements through ``opener``.

    Writes one requirement per line, in sorted order, to the
    'requires' file.
    """
    content = ''.join("%s\n" % req for req in sorted(requirements))
    with opener('requires', 'w') as fp:
        fp.write(content)
1152
1152
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    ``cachestat`` holds the last recorded util.cachestat for the path
    (or None), and ``_cacheable`` memoizes whether stat data can be
    used to validate this path at all; None means "not determined yet".
    """
    def __init__(self, path, stat):
        # path: file to watch; stat: whether to record stat data now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat data, but only when stat-based validation is
        # known (or assumed) to work for this path
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether stat data can validate this path; optimistic if unknown."""
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """True if the file changed since the last refresh, or if the
        answer cannot be determined from stat data (conservative)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            # remember the new stat so the next call compares against it
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if it does not exist.

        Any OSError other than ENOENT is propagated.
        """
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1207
1207
class filecacheentry(object):
    """Aggregate stat-based validity tracker for a group of files.

    Keeps one filecachesubentry per path; the group counts as changed
    as soon as any member does.
    """
    def __init__(self, paths, stat=True):
        # one subentry per tracked path
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        # re-stat every tracked file
        for e in self._entries:
            e.refresh()
1224
1224
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the new
    value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the function
    needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        # relative paths (under .hg/) whose changes invalidate the cache
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator entry point: remember the wrapped function and both
        # spellings of its name (sname: native str for __dict__ access,
        # name: bytes key used in obj._filecache)
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            # entry exists: recompute only if the tracked files changed
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            # match normal attribute-deletion semantics
            raise AttributeError(self.sname)
1315
1315
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            # cmd is already bytes; only a native-str conversion is needed
            # for subprocess (no pycompat.rapply required)
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child / close the stream, even on parse errors
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1372
1372
1373 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1373 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1374 if lock is None:
1374 if lock is None:
1375 raise error.LockInheritanceContractViolation(
1375 raise error.LockInheritanceContractViolation(
1376 'lock can only be inherited while held')
1376 'lock can only be inherited while held')
1377 if environ is None:
1377 if environ is None:
1378 environ = {}
1378 environ = {}
1379 with lock.inherit() as locker:
1379 with lock.inherit() as locker:
1380 environ[envvar] = locker
1380 environ[envvar] = locker
1381 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1381 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1382
1382
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    # the lock token is handed to the child via the HG_WLOCK_LOCKER
    # environment variable
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
1391
1391
class progress(object):
    """Context-manager helper around ui.progress().

    Tracks a position within an optional total and forwards every
    update to the ui; leaving the context (or calling complete())
    clears the progress display.
    """
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, exctb):
        # always clear the progress display, even on error
        self.complete()

    def update(self, pos, item="", total=None):
        """Move to absolute position ``pos`` and redraw."""
        assert pos is not None
        # a falsy total leaves the previously known total in place
        self.total = total or self.total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        """Advance the position by ``step`` and redraw."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Clear the progress display."""
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)
1422
1422
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    # any of these format options implies a generaldelta repository
    return any(ui.configbool('format', option)
               for option in ('generaldelta',
                              'usegeneraldelta',
                              'sparse-revlog'))
1430
1430
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    # NOTE: unlike gdinitconfig() above, only format.generaldelta is
    # consulted here
    return ui.configbool('format', 'generaldelta')
1436
1436
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # reserved pseudo-key under which a free-form first line is returned
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            result[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # blank lines (a lone '\n') would otherwise fail the split,
            # so filter them out with strip()
            parsed = dict(line[:-1].split('=', 1)
                          for line in lines if line.strip())
        except ValueError as inst:
            raise error.CorruptedState(str(inst))
        if self.firstlinekey in parsed:
            raise error.CorruptedState(_("%r can't be used as a key")
                                       % self.firstlinekey)
        result.update(parsed)
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not k[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1505
1505
# transaction names (matched by prefix, see txmatch() in
# registersummarycallback) for which obsoleted changesets are summarized
# when the transaction closes
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix) for which the range of incoming
# changesets is reported
_reportnewcssource = [
    'pull',
    'unbundle',
]
1518
1518
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    ``match`` may be None, in which case all files are considered.
    """
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        # no matcher supplied: prefetch everything
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)
1531
1531
# a list of (repo, revs, match) prefetch functions; extensions register
# their hooks here and prefetchfiles() above invokes them
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1537
1537
1538 def registersummarycallback(repo, otr, txnname=''):
1538 def registersummarycallback(repo, otr, txnname=''):
1539 """register a callback to issue a summary after the transaction is closed
1539 """register a callback to issue a summary after the transaction is closed
1540 """
1540 """
1541 def txmatch(sources):
1541 def txmatch(sources):
1542 return any(txnname.startswith(source) for source in sources)
1542 return any(txnname.startswith(source) for source in sources)
1543
1543
1544 categories = []
1544 categories = []
1545
1545
1546 def reportsummary(func):
1546 def reportsummary(func):
1547 """decorator for report callbacks."""
1547 """decorator for report callbacks."""
1548 # The repoview life cycle is shorter than the one of the actual
1548 # The repoview life cycle is shorter than the one of the actual
1549 # underlying repository. So the filtered object can die before the
1549 # underlying repository. So the filtered object can die before the
1550 # weakref is used leading to troubles. We keep a reference to the
1550 # weakref is used leading to troubles. We keep a reference to the
1551 # unfiltered object and restore the filtering when retrieving the
1551 # unfiltered object and restore the filtering when retrieving the
1552 # repository through the weakref.
1552 # repository through the weakref.
1553 filtername = repo.filtername
1553 filtername = repo.filtername
1554 reporef = weakref.ref(repo.unfiltered())
1554 reporef = weakref.ref(repo.unfiltered())
1555 def wrapped(tr):
1555 def wrapped(tr):
1556 repo = reporef()
1556 repo = reporef()
1557 if filtername:
1557 if filtername:
1558 repo = repo.filtered(filtername)
1558 repo = repo.filtered(filtername)
1559 func(repo, tr)
1559 func(repo, tr)
1560 newcat = '%02i-txnreport' % len(categories)
1560 newcat = '%02i-txnreport' % len(categories)
1561 otr.addpostclose(newcat, wrapped)
1561 otr.addpostclose(newcat, wrapped)
1562 categories.append(newcat)
1562 categories.append(newcat)
1563 return wrapped
1563 return wrapped
1564
1564
1565 if txmatch(_reportobsoletedsource):
1565 if txmatch(_reportobsoletedsource):
1566 @reportsummary
1566 @reportsummary
1567 def reportobsoleted(repo, tr):
1567 def reportobsoleted(repo, tr):
1568 obsoleted = obsutil.getobsoleted(repo, tr)
1568 obsoleted = obsutil.getobsoleted(repo, tr)
1569 if obsoleted:
1569 if obsoleted:
1570 repo.ui.status(_('obsoleted %i changesets\n')
1570 repo.ui.status(_('obsoleted %i changesets\n')
1571 % len(obsoleted))
1571 % len(obsoleted))
1572
1572
1573 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1573 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1574 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1574 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1575 instabilitytypes = [
1575 instabilitytypes = [
1576 ('orphan', 'orphan'),
1576 ('orphan', 'orphan'),
1577 ('phase-divergent', 'phasedivergent'),
1577 ('phase-divergent', 'phasedivergent'),
1578 ('content-divergent', 'contentdivergent'),
1578 ('content-divergent', 'contentdivergent'),
1579 ]
1579 ]
1580
1580
1581 def getinstabilitycounts(repo):
1581 def getinstabilitycounts(repo):
1582 filtered = repo.changelog.filteredrevs
1582 filtered = repo.changelog.filteredrevs
1583 counts = {}
1583 counts = {}
1584 for instability, revset in instabilitytypes:
1584 for instability, revset in instabilitytypes:
1585 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1585 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1586 filtered)
1586 filtered)
1587 return counts
1587 return counts
1588
1588
1589 oldinstabilitycounts = getinstabilitycounts(repo)
1589 oldinstabilitycounts = getinstabilitycounts(repo)
1590 @reportsummary
1590 @reportsummary
1591 def reportnewinstabilities(repo, tr):
1591 def reportnewinstabilities(repo, tr):
1592 newinstabilitycounts = getinstabilitycounts(repo)
1592 newinstabilitycounts = getinstabilitycounts(repo)
1593 for instability, revset in instabilitytypes:
1593 for instability, revset in instabilitytypes:
1594 delta = (newinstabilitycounts[instability] -
1594 delta = (newinstabilitycounts[instability] -
1595 oldinstabilitycounts[instability])
1595 oldinstabilitycounts[instability])
1596 msg = getinstabilitymessage(delta, instability)
1596 msg = getinstabilitymessage(delta, instability)
1597 if msg:
1597 if msg:
1598 repo.ui.warn(msg)
1598 repo.ui.warn(msg)
1599
1599
1600 if txmatch(_reportnewcssource):
1600 if txmatch(_reportnewcssource):
1601 @reportsummary
1601 @reportsummary
1602 def reportnewcs(repo, tr):
1602 def reportnewcs(repo, tr):
1603 """Report the range of new revisions pulled/unbundled."""
1603 """Report the range of new revisions pulled/unbundled."""
1604 origrepolen = tr.changes.get('origrepolen', len(repo))
1604 origrepolen = tr.changes.get('origrepolen', len(repo))
1605 if origrepolen >= len(repo):
1605 if origrepolen >= len(repo):
1606 return
1606 return
1607
1607
1608 # Compute the bounds of new revisions' range, excluding obsoletes.
1608 # Compute the bounds of new revisions' range, excluding obsoletes.
1609 unfi = repo.unfiltered()
1609 unfi = repo.unfiltered()
1610 revs = unfi.revs('%d: and not obsolete()', origrepolen)
1610 revs = unfi.revs('%d: and not obsolete()', origrepolen)
1611 if not revs:
1611 if not revs:
1612 # Got only obsoletes.
1612 # Got only obsoletes.
1613 return
1613 return
1614 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1614 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1615
1615
1616 if minrev == maxrev:
1616 if minrev == maxrev:
1617 revrange = minrev
1617 revrange = minrev
1618 else:
1618 else:
1619 revrange = '%s:%s' % (minrev, maxrev)
1619 revrange = '%s:%s' % (minrev, maxrev)
1620 draft = len(repo.revs('%ld and draft()', revs))
1620 draft = len(repo.revs('%ld and draft()', revs))
1621 secret = len(repo.revs('%ld and secret()', revs))
1621 secret = len(repo.revs('%ld and secret()', revs))
1622 if not (draft or secret):
1622 if not (draft or secret):
1623 msg = _('new changesets %s\n') % revrange
1623 msg = _('new changesets %s\n') % revrange
1624 elif draft and secret:
1624 elif draft and secret:
1625 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1625 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1626 msg %= (revrange, draft, secret)
1626 msg %= (revrange, draft, secret)
1627 elif draft:
1627 elif draft:
1628 msg = _('new changesets %s (%d drafts)\n')
1628 msg = _('new changesets %s (%d drafts)\n')
1629 msg %= (revrange, draft)
1629 msg %= (revrange, draft)
1630 elif secret:
1630 elif secret:
1631 msg = _('new changesets %s (%d secrets)\n')
1631 msg = _('new changesets %s (%d secrets)\n')
1632 msg %= (revrange, secret)
1632 msg %= (revrange, secret)
1633 else:
1633 else:
1634 raise error.ProgrammingError('entered unreachable condition')
1634 raise error.ProgrammingError('entered unreachable condition')
1635 repo.ui.status(msg)
1635 repo.ui.status(msg)
1636
1636
1637 @reportsummary
1637 @reportsummary
1638 def reportphasechanges(repo, tr):
1638 def reportphasechanges(repo, tr):
1639 """Report statistics of phase changes for changesets pre-existing
1639 """Report statistics of phase changes for changesets pre-existing
1640 pull/unbundle.
1640 pull/unbundle.
1641 """
1641 """
1642 origrepolen = tr.changes.get('origrepolen', len(repo))
1642 origrepolen = tr.changes.get('origrepolen', len(repo))
1643 phasetracking = tr.changes.get('phases', {})
1643 phasetracking = tr.changes.get('phases', {})
1644 if not phasetracking:
1644 if not phasetracking:
1645 return
1645 return
1646 published = [
1646 published = [
1647 rev for rev, (old, new) in phasetracking.iteritems()
1647 rev for rev, (old, new) in phasetracking.iteritems()
1648 if new == phases.public and rev < origrepolen
1648 if new == phases.public and rev < origrepolen
1649 ]
1649 ]
1650 if not published:
1650 if not published:
1651 return
1651 return
1652 repo.ui.status(_('%d local changesets published\n')
1652 repo.ui.status(_('%d local changesets published\n')
1653 % len(published))
1653 % len(published))
1654
1654
1655 def getinstabilitymessage(delta, instability):
1655 def getinstabilitymessage(delta, instability):
1656 """function to return the message to show warning about new instabilities
1656 """function to return the message to show warning about new instabilities
1657
1657
1658 exists as a separate function so that extension can wrap to show more
1658 exists as a separate function so that extension can wrap to show more
1659 information like how to fix instabilities"""
1659 information like how to fix instabilities"""
1660 if delta > 0:
1660 if delta > 0:
1661 return _('%i new %s changesets\n') % (delta, instability)
1661 return _('%i new %s changesets\n') % (delta, instability)
1662
1662
1663 def nodesummaries(repo, nodes, maxnumnodes=4):
1663 def nodesummaries(repo, nodes, maxnumnodes=4):
1664 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1664 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1665 return ' '.join(short(h) for h in nodes)
1665 return ' '.join(short(h) for h in nodes)
1666 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1666 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1667 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1667 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1668
1668
1669 def enforcesinglehead(repo, tr, desc):
1669 def enforcesinglehead(repo, tr, desc):
1670 """check that no named branch has multiple heads"""
1670 """check that no named branch has multiple heads"""
1671 if desc in ('strip', 'repair'):
1671 if desc in ('strip', 'repair'):
1672 # skip the logic during strip
1672 # skip the logic during strip
1673 return
1673 return
1674 visible = repo.filtered('visible')
1674 visible = repo.filtered('visible')
1675 # possible improvement: we could restrict the check to affected branch
1675 # possible improvement: we could restrict the check to affected branch
1676 for name, heads in visible.branchmap().iteritems():
1676 for name, heads in visible.branchmap().iteritems():
1677 if len(heads) > 1:
1677 if len(heads) > 1:
1678 msg = _('rejecting multiple heads on branch "%s"')
1678 msg = _('rejecting multiple heads on branch "%s"')
1679 msg %= name
1679 msg %= name
1680 hint = _('%d heads: %s')
1680 hint = _('%d heads: %s')
1681 hint %= (len(heads), nodesummaries(repo, heads))
1681 hint %= (len(heads), nodesummaries(repo, heads))
1682 raise error.Abort(msg, hint=hint)
1682 raise error.Abort(msg, hint=hint)
1683
1683
1684 def wrapconvertsink(sink):
1684 def wrapconvertsink(sink):
1685 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1685 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1686 before it is used, whether or not the convert extension was formally loaded.
1686 before it is used, whether or not the convert extension was formally loaded.
1687 """
1687 """
1688 return sink
1688 return sink
1689
1689
1690 def unhidehashlikerevs(repo, specs, hiddentype):
1690 def unhidehashlikerevs(repo, specs, hiddentype):
1691 """parse the user specs and unhide changesets whose hash or revision number
1691 """parse the user specs and unhide changesets whose hash or revision number
1692 is passed.
1692 is passed.
1693
1693
1694 hiddentype can be: 1) 'warn': warn while unhiding changesets
1694 hiddentype can be: 1) 'warn': warn while unhiding changesets
1695 2) 'nowarn': don't warn while unhiding changesets
1695 2) 'nowarn': don't warn while unhiding changesets
1696
1696
1697 returns a repo object with the required changesets unhidden
1697 returns a repo object with the required changesets unhidden
1698 """
1698 """
1699 if not repo.filtername or not repo.ui.configbool('experimental',
1699 if not repo.filtername or not repo.ui.configbool('experimental',
1700 'directaccess'):
1700 'directaccess'):
1701 return repo
1701 return repo
1702
1702
1703 if repo.filtername not in ('visible', 'visible-hidden'):
1703 if repo.filtername not in ('visible', 'visible-hidden'):
1704 return repo
1704 return repo
1705
1705
1706 symbols = set()
1706 symbols = set()
1707 for spec in specs:
1707 for spec in specs:
1708 try:
1708 try:
1709 tree = revsetlang.parse(spec)
1709 tree = revsetlang.parse(spec)
1710 except error.ParseError: # will be reported by scmutil.revrange()
1710 except error.ParseError: # will be reported by scmutil.revrange()
1711 continue
1711 continue
1712
1712
1713 symbols.update(revsetlang.gethashlikesymbols(tree))
1713 symbols.update(revsetlang.gethashlikesymbols(tree))
1714
1714
1715 if not symbols:
1715 if not symbols:
1716 return repo
1716 return repo
1717
1717
1718 revs = _getrevsfromsymbols(repo, symbols)
1718 revs = _getrevsfromsymbols(repo, symbols)
1719
1719
1720 if not revs:
1720 if not revs:
1721 return repo
1721 return repo
1722
1722
1723 if hiddentype == 'warn':
1723 if hiddentype == 'warn':
1724 unfi = repo.unfiltered()
1724 unfi = repo.unfiltered()
1725 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1725 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1726 repo.ui.warn(_("warning: accessing hidden changesets for write "
1726 repo.ui.warn(_("warning: accessing hidden changesets for write "
1727 "operation: %s\n") % revstr)
1727 "operation: %s\n") % revstr)
1728
1728
1729 # we have to use new filtername to separate branch/tags cache until we can
1729 # we have to use new filtername to separate branch/tags cache until we can
1730 # disbale these cache when revisions are dynamically pinned.
1730 # disbale these cache when revisions are dynamically pinned.
1731 return repo.filtered('visible-hidden', revs)
1731 return repo.filtered('visible-hidden', revs)
1732
1732
1733 def _getrevsfromsymbols(repo, symbols):
1733 def _getrevsfromsymbols(repo, symbols):
1734 """parse the list of symbols and returns a set of revision numbers of hidden
1734 """parse the list of symbols and returns a set of revision numbers of hidden
1735 changesets present in symbols"""
1735 changesets present in symbols"""
1736 revs = set()
1736 revs = set()
1737 unfi = repo.unfiltered()
1737 unfi = repo.unfiltered()
1738 unficl = unfi.changelog
1738 unficl = unfi.changelog
1739 cl = repo.changelog
1739 cl = repo.changelog
1740 tiprev = len(unficl)
1740 tiprev = len(unficl)
1741 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1741 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1742 for s in symbols:
1742 for s in symbols:
1743 try:
1743 try:
1744 n = int(s)
1744 n = int(s)
1745 if n <= tiprev:
1745 if n <= tiprev:
1746 if not allowrevnums:
1746 if not allowrevnums:
1747 continue
1747 continue
1748 else:
1748 else:
1749 if n not in cl:
1749 if n not in cl:
1750 revs.add(n)
1750 revs.add(n)
1751 continue
1751 continue
1752 except ValueError:
1752 except ValueError:
1753 pass
1753 pass
1754
1754
1755 try:
1755 try:
1756 s = resolvehexnodeidprefix(unfi, s)
1756 s = resolvehexnodeidprefix(unfi, s)
1757 except (error.LookupError, error.WdirUnsupported):
1757 except (error.LookupError, error.WdirUnsupported):
1758 s = None
1758 s = None
1759
1759
1760 if s is not None:
1760 if s is not None:
1761 rev = unficl.rev(s)
1761 rev = unficl.rev(s)
1762 if rev not in cl:
1762 if rev not in cl:
1763 revs.add(rev)
1763 revs.add(rev)
1764
1764
1765 return revs
1765 return revs
1766
1766
1767 def bookmarkrevs(repo, mark):
1767 def bookmarkrevs(repo, mark):
1768 """
1768 """
1769 Select revisions reachable by a given bookmark
1769 Select revisions reachable by a given bookmark
1770 """
1770 """
1771 return repo.revs("ancestors(bookmark(%s)) - "
1771 return repo.revs("ancestors(bookmark(%s)) - "
1772 "ancestors(head() and not bookmark(%s)) - "
1772 "ancestors(head() and not bookmark(%s)) - "
1773 "ancestors(bookmark() and not bookmark(%s))",
1773 "ancestors(bookmark() and not bookmark(%s))",
1774 mark, mark, mark)
1774 mark, mark, mark)
@@ -1,469 +1,469 b''
1 # procutil.py - utility for managing processes and executable environment
1 # procutil.py - utility for managing processes and executable environment
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import contextlib
12 import contextlib
13 import imp
13 import imp
14 import io
14 import io
15 import os
15 import os
16 import signal
16 import signal
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import time
19 import time
20
20
21 from ..i18n import _
21 from ..i18n import _
22
22
23 from .. import (
23 from .. import (
24 encoding,
24 encoding,
25 error,
25 error,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 )
28 )
29
29
30 osutil = policy.importmod(r'osutil')
30 osutil = policy.importmod(r'osutil')
31
31
32 stderr = pycompat.stderr
32 stderr = pycompat.stderr
33 stdin = pycompat.stdin
33 stdin = pycompat.stdin
34 stdout = pycompat.stdout
34 stdout = pycompat.stdout
35
35
36 def isatty(fp):
36 def isatty(fp):
37 try:
37 try:
38 return fp.isatty()
38 return fp.isatty()
39 except AttributeError:
39 except AttributeError:
40 return False
40 return False
41
41
42 # glibc determines buffering on first write to stdout - if we replace a TTY
42 # glibc determines buffering on first write to stdout - if we replace a TTY
43 # destined stdout with a pipe destined stdout (e.g. pager), we want line
43 # destined stdout with a pipe destined stdout (e.g. pager), we want line
44 # buffering (or unbuffered, on Windows)
44 # buffering (or unbuffered, on Windows)
45 if isatty(stdout):
45 if isatty(stdout):
46 if pycompat.iswindows:
46 if pycompat.iswindows:
47 # Windows doesn't support line buffering
47 # Windows doesn't support line buffering
48 stdout = os.fdopen(stdout.fileno(), r'wb', 0)
48 stdout = os.fdopen(stdout.fileno(), r'wb', 0)
49 else:
49 else:
50 stdout = os.fdopen(stdout.fileno(), r'wb', 1)
50 stdout = os.fdopen(stdout.fileno(), r'wb', 1)
51
51
52 if pycompat.iswindows:
52 if pycompat.iswindows:
53 from .. import windows as platform
53 from .. import windows as platform
54 stdout = platform.winstdout(stdout)
54 stdout = platform.winstdout(stdout)
55 else:
55 else:
56 from .. import posix as platform
56 from .. import posix as platform
57
57
58 findexe = platform.findexe
58 findexe = platform.findexe
59 _gethgcmd = platform.gethgcmd
59 _gethgcmd = platform.gethgcmd
60 getuser = platform.getuser
60 getuser = platform.getuser
61 getpid = os.getpid
61 getpid = os.getpid
62 hidewindow = platform.hidewindow
62 hidewindow = platform.hidewindow
63 quotecommand = platform.quotecommand
63 quotecommand = platform.quotecommand
64 readpipe = platform.readpipe
64 readpipe = platform.readpipe
65 setbinary = platform.setbinary
65 setbinary = platform.setbinary
66 setsignalhandler = platform.setsignalhandler
66 setsignalhandler = platform.setsignalhandler
67 shellquote = platform.shellquote
67 shellquote = platform.shellquote
68 shellsplit = platform.shellsplit
68 shellsplit = platform.shellsplit
69 spawndetached = platform.spawndetached
69 spawndetached = platform.spawndetached
70 sshargs = platform.sshargs
70 sshargs = platform.sshargs
71 testpid = platform.testpid
71 testpid = platform.testpid
72
72
73 try:
73 try:
74 setprocname = osutil.setprocname
74 setprocname = osutil.setprocname
75 except AttributeError:
75 except AttributeError:
76 pass
76 pass
77 try:
77 try:
78 unblocksignal = osutil.unblocksignal
78 unblocksignal = osutil.unblocksignal
79 except AttributeError:
79 except AttributeError:
80 pass
80 pass
81
81
82 closefds = pycompat.isposix
82 closefds = pycompat.isposix
83
83
84 def explainexit(code):
84 def explainexit(code):
85 """return a message describing a subprocess status
85 """return a message describing a subprocess status
86 (codes from kill are negative - not os.system/wait encoding)"""
86 (codes from kill are negative - not os.system/wait encoding)"""
87 if code >= 0:
87 if code >= 0:
88 return _("exited with status %d") % code
88 return _("exited with status %d") % code
89 return _("killed by signal %d") % -code
89 return _("killed by signal %d") % -code
90
90
91 class _pfile(object):
91 class _pfile(object):
92 """File-like wrapper for a stream opened by subprocess.Popen()"""
92 """File-like wrapper for a stream opened by subprocess.Popen()"""
93
93
94 def __init__(self, proc, fp):
94 def __init__(self, proc, fp):
95 self._proc = proc
95 self._proc = proc
96 self._fp = fp
96 self._fp = fp
97
97
98 def close(self):
98 def close(self):
99 # unlike os.popen(), this returns an integer in subprocess coding
99 # unlike os.popen(), this returns an integer in subprocess coding
100 self._fp.close()
100 self._fp.close()
101 return self._proc.wait()
101 return self._proc.wait()
102
102
103 def __iter__(self):
103 def __iter__(self):
104 return iter(self._fp)
104 return iter(self._fp)
105
105
106 def __getattr__(self, attr):
106 def __getattr__(self, attr):
107 return getattr(self._fp, attr)
107 return getattr(self._fp, attr)
108
108
109 def __enter__(self):
109 def __enter__(self):
110 return self
110 return self
111
111
112 def __exit__(self, exc_type, exc_value, exc_tb):
112 def __exit__(self, exc_type, exc_value, exc_tb):
113 self.close()
113 self.close()
114
114
115 def popen(cmd, mode='rb', bufsize=-1):
115 def popen(cmd, mode='rb', bufsize=-1):
116 if mode == 'rb':
116 if mode == 'rb':
117 return _popenreader(cmd, bufsize)
117 return _popenreader(cmd, bufsize)
118 elif mode == 'wb':
118 elif mode == 'wb':
119 return _popenwriter(cmd, bufsize)
119 return _popenwriter(cmd, bufsize)
120 raise error.ProgrammingError('unsupported mode: %r' % mode)
120 raise error.ProgrammingError('unsupported mode: %r' % mode)
121
121
122 def _popenreader(cmd, bufsize):
122 def _popenreader(cmd, bufsize):
123 p = subprocess.Popen(tonativestr(quotecommand(cmd)),
123 p = subprocess.Popen(tonativestr(quotecommand(cmd)),
124 shell=True, bufsize=bufsize,
124 shell=True, bufsize=bufsize,
125 close_fds=closefds,
125 close_fds=closefds,
126 stdout=subprocess.PIPE)
126 stdout=subprocess.PIPE)
127 return _pfile(p, p.stdout)
127 return _pfile(p, p.stdout)
128
128
129 def _popenwriter(cmd, bufsize):
129 def _popenwriter(cmd, bufsize):
130 p = subprocess.Popen(tonativestr(quotecommand(cmd)),
130 p = subprocess.Popen(tonativestr(quotecommand(cmd)),
131 shell=True, bufsize=bufsize,
131 shell=True, bufsize=bufsize,
132 close_fds=closefds,
132 close_fds=closefds,
133 stdin=subprocess.PIPE)
133 stdin=subprocess.PIPE)
134 return _pfile(p, p.stdin)
134 return _pfile(p, p.stdin)
135
135
136 def popen2(cmd, env=None):
136 def popen2(cmd, env=None):
137 # Setting bufsize to -1 lets the system decide the buffer size.
137 # Setting bufsize to -1 lets the system decide the buffer size.
138 # The default for bufsize is 0, meaning unbuffered. This leads to
138 # The default for bufsize is 0, meaning unbuffered. This leads to
139 # poor performance on Mac OS X: http://bugs.python.org/issue4194
139 # poor performance on Mac OS X: http://bugs.python.org/issue4194
140 p = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
140 p = subprocess.Popen(tonativestr(cmd),
141 shell=True, bufsize=-1,
141 shell=True, bufsize=-1,
142 close_fds=closefds,
142 close_fds=closefds,
143 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
143 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
144 env=tonativeenv(env))
144 env=tonativeenv(env))
145 return p.stdin, p.stdout
145 return p.stdin, p.stdout
146
146
147 def popen3(cmd, env=None):
147 def popen3(cmd, env=None):
148 stdin, stdout, stderr, p = popen4(cmd, env)
148 stdin, stdout, stderr, p = popen4(cmd, env)
149 return stdin, stdout, stderr
149 return stdin, stdout, stderr
150
150
151 def popen4(cmd, env=None, bufsize=-1):
151 def popen4(cmd, env=None, bufsize=-1):
152 p = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
152 p = subprocess.Popen(tonativestr(cmd),
153 shell=True, bufsize=bufsize,
153 shell=True, bufsize=bufsize,
154 close_fds=closefds,
154 close_fds=closefds,
155 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
155 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
156 stderr=subprocess.PIPE,
156 stderr=subprocess.PIPE,
157 env=tonativeenv(env))
157 env=tonativeenv(env))
158 return p.stdin, p.stdout, p.stderr, p
158 return p.stdin, p.stdout, p.stderr, p
159
159
160 def pipefilter(s, cmd):
160 def pipefilter(s, cmd):
161 '''filter string S through command CMD, returning its output'''
161 '''filter string S through command CMD, returning its output'''
162 p = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
162 p = subprocess.Popen(tonativestr(cmd),
163 shell=True, close_fds=closefds,
163 shell=True, close_fds=closefds,
164 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
164 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
165 pout, perr = p.communicate(s)
165 pout, perr = p.communicate(s)
166 return pout
166 return pout
167
167
168 def tempfilter(s, cmd):
168 def tempfilter(s, cmd):
169 '''filter string S through a pair of temporary files with CMD.
169 '''filter string S through a pair of temporary files with CMD.
170 CMD is used as a template to create the real command to be run,
170 CMD is used as a template to create the real command to be run,
171 with the strings INFILE and OUTFILE replaced by the real names of
171 with the strings INFILE and OUTFILE replaced by the real names of
172 the temporary files generated.'''
172 the temporary files generated.'''
173 inname, outname = None, None
173 inname, outname = None, None
174 try:
174 try:
175 infd, inname = pycompat.mkstemp(prefix='hg-filter-in-')
175 infd, inname = pycompat.mkstemp(prefix='hg-filter-in-')
176 fp = os.fdopen(infd, r'wb')
176 fp = os.fdopen(infd, r'wb')
177 fp.write(s)
177 fp.write(s)
178 fp.close()
178 fp.close()
179 outfd, outname = pycompat.mkstemp(prefix='hg-filter-out-')
179 outfd, outname = pycompat.mkstemp(prefix='hg-filter-out-')
180 os.close(outfd)
180 os.close(outfd)
181 cmd = cmd.replace('INFILE', inname)
181 cmd = cmd.replace('INFILE', inname)
182 cmd = cmd.replace('OUTFILE', outname)
182 cmd = cmd.replace('OUTFILE', outname)
183 code = system(cmd)
183 code = system(cmd)
184 if pycompat.sysplatform == 'OpenVMS' and code & 1:
184 if pycompat.sysplatform == 'OpenVMS' and code & 1:
185 code = 0
185 code = 0
186 if code:
186 if code:
187 raise error.Abort(_("command '%s' failed: %s") %
187 raise error.Abort(_("command '%s' failed: %s") %
188 (cmd, explainexit(code)))
188 (cmd, explainexit(code)))
189 with open(outname, 'rb') as fp:
189 with open(outname, 'rb') as fp:
190 return fp.read()
190 return fp.read()
191 finally:
191 finally:
192 try:
192 try:
193 if inname:
193 if inname:
194 os.unlink(inname)
194 os.unlink(inname)
195 except OSError:
195 except OSError:
196 pass
196 pass
197 try:
197 try:
198 if outname:
198 if outname:
199 os.unlink(outname)
199 os.unlink(outname)
200 except OSError:
200 except OSError:
201 pass
201 pass
202
202
203 _filtertable = {
203 _filtertable = {
204 'tempfile:': tempfilter,
204 'tempfile:': tempfilter,
205 'pipe:': pipefilter,
205 'pipe:': pipefilter,
206 }
206 }
207
207
208 def filter(s, cmd):
208 def filter(s, cmd):
209 "filter a string through a command that transforms its input to its output"
209 "filter a string through a command that transforms its input to its output"
210 for name, fn in _filtertable.iteritems():
210 for name, fn in _filtertable.iteritems():
211 if cmd.startswith(name):
211 if cmd.startswith(name):
212 return fn(s, cmd[len(name):].lstrip())
212 return fn(s, cmd[len(name):].lstrip())
213 return pipefilter(s, cmd)
213 return pipefilter(s, cmd)
214
214
215 def mainfrozen():
215 def mainfrozen():
216 """return True if we are a frozen executable.
216 """return True if we are a frozen executable.
217
217
218 The code supports py2exe (most common, Windows only) and tools/freeze
218 The code supports py2exe (most common, Windows only) and tools/freeze
219 (portable, not much used).
219 (portable, not much used).
220 """
220 """
221 return (pycompat.safehasattr(sys, "frozen") or # new py2exe
221 return (pycompat.safehasattr(sys, "frozen") or # new py2exe
222 pycompat.safehasattr(sys, "importers") or # old py2exe
222 pycompat.safehasattr(sys, "importers") or # old py2exe
223 imp.is_frozen(u"__main__")) # tools/freeze
223 imp.is_frozen(u"__main__")) # tools/freeze
224
224
225 _hgexecutable = None
225 _hgexecutable = None
226
226
227 def hgexecutable():
227 def hgexecutable():
228 """return location of the 'hg' executable.
228 """return location of the 'hg' executable.
229
229
230 Defaults to $HG or 'hg' in the search path.
230 Defaults to $HG or 'hg' in the search path.
231 """
231 """
232 if _hgexecutable is None:
232 if _hgexecutable is None:
233 hg = encoding.environ.get('HG')
233 hg = encoding.environ.get('HG')
234 mainmod = sys.modules[r'__main__']
234 mainmod = sys.modules[r'__main__']
235 if hg:
235 if hg:
236 _sethgexecutable(hg)
236 _sethgexecutable(hg)
237 elif mainfrozen():
237 elif mainfrozen():
238 if getattr(sys, 'frozen', None) == 'macosx_app':
238 if getattr(sys, 'frozen', None) == 'macosx_app':
239 # Env variable set by py2app
239 # Env variable set by py2app
240 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
240 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
241 else:
241 else:
242 _sethgexecutable(pycompat.sysexecutable)
242 _sethgexecutable(pycompat.sysexecutable)
243 elif (os.path.basename(
243 elif (os.path.basename(
244 pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
244 pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
245 _sethgexecutable(pycompat.fsencode(mainmod.__file__))
245 _sethgexecutable(pycompat.fsencode(mainmod.__file__))
246 else:
246 else:
247 exe = findexe('hg') or os.path.basename(sys.argv[0])
247 exe = findexe('hg') or os.path.basename(sys.argv[0])
248 _sethgexecutable(exe)
248 _sethgexecutable(exe)
249 return _hgexecutable
249 return _hgexecutable
250
250
251 def _sethgexecutable(path):
251 def _sethgexecutable(path):
252 """set location of the 'hg' executable"""
252 """set location of the 'hg' executable"""
253 global _hgexecutable
253 global _hgexecutable
254 _hgexecutable = path
254 _hgexecutable = path
255
255
256 def _testfileno(f, stdf):
256 def _testfileno(f, stdf):
257 fileno = getattr(f, 'fileno', None)
257 fileno = getattr(f, 'fileno', None)
258 try:
258 try:
259 return fileno and fileno() == stdf.fileno()
259 return fileno and fileno() == stdf.fileno()
260 except io.UnsupportedOperation:
260 except io.UnsupportedOperation:
261 return False # fileno() raised UnsupportedOperation
261 return False # fileno() raised UnsupportedOperation
262
262
263 def isstdin(f):
263 def isstdin(f):
264 return _testfileno(f, sys.__stdin__)
264 return _testfileno(f, sys.__stdin__)
265
265
266 def isstdout(f):
266 def isstdout(f):
267 return _testfileno(f, sys.__stdout__)
267 return _testfileno(f, sys.__stdout__)
268
268
269 def protectstdio(uin, uout):
269 def protectstdio(uin, uout):
270 """Duplicate streams and redirect original if (uin, uout) are stdio
270 """Duplicate streams and redirect original if (uin, uout) are stdio
271
271
272 If uin is stdin, it's redirected to /dev/null. If uout is stdout, it's
272 If uin is stdin, it's redirected to /dev/null. If uout is stdout, it's
273 redirected to stderr so the output is still readable.
273 redirected to stderr so the output is still readable.
274
274
275 Returns (fin, fout) which point to the original (uin, uout) fds, but
275 Returns (fin, fout) which point to the original (uin, uout) fds, but
276 may be copy of (uin, uout). The returned streams can be considered
276 may be copy of (uin, uout). The returned streams can be considered
277 "owned" in that print(), exec(), etc. never reach to them.
277 "owned" in that print(), exec(), etc. never reach to them.
278 """
278 """
279 uout.flush()
279 uout.flush()
280 fin, fout = uin, uout
280 fin, fout = uin, uout
281 if uin is stdin:
281 if uin is stdin:
282 newfd = os.dup(uin.fileno())
282 newfd = os.dup(uin.fileno())
283 nullfd = os.open(os.devnull, os.O_RDONLY)
283 nullfd = os.open(os.devnull, os.O_RDONLY)
284 os.dup2(nullfd, uin.fileno())
284 os.dup2(nullfd, uin.fileno())
285 os.close(nullfd)
285 os.close(nullfd)
286 fin = os.fdopen(newfd, r'rb')
286 fin = os.fdopen(newfd, r'rb')
287 if uout is stdout:
287 if uout is stdout:
288 newfd = os.dup(uout.fileno())
288 newfd = os.dup(uout.fileno())
289 os.dup2(stderr.fileno(), uout.fileno())
289 os.dup2(stderr.fileno(), uout.fileno())
290 fout = os.fdopen(newfd, r'wb')
290 fout = os.fdopen(newfd, r'wb')
291 return fin, fout
291 return fin, fout
292
292
293 def restorestdio(uin, uout, fin, fout):
293 def restorestdio(uin, uout, fin, fout):
294 """Restore (uin, uout) streams from possibly duplicated (fin, fout)"""
294 """Restore (uin, uout) streams from possibly duplicated (fin, fout)"""
295 uout.flush()
295 uout.flush()
296 for f, uif in [(fin, uin), (fout, uout)]:
296 for f, uif in [(fin, uin), (fout, uout)]:
297 if f is not uif:
297 if f is not uif:
298 os.dup2(f.fileno(), uif.fileno())
298 os.dup2(f.fileno(), uif.fileno())
299 f.close()
299 f.close()
300
300
301 @contextlib.contextmanager
301 @contextlib.contextmanager
302 def protectedstdio(uin, uout):
302 def protectedstdio(uin, uout):
303 """Run code block with protected standard streams"""
303 """Run code block with protected standard streams"""
304 fin, fout = protectstdio(uin, uout)
304 fin, fout = protectstdio(uin, uout)
305 try:
305 try:
306 yield fin, fout
306 yield fin, fout
307 finally:
307 finally:
308 restorestdio(uin, uout, fin, fout)
308 restorestdio(uin, uout, fin, fout)
309
309
310 def shellenviron(environ=None):
310 def shellenviron(environ=None):
311 """return environ with optional override, useful for shelling out"""
311 """return environ with optional override, useful for shelling out"""
312 def py2shell(val):
312 def py2shell(val):
313 'convert python object into string that is useful to shell'
313 'convert python object into string that is useful to shell'
314 if val is None or val is False:
314 if val is None or val is False:
315 return '0'
315 return '0'
316 if val is True:
316 if val is True:
317 return '1'
317 return '1'
318 return pycompat.bytestr(val)
318 return pycompat.bytestr(val)
319 env = dict(encoding.environ)
319 env = dict(encoding.environ)
320 if environ:
320 if environ:
321 env.update((k, py2shell(v)) for k, v in environ.iteritems())
321 env.update((k, py2shell(v)) for k, v in environ.iteritems())
322 env['HG'] = hgexecutable()
322 env['HG'] = hgexecutable()
323 return env
323 return env
324
324
if pycompat.iswindows:
    def shelltonative(cmd, env):
        # cmd.exe does not understand sh syntax; translate the command
        # string via the platform helper, resolving $VARs from env
        return platform.shelltocmdexe(cmd, shellenviron(env))

    # Popen() on Windows wants native (unicode) strings for env/cmd
    tonativestr = encoding.strfromlocal
else:
    def shelltonative(cmd, env):
        # POSIX shells consume the byte string as-is; no translation
        return cmd

    # bytes are already the native form on POSIX
    tonativestr = pycompat.identity

def tonativeenv(env):
    '''convert the environment from bytes to strings suitable for Popen(), etc.
    '''
    return pycompat.rapply(tonativestr, env)
339 return pycompat.rapply(tonativestr, env)
340
340
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.

    Returns the child's exit code (normalized to 0 on OpenVMS success).'''
    try:
        # flush our buffered output so it is not interleaved after the
        # child's output on the shared stream
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or isstdout(out):
        # child can inherit our stdout/stderr directly
        rc = subprocess.call(tonativestr(cmd),
                             shell=True, close_fds=closefds,
                             env=tonativeenv(env),
                             cwd=pycompat.rapply(tonativestr, cwd))
    else:
        # capture combined stdout+stderr and copy it line by line into out
        proc = subprocess.Popen(tonativestr(cmd),
                                shell=True, close_fds=closefds,
                                env=tonativeenv(env),
                                cwd=pycompat.rapply(tonativestr, cwd),
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # OpenVMS signals success in the low bit; map to POSIX-style 0
        rc = 0
    return rc
372
372
def gui():
    '''Are we running in a GUI?'''
    if not pycompat.isdarwin:
        # Windows always has a GUI; elsewhere one is available iff an
        # X11 display is reachable.
        return pycompat.iswindows or encoding.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in encoding.environ:
        # handle SSH access to a box where the user is logged in
        return False
    if getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
387
387
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return _gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
402
402
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the child; record its (pid, status) so the poll loop
        # below can detect an early death
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)  # absent on Windows
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # re-test condfn() after noticing the death, to close the
            # race where the child satisfied the condition just before
            # exiting
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore the previous SIGCHLD disposition no matter what
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
437
437
@contextlib.contextmanager
def uninterruptable(warn):
    """Inhibit SIGINT handling on a region of code.

    Note that if this is called in a non-main thread, it turns into a no-op.

    Args:
        warn: A callable which takes no arguments, and returns True if the
              previous signal handling should be restored.
    """

    # one-element list so the nested handler can both read and clear it;
    # an empty list means "already restored, don't restore again"
    oldsiginthandler = [signal.getsignal(signal.SIGINT)]
    # non-empty once a deferred KeyboardInterrupt should be raised on exit
    shouldbail = []

    def disabledsiginthandler(*args):
        if warn():
            # caller asked us to give up: reinstate the old handler now
            # and remember to raise KeyboardInterrupt when the region ends
            signal.signal(signal.SIGINT, oldsiginthandler[0])
            del oldsiginthandler[0]
            shouldbail.append(True)

    try:
        try:
            signal.signal(signal.SIGINT, disabledsiginthandler)
        except ValueError:
            # wrong thread, oh well, we tried
            del oldsiginthandler[0]
        yield
    finally:
        # only restore if the handler (or the ValueError path) has not
        # already done so
        if oldsiginthandler:
            signal.signal(signal.SIGINT, oldsiginthandler[0])
        if shouldbail:
            # deliver the interrupt that was suppressed inside the region
            raise KeyboardInterrupt
General Comments 0
You need to be logged in to leave comments. Login now