py3: remove a couple of superfluous calls to pycompat.rapply()...
Matt Harbison
r39868:f1d60214 default
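The rationale for the change: pycompat.rapply() recursively applies a function across nested lists, tuples, and dicts, but at each of the call sites below the argument is already a single byte string, so the recursive wrapper adds nothing over calling procutil.tonativestr() directly. A minimal sketch of that equivalence, using simplified stand-ins (`rapply` and `tonativestr` here are illustrative reimplementations, not the Mercurial APIs):

```python
def rapply(f, xs):
    """Recursively apply f to every leaf of a nested container."""
    if isinstance(xs, (list, tuple)):
        return type(xs)(rapply(f, x) for x in xs)
    if isinstance(xs, dict):
        return {k: rapply(f, v) for k, v in xs.items()}
    return f(xs)  # flat value: reduces to one direct call

def tonativestr(b):
    """Stand-in for procutil.tonativestr: bytes -> native str on py3."""
    return b.decode('latin1') if isinstance(b, bytes) else b

cmdline = b'git log --oneline'
assert rapply(tonativestr, cmdline) == tonativestr(cmdline)
```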
diff --git a/hgext/convert/common.py b/hgext/convert/common.py
@@ -1,553 +1,553 @@
1 1 # common.py - common code for the convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import base64
10 10 import datetime
11 11 import errno
12 12 import os
13 13 import re
14 14 import shlex
15 15 import subprocess
16 16
17 17 from mercurial.i18n import _
18 18 from mercurial import (
19 19 encoding,
20 20 error,
21 21 phases,
22 22 pycompat,
23 23 util,
24 24 )
25 25 from mercurial.utils import (
26 26 procutil,
27 27 )
28 28
29 29 pickle = util.pickle
30 30 propertycache = util.propertycache
31 31
32 32 def _encodeornone(d):
33 33 if d is None:
34 34 return
35 35 return d.encode('latin1')
36 36
37 37 class _shlexpy3proxy(object):
38 38
39 39 def __init__(self, l):
40 40 self._l = l
41 41
42 42 def __iter__(self):
43 43 return (_encodeornone(v) for v in self._l)
44 44
45 45 def get_token(self):
46 46 return _encodeornone(self._l.get_token())
47 47
48 48 @property
49 49 def infile(self):
50 50 return self._l.infile or '<unknown>'
51 51
52 52 @property
53 53 def lineno(self):
54 54 return self._l.lineno
55 55
56 56 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
57 57 if data is None:
58 58 if pycompat.ispy3:
59 59 data = open(filepath, 'r', encoding=r'latin1')
60 60 else:
61 61 data = open(filepath, 'r')
62 62 else:
63 63 if filepath is not None:
64 64 raise error.ProgrammingError(
65 65 'shlexer only accepts data or filepath, not both')
66 66 if pycompat.ispy3:
67 67 data = data.decode('latin1')
68 68 l = shlex.shlex(data, infile=filepath, posix=True)
69 69 if whitespace is not None:
70 70 l.whitespace_split = True
71 71 if pycompat.ispy3:
72 72 l.whitespace += whitespace.decode('latin1')
73 73 else:
74 74 l.whitespace += whitespace
75 75 if wordchars is not None:
76 76 if pycompat.ispy3:
77 77 l.wordchars += wordchars.decode('latin1')
78 78 else:
79 79 l.wordchars += wordchars
80 80 if pycompat.ispy3:
81 81 return _shlexpy3proxy(l)
82 82 return l
83 83
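On Python 3, shlexer() decodes its input as latin-1 so the stdlib shlex (which lexes only str) can tokenize it, and _shlexpy3proxy re-encodes every token back to bytes for the caller. A hedged standalone sketch of that round-trip using only the standard library (the sample input is invented):

```python
import shlex

# decode bytes -> lex as str -> re-encode each token to bytes;
# latin-1 round-trips every byte value 0-255 losslessly
data = b'convert splicemap "a b" c'
l = shlex.shlex(data.decode('latin1'), posix=True)
l.whitespace_split = True
tokens = [t.encode('latin1') for t in l]
assert tokens == [b'convert', b'splicemap', b'a b', b'c']
```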
84 84 def encodeargs(args):
85 85 def encodearg(s):
86 86 lines = base64.encodestring(s)
87 87 lines = [l.splitlines()[0] for l in lines]
88 88 return ''.join(lines)
89 89
90 90 s = pickle.dumps(args)
91 91 return encodearg(s)
92 92
93 93 def decodeargs(s):
94 94 s = base64.decodestring(s)
95 95 return pickle.loads(s)
96 96
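encodeargs() and decodeargs() shuttle an argument list through pickle and base64, with newlines stripped so the result survives as a single line of text. A hedged round-trip sketch in Python 3 spellings (base64.encodebytes/decodebytes supersede the deprecated encodestring/decodestring used above):

```python
import base64
import pickle

def encodeargs(args):
    # pickle, base64-encode, and drop the newlines that
    # encodebytes() inserts, so the result fits on one line
    return b''.join(base64.encodebytes(pickle.dumps(args)).split())

def decodeargs(s):
    return pickle.loads(base64.decodebytes(s))

args = ['convert', b'rev-a', {'key': 'value'}]
assert decodeargs(encodeargs(args)) == args
```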
97 97 class MissingTool(Exception):
98 98 pass
99 99
100 100 def checktool(exe, name=None, abort=True):
101 101 name = name or exe
102 102 if not procutil.findexe(exe):
103 103 if abort:
104 104 exc = error.Abort
105 105 else:
106 106 exc = MissingTool
107 107 raise exc(_('cannot find required "%s" tool') % name)
108 108
109 109 class NoRepo(Exception):
110 110 pass
111 111
112 112 SKIPREV = 'SKIP'
113 113
114 114 class commit(object):
115 115 def __init__(self, author, date, desc, parents, branch=None, rev=None,
116 116 extra=None, sortkey=None, saverev=True, phase=phases.draft,
117 117 optparents=None):
118 118 self.author = author or 'unknown'
119 119 self.date = date or '0 0'
120 120 self.desc = desc
121 121 self.parents = parents # will be converted and used as parents
122 122 self.optparents = optparents or [] # will be used if already converted
123 123 self.branch = branch
124 124 self.rev = rev
125 125 self.extra = extra or {}
126 126 self.sortkey = sortkey
127 127 self.saverev = saverev
128 128 self.phase = phase
129 129
130 130 class converter_source(object):
131 131 """Conversion source interface"""
132 132
133 133 def __init__(self, ui, repotype, path=None, revs=None):
134 134 """Initialize conversion source (or raise NoRepo("message")
135 135 exception if path is not a valid repository)"""
136 136 self.ui = ui
137 137 self.path = path
138 138 self.revs = revs
139 139 self.repotype = repotype
140 140
141 141 self.encoding = 'utf-8'
142 142
143 143 def checkhexformat(self, revstr, mapname='splicemap'):
144 144 """ fails if revstr is not a 40 byte hex. mercurial and git both uses
145 145 such format for their revision numbering
146 146 """
147 147 if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
148 148 raise error.Abort(_('%s entry %s is not a valid revision'
149 149 ' identifier') % (mapname, revstr))
150 150
151 151 def before(self):
152 152 pass
153 153
154 154 def after(self):
155 155 pass
156 156
157 157 def targetfilebelongstosource(self, targetfilename):
158 158 """Returns true if the given targetfile belongs to the source repo. This
159 159 is useful when only a subdirectory of the target belongs to the source
160 160 repo."""
161 161 # For normal full repo converts, this is always True.
162 162 return True
163 163
164 164 def setrevmap(self, revmap):
165 165 """set the map of already-converted revisions"""
166 166
167 167 def getheads(self):
168 168 """Return a list of this repository's heads"""
169 169 raise NotImplementedError
170 170
171 171 def getfile(self, name, rev):
172 172 """Return a pair (data, mode) where data is the file content
173 173 as a string and mode is one of '', 'x' or 'l'. rev is the
174 174 identifier returned by a previous call to getchanges().
175 175 Data is None if file is missing/deleted in rev.
176 176 """
177 177 raise NotImplementedError
178 178
179 179 def getchanges(self, version, full):
180 180 """Returns a tuple of (files, copies, cleanp2).
181 181
182 182 files is a sorted list of (filename, id) tuples for all files
183 183 changed between version and its first parent returned by
184 184 getcommit(). If full, all files in that revision are returned.
185 185 id is the source revision id of the file.
186 186
187 187 copies is a dictionary of dest: source
188 188
189 189 cleanp2 is the set of filenames that are clean against p2.
190 190 (Files that are clean against p1 are already not in files (unless
191 191 full). This makes it possible to handle p2 clean files similarly.)
192 192 """
193 193 raise NotImplementedError
194 194
195 195 def getcommit(self, version):
196 196 """Return the commit object for version"""
197 197 raise NotImplementedError
198 198
199 199 def numcommits(self):
200 200 """Return the number of commits in this source.
201 201
202 202 If unknown, return None.
203 203 """
204 204 return None
205 205
206 206 def gettags(self):
207 207 """Return the tags as a dictionary of name: revision
208 208
209 209 Tag names must be UTF-8 strings.
210 210 """
211 211 raise NotImplementedError
212 212
213 213 def recode(self, s, encoding=None):
214 214 if not encoding:
215 215 encoding = self.encoding or 'utf-8'
216 216
217 217 if isinstance(s, pycompat.unicode):
218 218 return s.encode("utf-8")
219 219 try:
220 220 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
221 221 except UnicodeError:
222 222 try:
223 223 return s.decode("latin-1").encode("utf-8")
224 224 except UnicodeError:
225 225 return s.decode(pycompat.sysstr(encoding),
226 226 "replace").encode("utf-8")
227 227
228 228 def getchangedfiles(self, rev, i):
229 229 """Return the files changed by rev compared to parent[i].
230 230
231 231 i is an index selecting one of the parents of rev. The return
232 232 value should be the list of files that are different in rev and
233 233 this parent.
234 234
235 235 If rev has no parents, i is None.
236 236
237 237 This function is only needed to support --filemap
238 238 """
239 239 raise NotImplementedError
240 240
241 241 def converted(self, rev, sinkrev):
242 242 '''Notify the source that a revision has been converted.'''
243 243
244 244 def hasnativeorder(self):
245 245 """Return true if this source has a meaningful, native revision
246 246 order. For instance, Mercurial revisions are stored sequentially
247 247 while there is no such global ordering with Darcs.
248 248 """
249 249 return False
250 250
251 251 def hasnativeclose(self):
252 252 """Return true if this source has ability to close branch.
253 253 """
254 254 return False
255 255
256 256 def lookuprev(self, rev):
257 257 """If rev is a meaningful revision reference in source, return
258 258 the referenced identifier in the same format used by getcommit().
259 259 return None otherwise.
260 260 """
261 261 return None
262 262
263 263 def getbookmarks(self):
264 264 """Return the bookmarks as a dictionary of name: revision
265 265
266 266 Bookmark names are to be UTF-8 strings.
267 267 """
268 268 return {}
269 269
270 270 def checkrevformat(self, revstr, mapname='splicemap'):
271 271 """revstr is a string that describes a revision in the given
272 272 source control system. Return true if revstr has correct
273 273 format.
274 274 """
275 275 return True
276 276
277 277 class converter_sink(object):
278 278 """Conversion sink (target) interface"""
279 279
280 280 def __init__(self, ui, repotype, path):
281 281 """Initialize conversion sink (or raise NoRepo("message")
282 282 exception if path is not a valid repository)
283 283
284 284 created is a list of paths to remove if a fatal error occurs
285 285 later"""
286 286 self.ui = ui
287 287 self.path = path
288 288 self.created = []
289 289 self.repotype = repotype
290 290
291 291 def revmapfile(self):
292 292 """Path to a file that will contain lines
293 293 source_rev_id sink_rev_id
294 294 mapping equivalent revision identifiers for each system."""
295 295 raise NotImplementedError
296 296
297 297 def authorfile(self):
298 298 """Path to a file that will contain lines
299 299 srcauthor=dstauthor
300 300 mapping equivalent author identifiers for each system."""
301 301 return None
302 302
303 303 def putcommit(self, files, copies, parents, commit, source, revmap, full,
304 304 cleanp2):
305 305 """Create a revision with all changed files listed in 'files'
306 306 and having listed parents. 'commit' is a commit object
307 307 containing at a minimum the author, date, and message for this
308 308 changeset. 'files' is a list of (path, version) tuples,
309 309 'copies' is a dictionary mapping destinations to sources,
310 310 'source' is the source repository, and 'revmap' is a mapfile
311 311 of source revisions to converted revisions. Only getfile() and
312 312 lookuprev() should be called on 'source'. 'full' means that 'files'
313 313 is complete and all other files should be removed.
314 314 'cleanp2' is a set of the filenames that are unchanged from p2
315 315 (only in the common merge case where there are two parents).
316 316
317 317 Note that the sink repository is not told to update itself to
318 318 a particular revision (or even what that revision would be)
319 319 before it receives the file data.
320 320 """
321 321 raise NotImplementedError
322 322
323 323 def puttags(self, tags):
324 324 """Put tags into sink.
325 325
326 326 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
327 327 Return a pair (tag_revision, tag_parent_revision), or (None, None)
328 328 if nothing was changed.
329 329 """
330 330 raise NotImplementedError
331 331
332 332 def setbranch(self, branch, pbranches):
333 333 """Set the current branch name. Called before the first putcommit
334 334 on the branch.
335 335 branch: branch name for subsequent commits
336 336 pbranches: (converted parent revision, parent branch) tuples"""
337 337
338 338 def setfilemapmode(self, active):
339 339 """Tell the destination that we're using a filemap
340 340
341 341 Some converter_sources (svn in particular) can claim that a file
342 342 was changed in a revision, even if there was no change. This method
343 343 tells the destination that we're using a filemap and that it should
344 344 filter empty revisions.
345 345 """
346 346
347 347 def before(self):
348 348 pass
349 349
350 350 def after(self):
351 351 pass
352 352
353 353 def putbookmarks(self, bookmarks):
354 354 """Put bookmarks into sink.
355 355
356 356 bookmarks: {bookmarkname: sink_rev_id, ...}
357 357 where bookmarkname is a UTF-8 string.
358 358 """
359 359
360 360 def hascommitfrommap(self, rev):
361 361 """Return False if a rev mentioned in a filemap is known to not be
362 362 present."""
363 363 raise NotImplementedError
364 364
365 365 def hascommitforsplicemap(self, rev):
366 366 """This method is for the special needs for splicemap handling and not
367 367 for general use. Returns True if the sink contains rev, aborts on some
368 368 special cases."""
369 369 raise NotImplementedError
370 370
371 371 class commandline(object):
372 372 def __init__(self, ui, command):
373 373 self.ui = ui
374 374 self.command = command
375 375
376 376 def prerun(self):
377 377 pass
378 378
379 379 def postrun(self):
380 380 pass
381 381
382 382 def _cmdline(self, cmd, *args, **kwargs):
383 383 kwargs = pycompat.byteskwargs(kwargs)
384 384 cmdline = [self.command, cmd] + list(args)
385 385 for k, v in kwargs.iteritems():
386 386 if len(k) == 1:
387 387 cmdline.append('-' + k)
388 388 else:
389 389 cmdline.append('--' + k.replace('_', '-'))
390 390 try:
391 391 if len(k) == 1:
392 392 cmdline.append('' + v)
393 393 else:
394 394 cmdline[-1] += '=' + v
395 395 except TypeError:
396 396 pass
397 397 cmdline = [procutil.shellquote(arg) for arg in cmdline]
398 398 if not self.ui.debugflag:
399 399 cmdline += ['2>', pycompat.bytestr(os.devnull)]
400 400 cmdline = ' '.join(cmdline)
401 401 return cmdline
402 402
403 403 def _run(self, cmd, *args, **kwargs):
404 404 def popen(cmdline):
405 - p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmdline),
405 + p = subprocess.Popen(procutil.tonativestr(cmdline),
406 406 shell=True, bufsize=-1,
407 407 close_fds=procutil.closefds,
408 408 stdout=subprocess.PIPE)
409 409 return p
410 410 return self._dorun(popen, cmd, *args, **kwargs)
411 411
412 412 def _run2(self, cmd, *args, **kwargs):
413 413 return self._dorun(procutil.popen2, cmd, *args, **kwargs)
414 414
415 415 def _run3(self, cmd, *args, **kwargs):
416 416 return self._dorun(procutil.popen3, cmd, *args, **kwargs)
417 417
418 418 def _dorun(self, openfunc, cmd, *args, **kwargs):
419 419 cmdline = self._cmdline(cmd, *args, **kwargs)
420 420 self.ui.debug('running: %s\n' % (cmdline,))
421 421 self.prerun()
422 422 try:
423 423 return openfunc(cmdline)
424 424 finally:
425 425 self.postrun()
426 426
427 427 def run(self, cmd, *args, **kwargs):
428 428 p = self._run(cmd, *args, **kwargs)
429 429 output = p.communicate()[0]
430 430 self.ui.debug(output)
431 431 return output, p.returncode
432 432
433 433 def runlines(self, cmd, *args, **kwargs):
434 434 p = self._run(cmd, *args, **kwargs)
435 435 output = p.stdout.readlines()
436 436 p.wait()
437 437 self.ui.debug(''.join(output))
438 438 return output, p.returncode
439 439
440 440 def checkexit(self, status, output=''):
441 441 if status:
442 442 if output:
443 443 self.ui.warn(_('%s error:\n') % self.command)
444 444 self.ui.warn(output)
445 445 msg = procutil.explainexit(status)
446 446 raise error.Abort('%s %s' % (self.command, msg))
447 447
448 448 def run0(self, cmd, *args, **kwargs):
449 449 output, status = self.run(cmd, *args, **kwargs)
450 450 self.checkexit(status, output)
451 451 return output
452 452
453 453 def runlines0(self, cmd, *args, **kwargs):
454 454 output, status = self.runlines(cmd, *args, **kwargs)
455 455 self.checkexit(status, ''.join(output))
456 456 return output
457 457
458 458 @propertycache
459 459 def argmax(self):
460 460 # POSIX requires at least 4096 bytes for ARG_MAX
461 461 argmax = 4096
462 462 try:
463 463 argmax = os.sysconf(r"SC_ARG_MAX")
464 464 except (AttributeError, ValueError):
465 465 pass
466 466
467 467 # Windows shells impose their own limits on command line length,
468 468 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
469 469 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
470 470 # details about cmd.exe limitations.
471 471
472 472 # Since ARG_MAX is for command line _and_ environment, lower our limit
473 473 # (and make happy Windows shells while doing this).
474 474 return argmax // 2 - 1
475 475
476 476 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
477 477 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
478 478 limit = self.argmax - cmdlen
479 479 numbytes = 0
480 480 fl = []
481 481 for fn in arglist:
482 482 b = len(fn) + 3
483 483 if numbytes + b < limit or len(fl) == 0:
484 484 fl.append(fn)
485 485 numbytes += b
486 486 else:
487 487 yield fl
488 488 fl = [fn]
489 489 numbytes = b
490 490 if fl:
491 491 yield fl
492 492
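_limit_arglist() batches filenames so each command that xargs() builds stays under argmax: it budgets len(fn) + 3 bytes per name (room for quoting and a separator) and always emits at least one name per chunk, even if that single name exceeds the limit. A standalone sketch of the same batching logic (`limit_arglist` is a simplified stand-in that ignores the base command length):

```python
def limit_arglist(arglist, limit):
    """Yield chunks of arglist whose budgeted size stays under limit."""
    numbytes, fl = 0, []
    for fn in arglist:
        b = len(fn) + 3  # budget for quoting plus a separator
        if numbytes + b < limit or not fl:
            fl.append(fn)
            numbytes += b
        else:
            yield fl
            fl, numbytes = [fn], b
    if fl:
        yield fl

# five 10-byte names at a 30-byte budget pack two per chunk
chunks = list(limit_arglist([b'aaaaaaaaaa'] * 5, limit=30))
assert [len(c) for c in chunks] == [2, 2, 1]
```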
493 493 def xargs(self, arglist, cmd, *args, **kwargs):
494 494 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
495 495 self.run0(cmd, *(list(args) + l), **kwargs)
496 496
497 497 class mapfile(dict):
498 498 def __init__(self, ui, path):
499 499 super(mapfile, self).__init__()
500 500 self.ui = ui
501 501 self.path = path
502 502 self.fp = None
503 503 self.order = []
504 504 self._read()
505 505
506 506 def _read(self):
507 507 if not self.path:
508 508 return
509 509 try:
510 510 fp = open(self.path, 'rb')
511 511 except IOError as err:
512 512 if err.errno != errno.ENOENT:
513 513 raise
514 514 return
515 515 for i, line in enumerate(util.iterfile(fp)):
516 516 line = line.splitlines()[0].rstrip()
517 517 if not line:
518 518 # Ignore blank lines
519 519 continue
520 520 try:
521 521 key, value = line.rsplit(' ', 1)
522 522 except ValueError:
523 523 raise error.Abort(
524 524 _('syntax error in %s(%d): key/value pair expected')
525 525 % (self.path, i + 1))
526 526 if key not in self:
527 527 self.order.append(key)
528 528 super(mapfile, self).__setitem__(key, value)
529 529 fp.close()
530 530
531 531 def __setitem__(self, key, value):
532 532 if self.fp is None:
533 533 try:
534 534 self.fp = open(self.path, 'ab')
535 535 except IOError as err:
536 536 raise error.Abort(
537 537 _('could not open map file %r: %s') %
538 538 (self.path, encoding.strtolocal(err.strerror)))
539 539 self.fp.write(util.tonativeeol('%s %s\n' % (key, value)))
540 540 self.fp.flush()
541 541 super(mapfile, self).__setitem__(key, value)
542 542
543 543 def close(self):
544 544 if self.fp:
545 545 self.fp.close()
546 546 self.fp = None
547 547
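mapfile persists a dict as append-only "key value" lines (used for revision and author maps); on read, later entries for a key overwrite earlier ones. A minimal stand-in for the on-disk format (`readmap` is illustrative; the real class additionally preserves insertion order, appends through an open file handle, and reports syntax errors with file and line context):

```python
import io

def readmap(fp):
    """Parse 'key value' lines; the value is everything after the
    last space, so keys may themselves contain spaces."""
    mapping = {}
    for line in fp:
        line = line.rstrip('\n')
        if line:
            key, value = line.rsplit(' ', 1)
            mapping[key] = value
    return mapping

fp = io.StringIO('src-rev-1 sink-rev-1\nsrc-rev-2 sink-rev-2\n')
assert readmap(fp) == {'src-rev-1': 'sink-rev-1',
                       'src-rev-2': 'sink-rev-2'}
```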
548 548 def makedatetimestamp(t):
549 549 """Like dateutil.makedate() but for time t instead of current time"""
550 550 delta = (datetime.datetime.utcfromtimestamp(t) -
551 551 datetime.datetime.fromtimestamp(t))
552 552 tz = delta.days * 86400 + delta.seconds
553 553 return t, tz
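makedatetimestamp() derives the local timezone offset for epoch time t by subtracting the naive local rendering of t from the naive UTC rendering, matching Mercurial's (unixtime, offset) date convention of counting seconds west of UTC. A self-contained copy with a small sanity check:

```python
import datetime

def makedatetimestamp(t):
    # naive UTC minus naive local: positive west of UTC,
    # negative east of UTC
    delta = (datetime.datetime.utcfromtimestamp(t) -
             datetime.datetime.fromtimestamp(t))
    return t, delta.days * 86400 + delta.seconds

t, tz = makedatetimestamp(0)
assert tz % 60 == 0  # real-world offsets are whole minutes
```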
diff --git a/hgext/fix.py b/hgext/fix.py
@@ -1,615 +1,615 @@
1 1 # fix - rewrite file content in changesets and working copy
2 2 #
3 3 # Copyright 2018 Google LLC.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8 8
9 9 Provides a command that runs configured tools on the contents of modified files,
10 10 writing back any fixes to the working copy or replacing changesets.
11 11
12 12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 13 formatting fixes to modified lines in C++ code::
14 14
15 15 [fix]
16 16 clang-format:command=clang-format --assume-filename={rootpath}
17 17 clang-format:linerange=--lines={first}:{last}
18 18 clang-format:fileset=set:**.cpp or **.hpp
19 19
20 20 The :command suboption forms the first part of the shell command that will be
21 21 used to fix a file. The content of the file is passed on standard input, and the
22 22 fixed file content is expected on standard output. If there is any output on
23 23 standard error, the file will not be affected. Some values may be substituted
24 24 into the command::
25 25
26 26 {rootpath} The path of the file being fixed, relative to the repo root
27 27 {basename} The name of the file being fixed, without the directory path
28 28
29 29 If the :linerange suboption is set, the tool will only be run if there are
30 30 changed lines in a file. The value of this suboption is appended to the shell
31 31 command once for every range of changed lines in the file. Some values may be
32 32 substituted into the command::
33 33
34 34 {first} The 1-based line number of the first line in the modified range
35 35 {last} The 1-based line number of the last line in the modified range
36 36
37 37 The :fileset suboption determines which files will be passed through each
38 38 configured tool. See :hg:`help fileset` for possible values. If there are file
39 39 arguments to :hg:`fix`, the intersection of these filesets is used.
40 40
41 41 There is also a configurable limit for the maximum size of file that will be
42 42 processed by :hg:`fix`::
43 43
44 44 [fix]
45 45 maxfilesize=2MB
46 46
47 47 """
48 48
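As a concrete illustration of the protocol the docstring describes, a hypothetical minimal fixer tool needs only to read the file content from standard input, write the fixed content to standard output, and exit 0 to accept the result. The tool below strips trailing whitespace; its name, filename, and the sample config are invented for the example:

```python
# strip-trailing-ws.py (hypothetical), configured as e.g.:
#
#   [fix]
#   stripws:command = python3 strip-trailing-ws.py
#   stripws:fileset = set:**.txt
import sys

data = sys.stdin.buffer.read()
fixed = b'\n'.join(line.rstrip() for line in data.split(b'\n'))
sys.stdout.buffer.write(fixed)  # exit status 0 accepts the output
```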
49 49 from __future__ import absolute_import
50 50
51 51 import collections
52 52 import itertools
53 53 import os
54 54 import re
55 55 import subprocess
56 56
57 57 from mercurial.i18n import _
58 58 from mercurial.node import nullrev
59 59 from mercurial.node import wdirrev
60 60
61 61 from mercurial.utils import (
62 62 procutil,
63 63 )
64 64
65 65 from mercurial import (
66 66 cmdutil,
67 67 context,
68 68 copies,
69 69 error,
70 70 mdiff,
71 71 merge,
72 72 obsolete,
73 73 pycompat,
74 74 registrar,
75 75 scmutil,
76 76 util,
77 77 worker,
78 78 )
79 79
80 80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
81 81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
82 82 # be specifying the version(s) of Mercurial they are tested with, or
83 83 # leave the attribute unspecified.
84 84 testedwith = 'ships-with-hg-core'
85 85
86 86 cmdtable = {}
87 87 command = registrar.command(cmdtable)
88 88
89 89 configtable = {}
90 90 configitem = registrar.configitem(configtable)
91 91
92 92 # Register the suboptions allowed for each configured fixer.
93 93 FIXER_ATTRS = ('command', 'linerange', 'fileset')
94 94
95 95 for key in FIXER_ATTRS:
96 96 configitem('fix', '.*(:%s)?' % key, default=None, generic=True)
97 97
98 98 # A good default size allows most source code files to be fixed, but avoids
99 99 # letting fixer tools choke on huge inputs, which could be surprising to the
100 100 # user.
101 101 configitem('fix', 'maxfilesize', default='2MB')
102 102
103 103 allopt = ('', 'all', False, _('fix all non-public non-obsolete revisions'))
104 104 baseopt = ('', 'base', [], _('revisions to diff against (overrides automatic '
105 105 'selection, and applies to every revision being '
106 106 'fixed)'), _('REV'))
107 107 revopt = ('r', 'rev', [], _('revisions to fix'), _('REV'))
108 108 wdiropt = ('w', 'working-dir', False, _('fix the working directory'))
109 109 wholeopt = ('', 'whole', False, _('always fix every line of a file'))
110 110 usage = _('[OPTION]... [FILE]...')
111 111
112 112 @command('fix', [allopt, baseopt, revopt, wdiropt, wholeopt], usage)
113 113 def fix(ui, repo, *pats, **opts):
114 114 """rewrite file content in changesets or working directory
115 115
116 116 Runs any configured tools to fix the content of files. Only affects files
117 117 with changes, unless file arguments are provided. Only affects changed lines
118 118 of files, unless the --whole flag is used. Some tools may always affect the
119 119 whole file regardless of --whole.
120 120
121 121 If revisions are specified with --rev, those revisions will be checked, and
122 122 they may be replaced with new revisions that have fixed file content. It is
123 123 desirable to specify all descendants of each specified revision, so that the
124 124 fixes propagate to the descendants. If all descendants are fixed at the same
125 125 time, no merging, rebasing, or evolution will be required.
126 126
127 127 If --working-dir is used, files with uncommitted changes in the working copy
128 128 will be fixed. If the checked-out revision is also fixed, the working
129 129 directory will update to the replacement revision.
130 130
131 131 When determining what lines of each file to fix at each revision, the whole
132 132 set of revisions being fixed is considered, so that fixes to earlier
133 133 revisions are not forgotten in later ones. The --base flag can be used to
134 134 override this default behavior, though it is not usually desirable to do so.
135 135 """
136 136 opts = pycompat.byteskwargs(opts)
137 137 if opts['all']:
138 138 if opts['rev']:
139 139 raise error.Abort(_('cannot specify both "--rev" and "--all"'))
140 140 opts['rev'] = ['not public() and not obsolete()']
141 141 opts['working_dir'] = True
142 142 with repo.wlock(), repo.lock(), repo.transaction('fix'):
143 143 revstofix = getrevstofix(ui, repo, opts)
144 144 basectxs = getbasectxs(repo, opts, revstofix)
145 145 workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
146 146 basectxs)
147 147 fixers = getfixers(ui)
148 148
149 149 # There are no data dependencies between the workers fixing each file
150 150 # revision, so we can use all available parallelism.
151 151 def getfixes(items):
152 152 for rev, path in items:
153 153 ctx = repo[rev]
154 154 olddata = ctx[path].data()
155 155 newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
156 156 # Don't waste memory/time passing unchanged content back, but
157 157 # produce one result per item either way.
158 158 yield (rev, path, newdata if newdata != olddata else None)
159 159 results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue)
160 160
161 161 # We have to hold on to the data for each successor revision in memory
162 162 # until all its parents are committed. We ensure this by committing and
163 163 # freeing memory for the revisions in some topological order. This
164 164 # leaves a little bit of memory efficiency on the table, but also makes
165 165 # the tests deterministic. It might also be considered a feature since
166 166 # it makes the results more easily reproducible.
167 167 filedata = collections.defaultdict(dict)
168 168 replacements = {}
169 169 wdirwritten = False
170 170 commitorder = sorted(revstofix, reverse=True)
171 171 with ui.makeprogress(topic=_('fixing'), unit=_('files'),
172 172 total=sum(numitems.values())) as progress:
173 173 for rev, path, newdata in results:
174 174 progress.increment(item=path)
175 175 if newdata is not None:
176 176 filedata[rev][path] = newdata
177 177 numitems[rev] -= 1
178 178 # Apply the fixes for this and any other revisions that are
179 179 # ready and sitting at the front of the queue. Using a loop here
180 180 # prevents the queue from being blocked by the first revision to
181 181 # be ready out of order.
182 182 while commitorder and not numitems[commitorder[-1]]:
183 183 rev = commitorder.pop()
184 184 ctx = repo[rev]
185 185 if rev == wdirrev:
186 186 writeworkingdir(repo, ctx, filedata[rev], replacements)
187 187 wdirwritten = bool(filedata[rev])
188 188 else:
189 189 replacerev(ui, repo, ctx, filedata[rev], replacements)
190 190 del filedata[rev]
191 191
192 192 cleanup(repo, replacements, wdirwritten)
193 193
194 194 def cleanup(repo, replacements, wdirwritten):
195 195 """Calls scmutil.cleanupnodes() with the given replacements.
196 196
197 197 "replacements" is a dict from nodeid to nodeid, with one key and one value
198 198 for every revision that was affected by fixing. This is slightly different
199 199 from cleanupnodes().
200 200
201 201 "wdirwritten" is a bool which tells whether the working copy was affected by
202 202 fixing, since it has no entry in "replacements".
203 203
204 204 Useful as a hook point for extending "hg fix" with output summarizing the
205 205 effects of the command, though we choose not to output anything here.
206 206 """
207 207 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
208 208 scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
209 209
210 210 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
211 211 """"Constructs the list of files to be fixed at specific revisions
212 212
213 213 It is up to the caller how to consume the work items, and the only
214 214 dependence between them is that replacement revisions must be committed in
215 215 topological order. Each work item represents a file in the working copy or
216 216 in some revision that should be fixed and written back to the working copy
217 217 or into a replacement revision.
218 218
219 219 Work items for the same revision are grouped together, so that a worker
220 220 pool starting with the first N items in parallel is likely to finish the
221 221 first revision's work before other revisions. This can allow us to write
222 222 the result to disk and reduce memory footprint. At time of writing, the
223 223 partition strategy in worker.py seems favorable to this. We also sort the
224 224 items by ascending revision number to match the order in which we commit
225 225 the fixes later.
226 226 """
227 227 workqueue = []
228 228 numitems = collections.defaultdict(int)
229 229 maxfilesize = ui.configbytes('fix', 'maxfilesize')
230 230 for rev in sorted(revstofix):
231 231 fixctx = repo[rev]
232 232 match = scmutil.match(fixctx, pats, opts)
233 233 for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
234 234 fixctx):
235 235 if path not in fixctx:
236 236 continue
237 237 fctx = fixctx[path]
238 238 if fctx.islink():
239 239 continue
240 240 if fctx.size() > maxfilesize:
241 241 ui.warn(_('ignoring file larger than %s: %s\n') %
242 242 (util.bytecount(maxfilesize), path))
243 243 continue
244 244 workqueue.append((rev, path))
245 245 numitems[rev] += 1
246 246 return workqueue, numitems
247 247
248 248 def getrevstofix(ui, repo, opts):
249 249 """Returns the set of revision numbers that should be fixed"""
250 250 revs = set(scmutil.revrange(repo, opts['rev']))
251 251 for rev in revs:
252 252 checkfixablectx(ui, repo, repo[rev])
253 253 if revs:
254 254 cmdutil.checkunfinished(repo)
255 255 checknodescendants(repo, revs)
256 256 if opts.get('working_dir'):
257 257 revs.add(wdirrev)
258 258 if list(merge.mergestate.read(repo).unresolved()):
259 259 raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
260 260 if not revs:
261 261 raise error.Abort(
262 262 'no changesets specified', hint='use --rev or --working-dir')
263 263 return revs
264 264
265 265 def checknodescendants(repo, revs):
266 266 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
267 267 repo.revs('(%ld::) - (%ld)', revs, revs)):
268 268 raise error.Abort(_('can only fix a changeset together '
269 269 'with all its descendants'))
270 270
271 271 def checkfixablectx(ui, repo, ctx):
272 272 """Aborts if the revision shouldn't be replaced with a fixed one."""
273 273 if not ctx.mutable():
274 274 raise error.Abort('can\'t fix immutable changeset %s' %
275 275 (scmutil.formatchangeid(ctx),))
276 276 if ctx.obsolete():
277 277 # It would be better to actually check if the revision has a successor.
278 278 allowdivergence = ui.configbool('experimental',
279 279 'evolution.allowdivergence')
280 280 if not allowdivergence:
281 281 raise error.Abort('fixing obsolete revision could cause divergence')
282 282
283 283 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
284 284 """Returns the set of files that should be fixed in a context
285 285
286 286 The result depends on the base contexts; we include any file that has
287 287 changed relative to any of the base contexts. Base contexts should be
288 288 ancestors of the context being fixed.
289 289 """
290 290 files = set()
291 291 for basectx in basectxs:
292 292 stat = basectx.status(fixctx, match=match, listclean=bool(pats),
293 293 listunknown=bool(pats))
294 294 files.update(
295 295 set(itertools.chain(stat.added, stat.modified, stat.clean,
296 296 stat.unknown)))
297 297 return files
298 298
299 299 def lineranges(opts, path, basectxs, fixctx, content2):
300 300 """Returns the set of line ranges that should be fixed in a file
301 301
302 302 Of the form [(10, 20), (30, 40)].
303 303
304 304 This depends on the given base contexts; we must consider lines that have
305 305 changed versus any of the base contexts, and whether the file has been
306 306 renamed versus any of them.
307 307
308 308 Another way to understand this is that we exclude line ranges that are
309 309 common to the file in all base contexts.
310 310 """
311 311 if opts.get('whole'):
312 312 # Return a range containing all lines. Rely on the diff implementation's
313 313 # idea of how many lines are in the file, instead of reimplementing it.
314 314 return difflineranges('', content2)
315 315
316 316 rangeslist = []
317 317 for basectx in basectxs:
318 318 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
319 319 if basepath in basectx:
320 320 content1 = basectx[basepath].data()
321 321 else:
322 322 content1 = ''
323 323 rangeslist.extend(difflineranges(content1, content2))
324 324 return unionranges(rangeslist)
325 325
326 326 def unionranges(rangeslist):
327 327 """Return the union of some closed intervals
328 328
329 329 >>> unionranges([])
330 330 []
331 331 >>> unionranges([(1, 100)])
332 332 [(1, 100)]
333 333 >>> unionranges([(1, 100), (1, 100)])
334 334 [(1, 100)]
335 335 >>> unionranges([(1, 100), (2, 100)])
336 336 [(1, 100)]
337 337 >>> unionranges([(1, 99), (1, 100)])
338 338 [(1, 100)]
339 339 >>> unionranges([(1, 100), (40, 60)])
340 340 [(1, 100)]
341 341 >>> unionranges([(1, 49), (50, 100)])
342 342 [(1, 100)]
343 343 >>> unionranges([(1, 48), (50, 100)])
344 344 [(1, 48), (50, 100)]
345 345 >>> unionranges([(1, 2), (3, 4), (5, 6)])
346 346 [(1, 6)]
347 347 """
348 348 rangeslist = sorted(set(rangeslist))
349 349 unioned = []
350 350 if rangeslist:
351 351 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
352 352 for a, b in rangeslist:
353 353 c, d = unioned[-1]
354 354 if a > d + 1:
355 355 unioned.append((a, b))
356 356 else:
357 357 unioned[-1] = (c, max(b, d))
358 358 return unioned
359 359
360 360 def difflineranges(content1, content2):
361 361 """Return list of line number ranges in content2 that differ from content1.
362 362
363 363 Line numbers are 1-based. The numbers are the first and last line contained
364 364 in the range. Single-line ranges have the same line number for the first and
365 365 last line. Excludes any empty ranges that result from lines that are only
366 366 present in content1. Relies on mdiff's idea of where the line endings are in
367 367 the string.
368 368
369 369 >>> from mercurial import pycompat
370 370 >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
371 371 >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
372 372 >>> difflineranges2(b'', b'')
373 373 []
374 374 >>> difflineranges2(b'a', b'')
375 375 []
376 376 >>> difflineranges2(b'', b'A')
377 377 [(1, 1)]
378 378 >>> difflineranges2(b'a', b'a')
379 379 []
380 380 >>> difflineranges2(b'a', b'A')
381 381 [(1, 1)]
382 382 >>> difflineranges2(b'ab', b'')
383 383 []
384 384 >>> difflineranges2(b'', b'AB')
385 385 [(1, 2)]
386 386 >>> difflineranges2(b'abc', b'ac')
387 387 []
388 388 >>> difflineranges2(b'ab', b'aCb')
389 389 [(2, 2)]
390 390 >>> difflineranges2(b'abc', b'aBc')
391 391 [(2, 2)]
392 392 >>> difflineranges2(b'ab', b'AB')
393 393 [(1, 2)]
394 394 >>> difflineranges2(b'abcde', b'aBcDe')
395 395 [(2, 2), (4, 4)]
396 396 >>> difflineranges2(b'abcde', b'aBCDe')
397 397 [(2, 4)]
398 398 """
399 399 ranges = []
400 400 for lines, kind in mdiff.allblocks(content1, content2):
401 401 firstline, lastline = lines[2:4]
402 402 if kind == '!' and firstline != lastline:
403 403 ranges.append((firstline + 1, lastline))
404 404 return ranges
405 405
406 406 def getbasectxs(repo, opts, revstofix):
407 407 """Returns a map of the base contexts for each revision
408 408
409 409 The base contexts determine which lines are considered modified when we
410 410 attempt to fix just the modified lines in a file. It also determines which
411 411 files we attempt to fix, so it is important to compute this even when
412 412 --whole is used.
413 413 """
414 414 # The --base flag overrides the usual logic, and we give every revision
415 415 # exactly the set of baserevs that the user specified.
416 416 if opts.get('base'):
417 417 baserevs = set(scmutil.revrange(repo, opts.get('base')))
418 418 if not baserevs:
419 419 baserevs = {nullrev}
420 420 basectxs = {repo[rev] for rev in baserevs}
421 421 return {rev: basectxs for rev in revstofix}
422 422
423 423 # Proceed in topological order so that we can easily determine each
424 424 # revision's baserevs by looking at its parents and their baserevs.
425 425 basectxs = collections.defaultdict(set)
426 426 for rev in sorted(revstofix):
427 427 ctx = repo[rev]
428 428 for pctx in ctx.parents():
429 429 if pctx.rev() in basectxs:
430 430 basectxs[rev].update(basectxs[pctx.rev()])
431 431 else:
432 432 basectxs[rev].add(pctx)
433 433 return basectxs
434 434
435 435 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
436 436 """Run any configured fixers that should affect the file in this context
437 437
438 438 Returns the file content that results from applying the fixers in some order
439 439 starting with the file's content in the fixctx. Fixers that support line
440 440 ranges will affect lines that have changed relative to any of the basectxs
441 441 (i.e. they will only avoid lines that are common to all basectxs).
442 442
443 443 A fixer tool's stdout will become the file's new content if and only if it
444 444 exits with code zero.
445 445 """
446 446 newdata = fixctx[path].data()
447 447 for fixername, fixer in fixers.iteritems():
448 448 if fixer.affects(opts, fixctx, path):
449 449 rangesfn = lambda: lineranges(opts, path, basectxs, fixctx, newdata)
450 450 command = fixer.command(ui, path, rangesfn)
451 451 if command is None:
452 452 continue
453 453 ui.debug('subprocess: %s\n' % (command,))
454 454 proc = subprocess.Popen(
455 - pycompat.rapply(procutil.tonativestr, command),
455 + procutil.tonativestr(command),
456 456 shell=True,
457 457 cwd=procutil.tonativestr(b'/'),
458 458 stdin=subprocess.PIPE,
459 459 stdout=subprocess.PIPE,
460 460 stderr=subprocess.PIPE)
461 461 newerdata, stderr = proc.communicate(newdata)
462 462 if stderr:
463 463 showstderr(ui, fixctx.rev(), fixername, stderr)
464 464 if proc.returncode == 0:
465 465 newdata = newerdata
466 466 elif not stderr:
467 467 showstderr(ui, fixctx.rev(), fixername,
468 468 _('exited with status %d\n') % (proc.returncode,))
469 469 return newdata
470 470
471 471 def showstderr(ui, rev, fixername, stderr):
472 472 """Writes the lines of the stderr string as warnings on the ui
473 473
474 474 Uses the revision number and fixername to give more context to each line of
475 475 the error message. Doesn't include file names, since those take up a lot of
476 476 space and would tend to be included in the error message if they were
477 477 relevant.
478 478 """
479 479 for line in re.split('[\r\n]+', stderr):
480 480 if line:
481 481 ui.warn(('['))
482 482 if rev is None:
483 483 ui.warn(_('wdir'), label='evolve.rev')
484 484 else:
485 485 ui.warn((str(rev)), label='evolve.rev')
486 486 ui.warn(('] %s: %s\n') % (fixername, line))
487 487
488 488 def writeworkingdir(repo, ctx, filedata, replacements):
489 489 """Write new content to the working copy and check out the new p1 if any
490 490
491 491 We check out a new revision if and only if we fixed something in both the
492 492 working directory and its parent revision. This avoids the need for a full
493 493 update/merge, and means that the working directory simply isn't affected
494 494 unless the --working-dir flag is given.
495 495
496 496 Directly updates the dirstate for the affected files.
497 497 """
498 498 for path, data in filedata.iteritems():
499 499 fctx = ctx[path]
500 500 fctx.write(data, fctx.flags())
501 501 if repo.dirstate[path] == 'n':
502 502 repo.dirstate.normallookup(path)
503 503
504 504 oldparentnodes = repo.dirstate.parents()
505 505 newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
506 506 if newparentnodes != oldparentnodes:
507 507 repo.setparents(*newparentnodes)
508 508
509 509 def replacerev(ui, repo, ctx, filedata, replacements):
510 510 """Commit a new revision like the given one, but with file content changes
511 511
512 512 "ctx" is the original revision to be replaced by a modified one.
513 513
514 514 "filedata" is a dict that maps paths to their new file content. All other
515 515 paths will be recreated from the original revision without changes.
516 516 "filedata" may contain paths that didn't exist in the original revision;
517 517 they will be added.
518 518
519 519 "replacements" is a dict that maps a single node to a single node, and it is
520 520 updated to indicate the original revision is replaced by the newly created
521 521 one. No entry is added if the replacement's node already exists.
522 522
523 523 The new revision has the same parents as the old one, unless those parents
524 524 have already been replaced, in which case those replacements are the parents
525 525 of this new revision. Thus, if revisions are replaced in topological order,
526 526 there is no need to rebase them into the original topology later.
527 527 """
528 528
529 529 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
530 530 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
531 531 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
532 532 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
533 533
534 534 def filectxfn(repo, memctx, path):
535 535 if path not in ctx:
536 536 return None
537 537 fctx = ctx[path]
538 538 copied = fctx.renamed()
539 539 if copied:
540 540 copied = copied[0]
541 541 return context.memfilectx(
542 542 repo,
543 543 memctx,
544 544 path=fctx.path(),
545 545 data=filedata.get(path, fctx.data()),
546 546 islink=fctx.islink(),
547 547 isexec=fctx.isexec(),
548 548 copied=copied)
549 549
550 550 memctx = context.memctx(
551 551 repo,
552 552 parents=(newp1node, newp2node),
553 553 text=ctx.description(),
554 554 files=set(ctx.files()) | set(filedata.keys()),
555 555 filectxfn=filectxfn,
556 556 user=ctx.user(),
557 557 date=ctx.date(),
558 558 extra=ctx.extra(),
559 559 branch=ctx.branch(),
560 560 editor=None)
561 561 sucnode = memctx.commit()
562 562 prenode = ctx.node()
563 563 if prenode == sucnode:
564 564 ui.debug('node %s already existed\n' % (ctx.hex()))
565 565 else:
566 566 replacements[ctx.node()] = sucnode
567 567
568 568 def getfixers(ui):
569 569 """Returns a map of configured fixer tools indexed by their names
570 570
571 571 Each value is a Fixer object with methods that implement the behavior of the
572 572 fixer's config suboptions. Does not validate the config values.
573 573 """
574 574 result = {}
575 575 for name in fixernames(ui):
576 576 result[name] = Fixer()
577 577 attrs = ui.configsuboptions('fix', name)[1]
578 578 for key in FIXER_ATTRS:
579 579 setattr(result[name], pycompat.sysstr('_' + key),
580 580 attrs.get(key, ''))
581 581 return result
582 582
583 583 def fixernames(ui):
584 584 """Returns the names of [fix] config options that have suboptions"""
585 585 names = set()
586 586 for k, v in ui.configitems('fix'):
587 587 if ':' in k:
588 588 names.add(k.split(':', 1)[0])
589 589 return names
590 590
591 591 class Fixer(object):
592 592 """Wraps the raw config values for a fixer with methods"""
593 593
594 594 def affects(self, opts, fixctx, path):
595 595 """Should this fixer run on the file at the given path and context?"""
596 596 return scmutil.match(fixctx, [self._fileset], opts)(path)
597 597
598 598 def command(self, ui, path, rangesfn):
599 599 """A shell command to use to invoke this fixer on the given file/lines
600 600
601 601 May return None if there is no appropriate command to run for the given
602 602 parameters.
603 603 """
604 604 expand = cmdutil.rendercommandtemplate
605 605 parts = [expand(ui, self._command,
606 606 {'rootpath': path, 'basename': os.path.basename(path)})]
607 607 if self._linerange:
608 608 ranges = rangesfn()
609 609 if not ranges:
610 610 # No line ranges to fix, so don't run the fixer.
611 611 return None
612 612 for first, last in ranges:
613 613 parts.append(expand(ui, self._linerange,
614 614 {'first': first, 'last': last}))
615 615 return ' '.join(parts)
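Fixer.command() renders the :command template once, then the :linerange template once per changed range, and joins the parts into one shell command. A hedged sketch of that assembly, with plain %-formatting standing in for cmdutil.rendercommandtemplate and values taken from the clang-format example in the module docstring:

```python
# illustrative stand-ins for the rendered config suboptions
command = 'clang-format --assume-filename=%(rootpath)s'
linerange = '--lines=%(first)d:%(last)d'

def buildcommand(path, ranges):
    parts = [command % {'rootpath': path}]
    parts.extend(linerange % {'first': a, 'last': b}
                 for a, b in ranges)
    return ' '.join(parts)

assert (buildcommand('src/main.cpp', [(10, 20), (30, 40)]) ==
        'clang-format --assume-filename=src/main.cpp '
        '--lines=10:20 --lines=30:40')
```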
diff --git a/hgext/logtoprocess.py b/hgext/logtoprocess.py
@@ -1,138 +1,138 @@
1 1 # logtoprocess.py - send ui.log() data to a subprocess
2 2 #
3 3 # Copyright 2016 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """send ui.log() data to a subprocess (EXPERIMENTAL)
8 8
9 9 This extension lets you specify a shell command per ui.log() event,
10 10 passing all remaining arguments as environment variables to that command.
11 11
12 12 Each positional argument to the method results in a `MSG[N]` key in the
13 13 environment, starting at 1 (so `MSG1`, `MSG2`, etc.). Each keyword argument
14 14 is set as an `OPT_UPPERCASE_KEY` variable (so the key is uppercased, and
15 15 prefixed with `OPT_`). The original event name is passed in the `EVENT`
16 16 environment variable, and the process ID of mercurial is given in `HGPID`.
17 17
18 18 So given a call `ui.log('foo', 'bar', 'baz', spam='eggs')`, a script configured
19 19 for the `foo` event can expect an environment with `MSG1=bar`, `MSG2=baz`, and
20 20 `OPT_SPAM=eggs`.
21 21
22 22 Scripts are configured in the `[logtoprocess]` section, each key an event name.
23 23 For example::
24 24
25 25 [logtoprocess]
26 26 commandexception = echo "$MSG2$MSG3" > /var/log/mercurial_exceptions.log
27 27
28 28 would log the warning message and traceback of any failed command dispatch.
29 29
30 30 Scripts are run asynchronously as detached daemon processes; mercurial will
31 31 not ensure that they exit cleanly.
32 32
33 33 """
34 34
35 35 from __future__ import absolute_import
36 36
37 37 import itertools
38 38 import os
39 39 import subprocess
40 40 import sys
41 41
42 42 from mercurial import (
43 43 encoding,
44 44 pycompat,
45 45 )
46 46
47 47 from mercurial.utils import (
48 48 procutil,
49 49 )
50 50
51 51 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
52 52 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
53 53 # be specifying the version(s) of Mercurial they are tested with, or
54 54 # leave the attribute unspecified.
55 55 testedwith = 'ships-with-hg-core'
56 56
57 57 def uisetup(ui):
58 58 if pycompat.iswindows:
59 59 # no fork on Windows, but we can create a detached process
60 60 # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863.aspx
61 61 # No stdlib constant exists for this value
62 62 DETACHED_PROCESS = 0x00000008
63 63 _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP
64 64
65 65 def runshellcommand(script, env):
66 66 # we can't use close_fds *and* redirect stdin. I'm not sure that we
67 67 # need to because the detached process has no console connection.
68 68 subprocess.Popen(
69 - pycompat.rapply(procutil.tonativestr, script),
69 + procutil.tonativestr(script),
70 70 shell=True, env=procutil.tonativeenv(env), close_fds=True,
71 71 creationflags=_creationflags)
72 72 else:
73 73 def runshellcommand(script, env):
74 74 # double-fork to completely detach from the parent process
75 75 # based on http://code.activestate.com/recipes/278731
76 76 pid = os.fork()
77 77 if pid:
78 78 # parent
79 79 return
80 80 # subprocess.Popen() forks again, all we need to add is
81 81 # flag the new process as a new session.
82 82 if sys.version_info < (3, 2):
83 83 newsession = {'preexec_fn': os.setsid}
84 84 else:
85 85 newsession = {'start_new_session': True}
86 86 try:
87 87 # connect stdin to devnull to make sure the subprocess can't
88 88 # muck up that stream for mercurial.
89 89 subprocess.Popen(
90 - pycompat.rapply(procutil.tonativestr, script),
90 + procutil.tonativestr(script),
91 91 shell=True, stdin=open(os.devnull, 'r'),
92 92 env=procutil.tonativeenv(env),
93 93 close_fds=True, **newsession)
94 94 finally:
95 95 # mission accomplished, this child needs to exit and not
96 96 # continue the hg process here.
97 97 os._exit(0)
98 98
99 99 class logtoprocessui(ui.__class__):
100 100 def log(self, event, *msg, **opts):
101 101 """Map log events to external commands
102 102
103 103 Arguments are passed on as environment variables.
104 104
105 105 """
106 106 script = self.config('logtoprocess', event)
107 107 if script:
108 108 if msg:
109 109 # try to format the log message given the remaining
110 110 # arguments
111 111 try:
112 112 # Python string formatting with % either uses a
113 113 # dictionary *or* tuple, but not both. If we have
114 114 # keyword options, assume we need a mapping.
115 115 formatted = msg[0] % (opts or msg[1:])
116 116 except (TypeError, KeyError):
117 117 # Failed to apply the arguments, ignore
118 118 formatted = msg[0]
119 119 messages = (formatted,) + msg[1:]
120 120 else:
121 121 messages = msg
122 122 # positional arguments are listed as MSG[N] keys in the
123 123 # environment
124 124 msgpairs = (
125 125 ('MSG{0:d}'.format(i), str(m))
126 126 for i, m in enumerate(messages, 1))
127 127 # keyword arguments get prefixed with OPT_ and uppercased
128 128 optpairs = (
129 129 ('OPT_{0}'.format(key.upper()), str(value))
130 130 for key, value in opts.iteritems())
131 131 env = dict(itertools.chain(encoding.environ.items(),
132 132 msgpairs, optpairs),
133 133 EVENT=event, HGPID=str(os.getpid()))
134 134 runshellcommand(script, env)
135 135 return super(logtoprocessui, self).log(event, *msg, **opts)
136 136
137 137 # Replace the class for this instance and all clones created from it:
138 138 ui.__class__ = logtoprocessui
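For a call like ui.log('foo', 'bar', 'baz', spam='eggs'), the script configured for the foo event therefore sees MSG1=bar, MSG2=baz, OPT_SPAM=eggs, plus EVENT and HGPID. A standalone reconstruction of that mapping (`buildenv` is illustrative; the real code above also merges in the process environment via encoding.environ):

```python
import itertools
import os

def buildenv(event, msg, opts):
    # positional args become MSG1..MSGn, keyword args OPT_<KEY>
    msgpairs = (('MSG%d' % i, m) for i, m in enumerate(msg, 1))
    optpairs = (('OPT_%s' % k.upper(), v) for k, v in opts.items())
    return dict(itertools.chain(msgpairs, optpairs),
                EVENT=event, HGPID=str(os.getpid()))

env = buildenv('foo', ('bar', 'baz'), {'spam': 'eggs'})
assert env['MSG1'] == 'bar' and env['MSG2'] == 'baz'
assert env['OPT_SPAM'] == 'eggs' and env['EVENT'] == 'foo'
```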
diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py
@@ -1,1774 +1,1774 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 policy,
38 38 pycompat,
39 39 revsetlang,
40 40 similar,
41 41 url,
42 42 util,
43 43 vfs,
44 44 )
45 45
46 46 from .utils import (
47 47 procutil,
48 48 stringutil,
49 49 )
50 50
51 51 if pycompat.iswindows:
52 52 from . import scmwindows as scmplatform
53 53 else:
54 54 from . import scmposix as scmplatform
55 55
56 56 parsers = policy.importmod(r'parsers')
57 57
58 58 termsize = scmplatform.termsize
59 59
60 60 class status(tuple):
61 61 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
62 62 and 'ignored' properties are only relevant to the working copy.
63 63 '''
64 64
65 65 __slots__ = ()
66 66
67 67 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
68 68 clean):
69 69 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
70 70 ignored, clean))
71 71
72 72 @property
73 73 def modified(self):
74 74 '''files that have been modified'''
75 75 return self[0]
76 76
77 77 @property
78 78 def added(self):
79 79 '''files that have been added'''
80 80 return self[1]
81 81
82 82 @property
83 83 def removed(self):
84 84 '''files that have been removed'''
85 85 return self[2]
86 86
87 87 @property
88 88 def deleted(self):
89 89 '''files that are in the dirstate, but have been deleted from the
90 90 working copy (aka "missing")
91 91 '''
92 92 return self[3]
93 93
94 94 @property
95 95 def unknown(self):
96 96 '''files not in the dirstate that are not ignored'''
97 97 return self[4]
98 98
99 99 @property
100 100 def ignored(self):
101 101 '''files not in the dirstate that are ignored (by _dirignore())'''
102 102 return self[5]
103 103
104 104 @property
105 105 def clean(self):
106 106 '''files that have not been modified'''
107 107 return self[6]
108 108
109 109 def __repr__(self, *args, **kwargs):
110 110 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
111 111 r'unknown=%s, ignored=%s, clean=%s>') %
112 112 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
113 113
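The status class behaves as a plain 7-tuple with named accessors. A hedged usage sketch, assuming the class defined above is in scope (instances normally come from APIs such as ctx.status() rather than direct construction):

```python
st = status(modified=[b'a.txt'], added=[b'b.txt'], removed=[],
            deleted=[], unknown=[b'junk'], ignored=[], clean=[])
# attribute and index access agree, and tuple unpacking works
assert st.modified == [b'a.txt']
assert st[1] == st.added == [b'b.txt']
modified, added, removed = st[:3]
```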
114 114 def itersubrepos(ctx1, ctx2):
115 115 """find subrepos in ctx1 or ctx2"""
116 116 # Create a (subpath, ctx) mapping where we prefer subpaths from
117 117 # ctx1. The subpaths from ctx2 are important when the .hgsub file
118 118 # has been modified (in ctx2) but not yet committed (in ctx1).
119 119 subpaths = dict.fromkeys(ctx2.substate, ctx2)
120 120 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
121 121
122 122 missing = set()
123 123
124 124 for subpath in ctx2.substate:
125 125 if subpath not in ctx1.substate:
126 126 del subpaths[subpath]
127 127 missing.add(subpath)
128 128
129 129 for subpath, ctx in sorted(subpaths.iteritems()):
130 130 yield subpath, ctx.sub(subpath)
131 131
132 132 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
133 133 # status and diff will have an accurate result when it does
134 134 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
135 135 # against itself.
136 136 for subpath in missing:
137 137 yield subpath, ctx2.nullsub(subpath, ctx1)
138 138
139 139 def nochangesfound(ui, repo, excluded=None):
140 140 '''Report no changes for push/pull, excluded is None or a list of
141 141 nodes excluded from the push/pull.
142 142 '''
143 143 secretlist = []
144 144 if excluded:
145 145 for n in excluded:
146 146 ctx = repo[n]
147 147 if ctx.phase() >= phases.secret and not ctx.extinct():
148 148 secretlist.append(n)
149 149
150 150 if secretlist:
151 151 ui.status(_("no changes found (ignored %d secret changesets)\n")
152 152 % len(secretlist))
153 153 else:
154 154 ui.status(_("no changes found\n"))
155 155
156 156 def callcatch(ui, func):
157 157 """call func() with global exception handling
158 158
159 159 return func() if no exception happens. otherwise do some error handling
160 160 and return an exit code accordingly. does not handle all exceptions.
161 161 """
162 162 try:
163 163 try:
164 164 return func()
165 165 except: # re-raises
166 166 ui.traceback()
167 167 raise
168 168 # Global exception handling, alphabetically
169 169 # Mercurial-specific first, followed by built-in and library exceptions
170 170 except error.LockHeld as inst:
171 171 if inst.errno == errno.ETIMEDOUT:
172 172 reason = _('timed out waiting for lock held by %r') % inst.locker
173 173 else:
174 174 reason = _('lock held by %r') % inst.locker
175 175 ui.error(_("abort: %s: %s\n") % (
176 176 inst.desc or stringutil.forcebytestr(inst.filename), reason))
177 177 if not inst.locker:
178 178 ui.error(_("(lock might be very busy)\n"))
179 179 except error.LockUnavailable as inst:
180 180 ui.error(_("abort: could not lock %s: %s\n") %
181 181 (inst.desc or stringutil.forcebytestr(inst.filename),
182 182 encoding.strtolocal(inst.strerror)))
183 183 except error.OutOfBandError as inst:
184 184 if inst.args:
185 185 msg = _("abort: remote error:\n")
186 186 else:
187 187 msg = _("abort: remote error\n")
188 188 ui.error(msg)
189 189 if inst.args:
190 190 ui.error(''.join(inst.args))
191 191 if inst.hint:
192 192 ui.error('(%s)\n' % inst.hint)
193 193 except error.RepoError as inst:
194 194 ui.error(_("abort: %s!\n") % inst)
195 195 if inst.hint:
196 196 ui.error(_("(%s)\n") % inst.hint)
197 197 except error.ResponseError as inst:
198 198 ui.error(_("abort: %s") % inst.args[0])
199 199 msg = inst.args[1]
200 200 if isinstance(msg, type(u'')):
201 201 msg = pycompat.sysbytes(msg)
202 202 if not isinstance(msg, bytes):
203 203 ui.error(" %r\n" % (msg,))
204 204 elif not msg:
205 205 ui.error(_(" empty string\n"))
206 206 else:
207 207 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
208 208 except error.CensoredNodeError as inst:
209 209 ui.error(_("abort: file censored %s!\n") % inst)
210 210 except error.StorageError as inst:
211 211 ui.error(_("abort: %s!\n") % inst)
212 212 except error.InterventionRequired as inst:
213 213 ui.error("%s\n" % inst)
214 214 if inst.hint:
215 215 ui.error(_("(%s)\n") % inst.hint)
216 216 return 1
217 217 except error.WdirUnsupported:
218 218 ui.error(_("abort: working directory revision cannot be specified\n"))
219 219 except error.Abort as inst:
220 220 ui.error(_("abort: %s\n") % inst)
221 221 if inst.hint:
222 222 ui.error(_("(%s)\n") % inst.hint)
223 223 except ImportError as inst:
224 224 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
225 225 m = stringutil.forcebytestr(inst).split()[-1]
226 226 if m in "mpatch bdiff".split():
227 227 ui.error(_("(did you forget to compile extensions?)\n"))
228 228 elif m in "zlib".split():
229 229 ui.error(_("(is your Python install correct?)\n"))
230 230 except IOError as inst:
231 231 if util.safehasattr(inst, "code"):
232 232 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
233 233 elif util.safehasattr(inst, "reason"):
234 234 try: # usually it is in the form (errno, strerror)
235 235 reason = inst.reason.args[1]
236 236 except (AttributeError, IndexError):
237 237 # it might be anything, for example a string
238 238 reason = inst.reason
239 239 if isinstance(reason, pycompat.unicode):
240 240 # SSLError of Python 2.7.9 contains a unicode
241 241 reason = encoding.unitolocal(reason)
242 242 ui.error(_("abort: error: %s\n") % reason)
243 243 elif (util.safehasattr(inst, "args")
244 244 and inst.args and inst.args[0] == errno.EPIPE):
245 245 pass
246 246 elif getattr(inst, "strerror", None):
247 247 if getattr(inst, "filename", None):
248 248 ui.error(_("abort: %s: %s\n") % (
249 249 encoding.strtolocal(inst.strerror),
250 250 stringutil.forcebytestr(inst.filename)))
251 251 else:
252 252 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
253 253 else:
254 254 raise
255 255 except OSError as inst:
256 256 if getattr(inst, "filename", None) is not None:
257 257 ui.error(_("abort: %s: '%s'\n") % (
258 258 encoding.strtolocal(inst.strerror),
259 259 stringutil.forcebytestr(inst.filename)))
260 260 else:
261 261 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
262 262 except MemoryError:
263 263 ui.error(_("abort: out of memory\n"))
264 264 except SystemExit as inst:
265 265 # Commands shouldn't sys.exit directly, but give a return code.
266 266 # Just in case, catch this and pass the exit code to the caller.
267 267 return inst.code
268 268 except socket.error as inst:
269 269 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
270 270
271 271 return -1
272 272
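# Illustrative sketch (not part of the original module): a dispatcher could
# route a command through callcatch() for uniform error handling; `ui` and
# `runcommand` are assumed names here.
#
#   def run(ui, repo):
#       ret = callcatch(ui, lambda: runcommand(ui, repo))
#       return ret if ret is not None else 0
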
273 273 def checknewlabel(repo, lbl, kind):
274 274 # Do not use the "kind" parameter in ui output.
275 275 # It makes strings difficult to translate.
276 276 if lbl in ['tip', '.', 'null']:
277 277 raise error.Abort(_("the name '%s' is reserved") % lbl)
278 278 for c in (':', '\0', '\n', '\r'):
279 279 if c in lbl:
280 280 raise error.Abort(
281 281 _("%r cannot be used in a name") % pycompat.bytestr(c))
282 282 try:
283 283 int(lbl)
284 284 raise error.Abort(_("cannot use an integer as a name"))
285 285 except ValueError:
286 286 pass
287 287 if lbl.strip() != lbl:
288 288 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
289 289
290 290 def checkfilename(f):
291 291 '''Check that the filename f is an acceptable filename for a tracked file'''
292 292 if '\r' in f or '\n' in f:
293 293 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
294 294 % pycompat.bytestr(f))
295 295
296 296 def checkportable(ui, f):
297 297 '''Check if filename f is portable and warn or abort depending on config'''
298 298 checkfilename(f)
299 299 abort, warn = checkportabilityalert(ui)
300 300 if abort or warn:
301 301 msg = util.checkwinfilename(f)
302 302 if msg:
303 303 msg = "%s: %s" % (msg, procutil.shellquote(f))
304 304 if abort:
305 305 raise error.Abort(msg)
306 306 ui.warn(_("warning: %s\n") % msg)
307 307
308 308 def checkportabilityalert(ui):
309 309 '''check if the user's config requests nothing, a warning, or abort for
310 310 non-portable filenames'''
311 311 val = ui.config('ui', 'portablefilenames')
312 312 lval = val.lower()
313 313 bval = stringutil.parsebool(val)
314 314 abort = pycompat.iswindows or lval == 'abort'
315 315 warn = bval or lval == 'warn'
316 316 if bval is None and not (warn or abort or lval == 'ignore'):
317 317 raise error.ConfigError(
318 318 _("ui.portablefilenames value is invalid ('%s')") % val)
319 319 return abort, warn
320 320
321 321 class casecollisionauditor(object):
322 322 def __init__(self, ui, abort, dirstate):
323 323 self._ui = ui
324 324 self._abort = abort
325 325 allfiles = '\0'.join(dirstate._map)
326 326 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
327 327 self._dirstate = dirstate
328 328 # The purpose of _newfiles is to avoid complaining about case
329 329 # collisions if someone calls this object with the same
330 330 # filename twice.
331 331 self._newfiles = set()
332 332
333 333 def __call__(self, f):
334 334 if f in self._newfiles:
335 335 return
336 336 fl = encoding.lower(f)
337 337 if fl in self._loweredfiles and f not in self._dirstate:
338 338 msg = _('possible case-folding collision for %s') % f
339 339 if self._abort:
340 340 raise error.Abort(msg)
341 341 self._ui.warn(_("warning: %s\n") % msg)
342 342 self._loweredfiles.add(fl)
343 343 self._newfiles.add(f)
344 344
345 345 def filteredhash(repo, maxrev):
346 346 """build hash of filtered revisions in the current repoview.
347 347
348 348 Multiple caches perform up-to-date validation by checking that the
349 349 tiprev and tipnode stored in the cache file match the current repository.
350 350 However, this is not sufficient for validating repoviews because the set
351 351 of revisions in the view may change without the repository tiprev and
352 352 tipnode changing.
353 353
354 354 This function hashes all the revs filtered from the view and returns
355 355 that SHA-1 digest.
356 356 """
357 357 cl = repo.changelog
358 358 if not cl.filteredrevs:
359 359 return None
360 360 key = None
361 361 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
362 362 if revs:
363 363 s = hashlib.sha1()
364 364 for rev in revs:
365 365 s.update('%d;' % rev)
366 366 key = s.digest()
367 367 return key
368 368
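# Illustrative sketch: a cache could extend its tip-based validation key
# with the filtered hash so repoview changes invalidate it (the key layout
# here is an assumption, not from this file).
#
#   maxrev = len(repo.changelog) - 1
#   cachekey = (maxrev, repo.changelog.tip(), filteredhash(repo, maxrev))
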
369 369 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
370 370 '''yield every hg repository under path, always recursively.
371 371 The recurse flag only controls recursion into repo working dirs.'''
372 372 def errhandler(err):
373 373 if err.filename == path:
374 374 raise err
375 375 samestat = getattr(os.path, 'samestat', None)
376 376 if followsym and samestat is not None:
377 377 def adddir(dirlst, dirname):
378 378 dirstat = os.stat(dirname)
379 379 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
380 380 if not match:
381 381 dirlst.append(dirstat)
382 382 return not match
383 383 else:
384 384 followsym = False
385 385
386 386 if (seen_dirs is None) and followsym:
387 387 seen_dirs = []
388 388 adddir(seen_dirs, path)
389 389 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
390 390 dirs.sort()
391 391 if '.hg' in dirs:
392 392 yield root # found a repository
393 393 qroot = os.path.join(root, '.hg', 'patches')
394 394 if os.path.isdir(os.path.join(qroot, '.hg')):
395 395 yield qroot # we have a patch queue repo here
396 396 if recurse:
397 397 # avoid recursing inside the .hg directory
398 398 dirs.remove('.hg')
399 399 else:
400 400 dirs[:] = [] # don't descend further
401 401 elif followsym:
402 402 newdirs = []
403 403 for d in dirs:
404 404 fname = os.path.join(root, d)
405 405 if adddir(seen_dirs, fname):
406 406 if os.path.islink(fname):
407 407 for hgname in walkrepos(fname, True, seen_dirs):
408 408 yield hgname
409 409 else:
410 410 newdirs.append(d)
411 411 dirs[:] = newdirs
412 412
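# Illustrative sketch: enumerate repositories below a directory; the path
# is an assumption for the example.
#
#   for repopath in walkrepos('/srv/hg', followsym=True):
#       ui.write('%s\n' % repopath)
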
413 413 def binnode(ctx):
414 414 """Return binary node id for a given basectx"""
415 415 node = ctx.node()
416 416 if node is None:
417 417 return wdirid
418 418 return node
419 419
420 420 def intrev(ctx):
421 421 """Return integer for a given basectx that can be used in comparison or
422 422 arithmetic operation"""
423 423 rev = ctx.rev()
424 424 if rev is None:
425 425 return wdirrev
426 426 return rev
427 427
428 428 def formatchangeid(ctx):
429 429 """Format changectx as '{rev}:{node|formatnode}', which is the default
430 430 template provided by logcmdutil.changesettemplater"""
431 431 repo = ctx.repo()
432 432 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
433 433
434 434 def formatrevnode(ui, rev, node):
435 435 """Format given revision and node depending on the current verbosity"""
436 436 if ui.debugflag:
437 437 hexfunc = hex
438 438 else:
439 439 hexfunc = short
440 440 return '%d:%s' % (rev, hexfunc(node))
441 441
442 442 def resolvehexnodeidprefix(repo, prefix):
443 443 if (prefix.startswith('x') and
444 444 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
445 445 prefix = prefix[1:]
446 446 try:
447 447 # Uses unfiltered repo because it's faster when prefix is ambiguous.
448 448 # This matches the shortesthexnodeidprefix() function below.
449 449 node = repo.unfiltered().changelog._partialmatch(prefix)
450 450 except error.AmbiguousPrefixLookupError:
451 451 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
452 452 if revset:
453 453 # Clear config to avoid infinite recursion
454 454 configoverrides = {('experimental',
455 455 'revisions.disambiguatewithin'): None}
456 456 with repo.ui.configoverride(configoverrides):
457 457 revs = repo.anyrevs([revset], user=True)
458 458 matches = []
459 459 for rev in revs:
460 460 node = repo.changelog.node(rev)
461 461 if hex(node).startswith(prefix):
462 462 matches.append(node)
463 463 if len(matches) == 1:
464 464 return matches[0]
465 465 raise
466 466 if node is None:
467 467 return
468 468 repo.changelog.rev(node) # make sure node isn't filtered
469 469 return node
470 470
471 471 def mayberevnum(repo, prefix):
472 472 """Checks if the given prefix may be mistaken for a revision number"""
473 473 try:
474 474 i = int(prefix)
475 475 # a pure int will not be confused with a rev if it starts
476 476 # with a zero or, obviously, if it is larger than the
477 477 # value of the tip rev
478 478 if prefix[0:1] == b'0' or i >= len(repo):
479 479 return False
480 480 return True
481 481 except ValueError:
482 482 return False
483 483
484 484 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
485 485 """Find the shortest unambiguous prefix that matches hexnode.
486 486
487 487 If "cache" is not None, it must be a dictionary that can be used for
488 488 caching between calls to this method.
489 489 """
490 490 # _partialmatch() of filtered changelog could take O(len(repo)) time,
491 491 # which would be unacceptably slow. So we look for hash collisions in
492 492 # unfiltered space, which means some hashes may be slightly longer.
493 493
494 494 def disambiguate(prefix):
495 495 """Disambiguate against revnums."""
496 496 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
497 497 if mayberevnum(repo, prefix):
498 498 return 'x' + prefix
499 499 else:
500 500 return prefix
501 501
502 502 hexnode = hex(node)
503 503 for length in range(len(prefix), len(hexnode) + 1):
504 504 prefix = hexnode[:length]
505 505 if not mayberevnum(repo, prefix):
506 506 return prefix
507 507
508 508 cl = repo.unfiltered().changelog
509 509 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
510 510 if revset:
511 511 revs = None
512 512 if cache is not None:
513 513 revs = cache.get('disambiguationrevset')
514 514 if revs is None:
515 515 revs = repo.anyrevs([revset], user=True)
516 516 if cache is not None:
517 517 cache['disambiguationrevset'] = revs
518 518 if cl.rev(node) in revs:
519 519 hexnode = hex(node)
520 520 nodetree = None
521 521 if cache is not None:
522 522 nodetree = cache.get('disambiguationnodetree')
523 523 if not nodetree:
524 524 try:
525 525 nodetree = parsers.nodetree(cl.index, len(revs))
526 526 except AttributeError:
527 527 # no native nodetree
528 528 pass
529 529 else:
530 530 for r in revs:
531 531 nodetree.insert(r)
532 532 if cache is not None:
533 533 cache['disambiguationnodetree'] = nodetree
534 534 if nodetree is not None:
535 535 length = max(nodetree.shortest(node), minlength)
536 536 prefix = hexnode[:length]
537 537 return disambiguate(prefix)
538 538 for length in range(minlength, len(hexnode) + 1):
539 539 matches = []
540 540 prefix = hexnode[:length]
541 541 for rev in revs:
542 542 otherhexnode = repo[rev].hex()
543 543 if prefix == otherhexnode[:length]:
544 544 matches.append(otherhexnode)
545 545 if len(matches) == 1:
546 546 return disambiguate(prefix)
547 547
548 548 try:
549 549 return disambiguate(cl.shortest(node, minlength))
550 550 except error.LookupError:
551 551 raise error.RepoLookupError()
552 552
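# Illustrative sketch: shorten a full binary node to its minimal
# unambiguous hex form; the 4-character floor is an assumption.
#
#   prefix = shortesthexnodeidprefix(repo, ctx.node(), minlength=4)
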
553 553 def isrevsymbol(repo, symbol):
554 554 """Checks if a symbol exists in the repo.
555 555
556 556 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
557 557 symbol is an ambiguous nodeid prefix.
558 558 """
559 559 try:
560 560 revsymbol(repo, symbol)
561 561 return True
562 562 except error.RepoLookupError:
563 563 return False
564 564
565 565 def revsymbol(repo, symbol):
566 566 """Returns a context given a single revision symbol (as string).
567 567
568 568 This is similar to revsingle(), but accepts only a single revision symbol,
569 569 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
570 570 not "max(public())".
571 571 """
572 572 if not isinstance(symbol, bytes):
573 573 msg = ("symbol (%s of type %s) was not a string, did you mean "
574 574 "repo[symbol]?" % (symbol, type(symbol)))
575 575 raise error.ProgrammingError(msg)
576 576 try:
577 577 if symbol in ('.', 'tip', 'null'):
578 578 return repo[symbol]
579 579
580 580 try:
581 581 r = int(symbol)
582 582 if '%d' % r != symbol:
583 583 raise ValueError
584 584 l = len(repo.changelog)
585 585 if r < 0:
586 586 r += l
587 587 if r < 0 or r >= l and r != wdirrev:
588 588 raise ValueError
589 589 return repo[r]
590 590 except error.FilteredIndexError:
591 591 raise
592 592 except (ValueError, OverflowError, IndexError):
593 593 pass
594 594
595 595 if len(symbol) == 40:
596 596 try:
597 597 node = bin(symbol)
598 598 rev = repo.changelog.rev(node)
599 599 return repo[rev]
600 600 except error.FilteredLookupError:
601 601 raise
602 602 except (TypeError, LookupError):
603 603 pass
604 604
605 605 # look up bookmarks through the name interface
606 606 try:
607 607 node = repo.names.singlenode(repo, symbol)
608 608 rev = repo.changelog.rev(node)
609 609 return repo[rev]
610 610 except KeyError:
611 611 pass
612 612
613 613 node = resolvehexnodeidprefix(repo, symbol)
614 614 if node is not None:
615 615 rev = repo.changelog.rev(node)
616 616 return repo[rev]
617 617
618 618 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
619 619
620 620 except error.WdirUnsupported:
621 621 return repo[None]
622 622 except (error.FilteredIndexError, error.FilteredLookupError,
623 623 error.FilteredRepoLookupError):
624 624 raise _filterederror(repo, symbol)
625 625
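# Illustrative lookups (the symbols below are assumptions); each call
# returns a changectx or raises RepoLookupError:
#
#   ctx = revsymbol(repo, '.')
#   ctx = revsymbol(repo, 'my-bookmark')
#   ctx = revsymbol(repo, 'd4e8f7a1')   # unambiguous hash prefix
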
626 626 def _filterederror(repo, changeid):
627 627 """build an exception to be raised about a filtered changeid
628 628
629 629 This is extracted into a function to help extensions (eg: evolve)
630 630 experiment with various message variants."""
631 631 if repo.filtername.startswith('visible'):
632 632
633 633 # Check if the changeset is obsolete
634 634 unfilteredrepo = repo.unfiltered()
635 635 ctx = revsymbol(unfilteredrepo, changeid)
636 636
637 637 # If the changeset is obsolete, enrich the message with the reason
638 638 # that made this changeset not visible
639 639 if ctx.obsolete():
640 640 msg = obsutil._getfilteredreason(repo, changeid, ctx)
641 641 else:
642 642 msg = _("hidden revision '%s'") % changeid
643 643
644 644 hint = _('use --hidden to access hidden revisions')
645 645
646 646 return error.FilteredRepoLookupError(msg, hint=hint)
647 647 msg = _("filtered revision '%s' (not in '%s' subset)")
648 648 msg %= (changeid, repo.filtername)
649 649 return error.FilteredRepoLookupError(msg)
650 650
651 651 def revsingle(repo, revspec, default='.', localalias=None):
652 652 if not revspec and revspec != 0:
653 653 return repo[default]
654 654
655 655 l = revrange(repo, [revspec], localalias=localalias)
656 656 if not l:
657 657 raise error.Abort(_('empty revision set'))
658 658 return repo[l.last()]
659 659
660 660 def _pairspec(revspec):
661 661 tree = revsetlang.parse(revspec)
662 662 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
663 663
664 664 def revpair(repo, revs):
665 665 if not revs:
666 666 return repo['.'], repo[None]
667 667
668 668 l = revrange(repo, revs)
669 669
670 670 if not l:
671 671 first = second = None
672 672 elif l.isascending():
673 673 first = l.min()
674 674 second = l.max()
675 675 elif l.isdescending():
676 676 first = l.max()
677 677 second = l.min()
678 678 else:
679 679 first = l.first()
680 680 second = l.last()
681 681
682 682 if first is None:
683 683 raise error.Abort(_('empty revision range'))
684 684 if (first == second and len(revs) >= 2
685 685 and not all(revrange(repo, [r]) for r in revs)):
686 686 raise error.Abort(_('empty revision on one side of range'))
687 687
688 688 # if top-level is range expression, the result must always be a pair
689 689 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
690 690 return repo[first], repo[None]
691 691
692 692 return repo[first], repo[second]
693 693
694 694 def revrange(repo, specs, localalias=None):
695 695 """Execute 1 to many revsets and return the union.
696 696
697 697 This is the preferred mechanism for executing revsets using user-specified
698 698 config options, such as revset aliases.
699 699
700 700 The revsets specified by ``specs`` will be executed via a chained ``OR``
701 701 expression. If ``specs`` is empty, an empty result is returned.
702 702
703 703 ``specs`` can contain integers, in which case they are assumed to be
704 704 revision numbers.
705 705
706 706 It is assumed the revsets are already formatted. If you have arguments
707 707 that need to be expanded in the revset, call ``revsetlang.formatspec()``
708 708 and pass the result as an element of ``specs``.
709 709
710 710 Specifying a single revset is allowed.
711 711
712 712 Returns a ``revset.abstractsmartset`` which is a list-like interface over
713 713 integer revisions.
714 714 """
715 715 allspecs = []
716 716 for spec in specs:
717 717 if isinstance(spec, int):
718 718 spec = revsetlang.formatspec('rev(%d)', spec)
719 719 allspecs.append(spec)
720 720 return repo.anyrevs(allspecs, user=True, localalias=localalias)
721 721
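# Illustrative sketch: union two user-supplied revsets into one smartset
# (the revset strings are assumptions).
#
#   revs = revrange(repo, ['heads(default)', 'bookmark()'])
#   for rev in revs:
#       ui.write('%d\n' % rev)
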
722 722 def meaningfulparents(repo, ctx):
723 723 """Return list of meaningful (or all if debug) parentrevs for rev.
724 724
725 725 For merges (two non-nullrev revisions) both parents are meaningful.
726 726 Otherwise the first parent revision is considered meaningful if it
727 727 is not the preceding revision.
728 728 """
729 729 parents = ctx.parents()
730 730 if len(parents) > 1:
731 731 return parents
732 732 if repo.ui.debugflag:
733 733 return [parents[0], repo['null']]
734 734 if parents[0].rev() >= intrev(ctx) - 1:
735 735 return []
736 736 return parents
737 737
738 738 def expandpats(pats):
739 739 '''Expand bare globs when running on windows.
740 740 On posix we assume it has already been done by sh.'''
741 741 if not util.expandglobs:
742 742 return list(pats)
743 743 ret = []
744 744 for kindpat in pats:
745 745 kind, pat = matchmod._patsplit(kindpat, None)
746 746 if kind is None:
747 747 try:
748 748 globbed = glob.glob(pat)
749 749 except re.error:
750 750 globbed = [pat]
751 751 if globbed:
752 752 ret.extend(globbed)
753 753 continue
754 754 ret.append(kindpat)
755 755 return ret
756 756
757 757 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
758 758 badfn=None):
759 759 '''Return a matcher and the patterns that were used.
760 760 The matcher will warn about bad matches, unless an alternate badfn callback
761 761 is provided.'''
762 762 if pats == ("",):
763 763 pats = []
764 764 if opts is None:
765 765 opts = {}
766 766 if not globbed and default == 'relpath':
767 767 pats = expandpats(pats or [])
768 768
769 769 def bad(f, msg):
770 770 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
771 771
772 772 if badfn is None:
773 773 badfn = bad
774 774
775 775 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
776 776 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
777 777
778 778 if m.always():
779 779 pats = []
780 780 return m, pats
781 781
782 782 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
783 783 badfn=None):
784 784 '''Return a matcher that will warn about bad matches.'''
785 785 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
786 786
787 787 def matchall(repo):
788 788 '''Return a matcher that will efficiently match everything.'''
789 789 return matchmod.always(repo.root, repo.getcwd())
790 790
791 791 def matchfiles(repo, files, badfn=None):
792 792 '''Return a matcher that will efficiently match exactly these files.'''
793 793 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
794 794
795 795 def parsefollowlinespattern(repo, rev, pat, msg):
796 796 """Return a file name from `pat` pattern suitable for usage in followlines
797 797 logic.
798 798 """
799 799 if not matchmod.patkind(pat):
800 800 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
801 801 else:
802 802 ctx = repo[rev]
803 803 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
804 804 files = [f for f in ctx if m(f)]
805 805 if len(files) != 1:
806 806 raise error.ParseError(msg)
807 807 return files[0]
808 808
809 809 def origpath(ui, repo, filepath):
810 810 '''customize where .orig files are created
811 811
812 812 Fetch user defined path from config file: [ui] origbackuppath = <path>
813 813 Fall back to default (filepath with .orig suffix) if not specified
814 814 '''
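# Illustrative config sketch (the path value is an assumption): with
#
#   [ui]
#   origbackuppath = .hg/origbackups
#
# backups land under .hg/origbackups instead of next to the original
# files with a .orig suffix.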
815 815 origbackuppath = ui.config('ui', 'origbackuppath')
816 816 if not origbackuppath:
817 817 return filepath + ".orig"
818 818
819 819 # Convert filepath from an absolute path into a path inside the repo.
820 820 filepathfromroot = util.normpath(os.path.relpath(filepath,
821 821 start=repo.root))
822 822
823 823 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
824 824 origbackupdir = origvfs.dirname(filepathfromroot)
825 825 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
826 826 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
827 827
828 828 # Remove any files that conflict with the backup file's path
829 829 for f in reversed(list(util.finddirs(filepathfromroot))):
830 830 if origvfs.isfileorlink(f):
831 831 ui.note(_('removing conflicting file: %s\n')
832 832 % origvfs.join(f))
833 833 origvfs.unlink(f)
834 834 break
835 835
836 836 origvfs.makedirs(origbackupdir)
837 837
838 838 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
839 839 ui.note(_('removing conflicting directory: %s\n')
840 840 % origvfs.join(filepathfromroot))
841 841 origvfs.rmtree(filepathfromroot, forcibly=True)
842 842
843 843 return origvfs.join(filepathfromroot)
844 844
845 845 class _containsnode(object):
846 846 """proxy __contains__(node) to container.__contains__ which accepts revs"""
847 847
848 848 def __init__(self, repo, revcontainer):
849 849 self._torev = repo.changelog.rev
850 850 self._revcontains = revcontainer.__contains__
851 851
852 852 def __contains__(self, node):
853 853 return self._revcontains(self._torev(node))
854 854
855 855 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
856 856 fixphase=False, targetphase=None, backup=True):
857 857 """do common cleanups when old nodes are replaced by new nodes
858 858
859 859 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
860 860 (we might also want to move working directory parent in the future)
861 861
862 862 By default, bookmark moves are calculated automatically from 'replacements',
863 863 but 'moves' can be used to override that. Also, 'moves' may include
864 864 additional bookmark moves that should not have associated obsmarkers.
865 865
866 866 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
867 867 have replacements. operation is a string, like "rebase".
868 868
869 869 metadata is a dictionary containing metadata to be stored in obsmarkers if
870 870 obsolescence is enabled.
871 871 """
872 872 assert fixphase or targetphase is None
873 873 if not replacements and not moves:
874 874 return
875 875
876 876 # translate mapping's other forms
877 877 if not util.safehasattr(replacements, 'items'):
878 878 replacements = {n: () for n in replacements}
879 879
880 880 # Calculate bookmark movements
881 881 if moves is None:
882 882 moves = {}
883 883 # Unfiltered repo is needed since nodes in replacements might be hidden.
884 884 unfi = repo.unfiltered()
885 885 for oldnode, newnodes in replacements.items():
886 886 if oldnode in moves:
887 887 continue
888 888 if len(newnodes) > 1:
889 889 # usually a split, take the one with biggest rev number
890 890 newnode = next(unfi.set('max(%ln)', newnodes)).node()
891 891 elif len(newnodes) == 0:
892 892 # move bookmark backwards
893 893 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
894 894 list(replacements)))
895 895 if roots:
896 896 newnode = roots[0].node()
897 897 else:
898 898 newnode = nullid
899 899 else:
900 900 newnode = newnodes[0]
901 901 moves[oldnode] = newnode
902 902
903 903 allnewnodes = [n for ns in replacements.values() for n in ns]
904 904 toretract = {}
905 905 toadvance = {}
906 906 if fixphase:
907 907 precursors = {}
908 908 for oldnode, newnodes in replacements.items():
909 909 for newnode in newnodes:
910 910 precursors.setdefault(newnode, []).append(oldnode)
911 911
912 912 allnewnodes.sort(key=lambda n: unfi[n].rev())
913 913 newphases = {}
914 914 def phase(ctx):
915 915 return newphases.get(ctx.node(), ctx.phase())
916 916 for newnode in allnewnodes:
917 917 ctx = unfi[newnode]
918 918 parentphase = max(phase(p) for p in ctx.parents())
919 919 if targetphase is None:
920 920 oldphase = max(unfi[oldnode].phase()
921 921 for oldnode in precursors[newnode])
922 922 newphase = max(oldphase, parentphase)
923 923 else:
924 924 newphase = max(targetphase, parentphase)
925 925 newphases[newnode] = newphase
926 926 if newphase > ctx.phase():
927 927 toretract.setdefault(newphase, []).append(newnode)
928 928 elif newphase < ctx.phase():
929 929 toadvance.setdefault(newphase, []).append(newnode)
930 930
931 931 with repo.transaction('cleanup') as tr:
932 932 # Move bookmarks
933 933 bmarks = repo._bookmarks
934 934 bmarkchanges = []
935 935 for oldnode, newnode in moves.items():
936 936 oldbmarks = repo.nodebookmarks(oldnode)
937 937 if not oldbmarks:
938 938 continue
939 939 from . import bookmarks # avoid import cycle
940 940 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
941 941 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
942 942 hex(oldnode), hex(newnode)))
943 943 # Delete divergent bookmarks being parents of related newnodes
944 944 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
945 945 allnewnodes, newnode, oldnode)
946 946 deletenodes = _containsnode(repo, deleterevs)
947 947 for name in oldbmarks:
948 948 bmarkchanges.append((name, newnode))
949 949 for b in bookmarks.divergent2delete(repo, deletenodes, name):
950 950 bmarkchanges.append((b, None))
951 951
952 952 if bmarkchanges:
953 953 bmarks.applychanges(repo, tr, bmarkchanges)
954 954
955 955 for phase, nodes in toretract.items():
956 956 phases.retractboundary(repo, tr, phase, nodes)
957 957 for phase, nodes in toadvance.items():
958 958 phases.advanceboundary(repo, tr, phase, nodes)
959 959
960 960 # Obsolete or strip nodes
961 961 if obsolete.isenabled(repo, obsolete.createmarkersopt):
962 962 # If a node is already obsoleted, and we want to obsolete it
963 963 # without a successor, skip that obsolete request since it's
964 964 # unnecessary. That's the "if s or not isobs(n)" check below.
965 965 # Also sort the nodes in topological order; that might be useful for
966 966 # some obsstore logic.
967 967 # NOTE: the filtering and sorting might belong to createmarkers.
968 968 isobs = unfi.obsstore.successors.__contains__
969 969 torev = unfi.changelog.rev
970 970 sortfunc = lambda ns: torev(ns[0])
971 971 rels = [(unfi[n], tuple(unfi[m] for m in s))
972 972 for n, s in sorted(replacements.items(), key=sortfunc)
973 973 if s or not isobs(n)]
974 974 if rels:
975 975 obsolete.createmarkers(repo, rels, operation=operation,
976 976 metadata=metadata)
977 977 else:
978 978 from . import repair # avoid import cycle
979 979 tostrip = list(replacements)
980 980 if tostrip:
981 981 repair.delayedstrip(repo.ui, repo, tostrip, operation,
982 982 backup=backup)
983 983
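# Illustrative sketch: a rewriting command recording that one node replaced
# another (the operation name and node variables are assumptions):
#
#   with repo.wlock(), repo.lock():
#       cleanupnodes(repo, {oldnode: (newnode,)}, 'myrewrite')
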
984 984 def addremove(repo, matcher, prefix, opts=None):
985 985 if opts is None:
986 986 opts = {}
987 987 m = matcher
988 988 dry_run = opts.get('dry_run')
989 989 try:
990 990 similarity = float(opts.get('similarity') or 0)
991 991 except ValueError:
992 992 raise error.Abort(_('similarity must be a number'))
993 993 if similarity < 0 or similarity > 100:
994 994 raise error.Abort(_('similarity must be between 0 and 100'))
995 995 similarity /= 100.0
996 996
997 997 ret = 0
998 998 join = lambda f: os.path.join(prefix, f)
999 999
1000 1000 wctx = repo[None]
1001 1001 for subpath in sorted(wctx.substate):
1002 1002 submatch = matchmod.subdirmatcher(subpath, m)
1003 1003 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1004 1004 sub = wctx.sub(subpath)
1005 1005 try:
1006 1006 if sub.addremove(submatch, prefix, opts):
1007 1007 ret = 1
1008 1008 except error.LookupError:
1009 1009 repo.ui.status(_("skipping missing subrepository: %s\n")
1010 1010 % join(subpath))
1011 1011
1012 1012 rejected = []
1013 1013 def badfn(f, msg):
1014 1014 if f in m.files():
1015 1015 m.bad(f, msg)
1016 1016 rejected.append(f)
1017 1017
1018 1018 badmatch = matchmod.badmatch(m, badfn)
1019 1019 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1020 1020 badmatch)
1021 1021
1022 1022 unknownset = set(unknown + forgotten)
1023 1023 toprint = unknownset.copy()
1024 1024 toprint.update(deleted)
1025 1025 for abs in sorted(toprint):
1026 1026 if repo.ui.verbose or not m.exact(abs):
1027 1027 if abs in unknownset:
1028 1028 status = _('adding %s\n') % m.uipath(abs)
1029 1029 label = 'addremove.added'
1030 1030 else:
1031 1031 status = _('removing %s\n') % m.uipath(abs)
1032 1032 label = 'addremove.removed'
1033 1033 repo.ui.status(status, label=label)
1034 1034
1035 1035 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1036 1036 similarity)
1037 1037
1038 1038 if not dry_run:
1039 1039 _markchanges(repo, unknown + forgotten, deleted, renames)
1040 1040
1041 1041 for f in rejected:
1042 1042 if f in m.files():
1043 1043 return 1
1044 1044 return ret
1045 1045
1046 1046 def marktouched(repo, files, similarity=0.0):
1047 1047 '''Assert that files have somehow been operated upon. files are relative to
1048 1048 the repo root.'''
1049 1049 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1050 1050 rejected = []
1051 1051
1052 1052 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1053 1053
1054 1054 if repo.ui.verbose:
1055 1055 unknownset = set(unknown + forgotten)
1056 1056 toprint = unknownset.copy()
1057 1057 toprint.update(deleted)
1058 1058 for abs in sorted(toprint):
1059 1059 if abs in unknownset:
1060 1060 status = _('adding %s\n') % abs
1061 1061 else:
1062 1062 status = _('removing %s\n') % abs
1063 1063 repo.ui.status(status)
1064 1064
1065 1065 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1066 1066 similarity)
1067 1067
1068 1068 _markchanges(repo, unknown + forgotten, deleted, renames)
1069 1069
1070 1070 for f in rejected:
1071 1071 if f in m.files():
1072 1072 return 1
1073 1073 return 0
1074 1074
1075 1075 def _interestingfiles(repo, matcher):
1076 1076 '''Walk dirstate with matcher, looking for files that addremove would care
1077 1077 about.
1078 1078
1079 1079 This is different from dirstate.status because it doesn't care about
1080 1080 whether files are modified or clean.'''
1081 1081 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1082 1082 audit_path = pathutil.pathauditor(repo.root, cached=True)
1083 1083
1084 1084 ctx = repo[None]
1085 1085 dirstate = repo.dirstate
1086 1086 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1087 1087 unknown=True, ignored=False, full=False)
1088 1088 for abs, st in walkresults.iteritems():
1089 1089 dstate = dirstate[abs]
1090 1090 if dstate == '?' and audit_path.check(abs):
1091 1091 unknown.append(abs)
1092 1092 elif dstate != 'r' and not st:
1093 1093 deleted.append(abs)
1094 1094 elif dstate == 'r' and st:
1095 1095 forgotten.append(abs)
1096 1096 # for finding renames
1097 1097 elif dstate == 'r' and not st:
1098 1098 removed.append(abs)
1099 1099 elif dstate == 'a':
1100 1100 added.append(abs)
1101 1101
1102 1102 return added, unknown, deleted, removed, forgotten
1103 1103
1104 1104 def _findrenames(repo, matcher, added, removed, similarity):
1105 1105 '''Find renames from removed files to added ones.'''
1106 1106 renames = {}
1107 1107 if similarity > 0:
1108 1108 for old, new, score in similar.findrenames(repo, added, removed,
1109 1109 similarity):
1110 1110 if (repo.ui.verbose or not matcher.exact(old)
1111 1111 or not matcher.exact(new)):
1112 1112 repo.ui.status(_('recording removal of %s as rename to %s '
1113 1113 '(%d%% similar)\n') %
1114 1114 (matcher.rel(old), matcher.rel(new),
1115 1115 score * 100))
1116 1116 renames[new] = old
1117 1117 return renames
1118 1118
1119 1119 def _markchanges(repo, unknown, deleted, renames):
1120 1120 '''Marks the files in unknown as added, the files in deleted as removed,
1121 1121 and the files in renames as copied.'''
1122 1122 wctx = repo[None]
1123 1123 with repo.wlock():
1124 1124 wctx.forget(deleted)
1125 1125 wctx.add(unknown)
1126 1126 for new, old in renames.iteritems():
1127 1127 wctx.copy(old, new)
1128 1128
1129 1129 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1130 1130 """Update the dirstate to reflect the intent of copying src to dst. For
1131 1131 different reasons it might not end with dst being marked as copied from src.
1132 1132 """
1133 1133 origsrc = repo.dirstate.copied(src) or src
1134 1134 if dst == origsrc: # copying back a copy?
1135 1135 if repo.dirstate[dst] not in 'mn' and not dryrun:
1136 1136 repo.dirstate.normallookup(dst)
1137 1137 else:
1138 1138 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1139 1139 if not ui.quiet:
1140 1140 ui.warn(_("%s has not been committed yet, so no copy "
1141 1141 "data will be stored for %s.\n")
1142 1142 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1143 1143 if repo.dirstate[dst] in '?r' and not dryrun:
1144 1144 wctx.add([dst])
1145 1145 elif not dryrun:
1146 1146 wctx.copy(origsrc, dst)
1147 1147
1148 1148 def writerequires(opener, requirements):
1149 1149 with opener('requires', 'w') as fp:
1150 1150 for r in sorted(requirements):
1151 1151 fp.write("%s\n" % r)
1152 1152
1153 1153 class filecachesubentry(object):
1154 1154 def __init__(self, path, stat):
1155 1155 self.path = path
1156 1156 self.cachestat = None
1157 1157 self._cacheable = None
1158 1158
1159 1159 if stat:
1160 1160 self.cachestat = filecachesubentry.stat(self.path)
1161 1161
1162 1162 if self.cachestat:
1163 1163 self._cacheable = self.cachestat.cacheable()
1164 1164 else:
1165 1165 # None means we don't know yet
1166 1166 self._cacheable = None
1167 1167
1168 1168 def refresh(self):
1169 1169 if self.cacheable():
1170 1170 self.cachestat = filecachesubentry.stat(self.path)
1171 1171
1172 1172 def cacheable(self):
1173 1173 if self._cacheable is not None:
1174 1174 return self._cacheable
1175 1175
1176 1176 # we don't know yet, assume it is for now
1177 1177 return True
1178 1178
1179 1179 def changed(self):
1180 1180 # no point in going further if we can't cache it
1181 1181 if not self.cacheable():
1182 1182 return True
1183 1183
1184 1184 newstat = filecachesubentry.stat(self.path)
1185 1185
1186 1186 # we may not know if it's cacheable yet, check again now
1187 1187 if newstat and self._cacheable is None:
1188 1188 self._cacheable = newstat.cacheable()
1189 1189
1190 1190 # check again
1191 1191 if not self._cacheable:
1192 1192 return True
1193 1193
1194 1194 if self.cachestat != newstat:
1195 1195 self.cachestat = newstat
1196 1196 return True
1197 1197 else:
1198 1198 return False
1199 1199
1200 1200 @staticmethod
1201 1201 def stat(path):
1202 1202 try:
1203 1203 return util.cachestat(path)
1204 1204 except OSError as e:
1205 1205 if e.errno != errno.ENOENT:
1206 1206 raise
1207 1207
1208 1208 class filecacheentry(object):
1209 1209 def __init__(self, paths, stat=True):
1210 1210 self._entries = []
1211 1211 for path in paths:
1212 1212 self._entries.append(filecachesubentry(path, stat))
1213 1213
1214 1214 def changed(self):
1215 1215 '''true if any entry has changed'''
1216 1216 for entry in self._entries:
1217 1217 if entry.changed():
1218 1218 return True
1219 1219 return False
1220 1220
1221 1221 def refresh(self):
1222 1222 for entry in self._entries:
1223 1223 entry.refresh()
1224 1224
1225 1225 class filecache(object):
1226 1226 """A property like decorator that tracks files under .hg/ for updates.
1227 1227
1228 1228 On first access, the files defined as arguments are stat()ed and the
1229 1229 results cached. The decorated function is called. The results are stashed
1230 1230 away in a ``_filecache`` dict on the object whose method is decorated.
1231 1231
1232 1232 On subsequent access, the cached result is returned.
1233 1233
1234 1234 On external property set operations, stat() calls are performed and the new
1235 1235 value is cached.
1236 1236
1237 1237 On property delete operations, cached data is removed.
1238 1238
1239 1239 When using the property API, cached data is always returned, if available:
1240 1240 no stat() is performed to check if the file has changed and if the function
1241 1241 needs to be called to reflect file changes.
1242 1242
1243 1243 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1244 1244 can populate an entry before the property's getter is called. In this case,
1245 1245 entries in ``_filecache`` will be used during property operations,
1246 1246 if available. If the underlying file changes, it is up to external callers
1247 1247 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1248 1248 method result as well as possibly calling ``del obj._filecache[attr]`` to
1249 1249 remove the ``filecacheentry``.
1250 1250 """
1251 1251
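# Illustrative subclass sketch (names are assumptions): the host object must
# provide a _filecache dict, and join() resolves tracked paths via its vfs.
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class repo(object):
#       @repofilecache('bookmarks')
#       def _bookmarks(self):
#           return readbookmarksfile(self)   # hypothetical helper
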
1252 1252 def __init__(self, *paths):
1253 1253 self.paths = paths
1254 1254
1255 1255 def join(self, obj, fname):
1256 1256 """Used to compute the runtime path of a cached file.
1257 1257
1258 1258 Users should subclass filecache and provide their own version of this
1259 1259 function to call the appropriate join function on 'obj' (an instance
1260 1260 of the class whose member function was decorated).
1261 1261 """
1262 1262 raise NotImplementedError
1263 1263
1264 1264 def __call__(self, func):
1265 1265 self.func = func
1266 1266 self.sname = func.__name__
1267 1267 self.name = pycompat.sysbytes(self.sname)
1268 1268 return self
1269 1269
1270 1270 def __get__(self, obj, type=None):
1271 1271 # if accessed on the class, return the descriptor itself.
1272 1272 if obj is None:
1273 1273 return self
1274 1274 # do we need to check if the file changed?
1275 1275 if self.sname in obj.__dict__:
1276 1276 assert self.name in obj._filecache, self.name
1277 1277 return obj.__dict__[self.sname]
1278 1278
1279 1279 entry = obj._filecache.get(self.name)
1280 1280
1281 1281 if entry:
1282 1282 if entry.changed():
1283 1283 entry.obj = self.func(obj)
1284 1284 else:
1285 1285 paths = [self.join(obj, path) for path in self.paths]
1286 1286
1287 1287 # We stat -before- creating the object so our cache doesn't lie if
1288 1288 # a writer modified the file between the time we read and stat it
1289 1289 entry = filecacheentry(paths, True)
1290 1290 entry.obj = self.func(obj)
1291 1291
1292 1292 obj._filecache[self.name] = entry
1293 1293
1294 1294 obj.__dict__[self.sname] = entry.obj
1295 1295 return entry.obj
1296 1296
1297 1297 def __set__(self, obj, value):
1298 1298 if self.name not in obj._filecache:
1299 1299 # we add an entry for the missing value because X in __dict__
1300 1300 # implies X in _filecache
1301 1301 paths = [self.join(obj, path) for path in self.paths]
1302 1302 ce = filecacheentry(paths, False)
1303 1303 obj._filecache[self.name] = ce
1304 1304 else:
1305 1305 ce = obj._filecache[self.name]
1306 1306
1307 1307 ce.obj = value # update cached copy
1308 1308 obj.__dict__[self.sname] = value # update copy returned by obj.x
1309 1309
1310 1310 def __delete__(self, obj):
1311 1311 try:
1312 1312 del obj.__dict__[self.sname]
1313 1313 except KeyError:
1314 1314 raise AttributeError(self.sname)
1315 1315
1316 1316 def extdatasource(repo, source):
1317 1317 """Gather a map of rev -> value dict from the specified source
1318 1318
1319 1319 A source spec is treated as a URL, with a special case shell: type
1320 1320 for parsing the output from a shell command.
1321 1321
1322 1322 The data is parsed as a series of newline-separated records where
1323 1323 each record is a revision specifier optionally followed by a space
1324 1324 and a freeform string value. If the revision is known locally, it
1325 1325 is converted to a rev, otherwise the record is skipped.
1326 1326
1327 1327 Note that both key and value are treated as UTF-8 and converted to
1328 1328 the local encoding. This allows uniformity between local and
1329 1329 remote data sources.
1330 1330 """
1331 1331
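# Illustrative config sketch (the section contents are assumptions):
#
#   [extdata]
#   buginfo = shell:python buginfo.py
#
# extdatasource(repo, "buginfo") would then map each locally-known revision
# named in the command's output to its accompanying string.
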
1332 1332 spec = repo.ui.config("extdata", source)
1333 1333 if not spec:
1334 1334 raise error.Abort(_("unknown extdata source '%s'") % source)
1335 1335
1336 1336 data = {}
1337 1337 src = proc = None
1338 1338 try:
1339 1339 if spec.startswith("shell:"):
1340 1340 # external commands should be run relative to the repo root
1341 1341 cmd = spec[6:]
1342 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
1342 proc = subprocess.Popen(procutil.tonativestr(cmd),
1343 1343 shell=True, bufsize=-1,
1344 1344 close_fds=procutil.closefds,
1345 1345 stdout=subprocess.PIPE,
1346 1346 cwd=procutil.tonativestr(repo.root))
1347 1347 src = proc.stdout
1348 1348 else:
1349 1349 # treat as a URL or file
1350 1350 src = url.open(repo.ui, spec)
1351 1351 for l in src:
1352 1352 if " " in l:
1353 1353 k, v = l.strip().split(" ", 1)
1354 1354 else:
1355 1355 k, v = l.strip(), ""
1356 1356
1357 1357 k = encoding.tolocal(k)
1358 1358 try:
1359 1359 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1360 1360 except (error.LookupError, error.RepoLookupError):
1361 1361 pass # we ignore data for nodes that don't exist locally
1362 1362 finally:
1363 1363 if proc:
1364 1364 proc.communicate()
1365 1365 if src:
1366 1366 src.close()
1367 1367 if proc and proc.returncode != 0:
1368 1368 raise error.Abort(_("extdata command '%s' failed: %s")
1369 1369 % (cmd, procutil.explainexit(proc.returncode)))
1370 1370
1371 1371 return data
1372 1372
1373 1373 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1374 1374 if lock is None:
1375 1375 raise error.LockInheritanceContractViolation(
1376 1376 'lock can only be inherited while held')
1377 1377 if environ is None:
1378 1378 environ = {}
1379 1379 with lock.inherit() as locker:
1380 1380 environ[envvar] = locker
1381 1381 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1382 1382
1383 1383 def wlocksub(repo, cmd, *args, **kwargs):
1384 1384 """run cmd as a subprocess that allows inheriting repo's wlock
1385 1385
1386 1386 This can only be called while the wlock is held. This takes all the
1387 1387 arguments that ui.system does, and returns the exit code of the
1388 1388 subprocess."""
1389 1389 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1390 1390 **kwargs)
1391 1391
1392 1392 class progress(object):
1393 1393 def __init__(self, ui, topic, unit="", total=None):
1394 1394 self.ui = ui
1395 1395 self.pos = 0
1396 1396 self.topic = topic
1397 1397 self.unit = unit
1398 1398 self.total = total
1399 1399
1400 1400 def __enter__(self):
1401 1401 return self
1402 1402
1403 1403 def __exit__(self, exc_type, exc_value, exc_tb):
1404 1404 self.complete()
1405 1405
1406 1406 def update(self, pos, item="", total=None):
1407 1407 assert pos is not None
1408 1408 if total:
1409 1409 self.total = total
1410 1410 self.pos = pos
1411 1411 self._print(item)
1412 1412
1413 1413 def increment(self, step=1, item="", total=None):
1414 1414 self.update(self.pos + step, item, total)
1415 1415
1416 1416 def complete(self):
1417 1417 self.ui.progress(self.topic, None)
1418 1418
1419 1419 def _print(self, item):
1420 1420 self.ui.progress(self.topic, self.pos, item, self.unit,
1421 1421 self.total)
1422 1422
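# Illustrative sketch of the context-manager form (the topic, unit and
# iterable are assumptions):
#
#   with progress(ui, 'checking', unit='files', total=len(files)) as p:
#       for f in files:
#           p.increment(item=f)
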
1423 1423 def gdinitconfig(ui):
1424 1424 """helper function to know if a repo should be created as general delta
1425 1425 """
1426 1426 # experimental config: format.generaldelta
1427 1427 return (ui.configbool('format', 'generaldelta')
1428 1428 or ui.configbool('format', 'usegeneraldelta')
1429 1429 or ui.configbool('format', 'sparse-revlog'))
1430 1430
1431 1431 def gddeltaconfig(ui):
1432 1432 """helper function to know if incoming delta should be optimised
1433 1433 """
1434 1434 # experimental config: format.generaldelta
1435 1435 return ui.configbool('format', 'generaldelta')
1436 1436
1437 1437 class simplekeyvaluefile(object):
1438 1438 """A simple file with key=value lines
1439 1439
1440 1440 Keys must be alphanumeric and start with a letter; values must not
1441 1441 contain '\n' characters"""
1442 1442 firstlinekey = '__firstline'
1443 1443
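# Illustrative round trip (the file name and keys are assumptions):
#
#   f = simplekeyvaluefile(repo.vfs, 'last-message')
#   f.write({'user': 'alice'}, firstline='v1')
#   d = f.read(firstlinenonkeyval=True)
#   # d == {'__firstline': 'v1', 'user': 'alice'}
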
1444 1444 def __init__(self, vfs, path, keys=None):
1445 1445 self.vfs = vfs
1446 1446 self.path = path
1447 1447
1448 1448 def read(self, firstlinenonkeyval=False):
1449 1449 """Read the contents of a simple key-value file
1450 1450
1451 1451 'firstlinenonkeyval' indicates whether the first line of file should
1452 1452 be treated as a key-value pair or returned fully under the
1453 1453 __firstline key."""
1454 1454 lines = self.vfs.readlines(self.path)
1455 1455 d = {}
1456 1456 if firstlinenonkeyval:
1457 1457 if not lines:
1458 1458 e = _("empty simplekeyvalue file")
1459 1459 raise error.CorruptedState(e)
1460 1460 # we don't want to include '\n' in the __firstline
1461 1461 d[self.firstlinekey] = lines[0][:-1]
1462 1462 del lines[0]
1463 1463
1464 1464 try:
1465 1465 # the 'if line.strip()' part prevents us from failing on empty
1466 1466 # lines which only contain '\n' and therefore are not skipped
1467 1467 # by 'if line'
1468 1468 updatedict = dict(line[:-1].split('=', 1) for line in lines
1469 1469 if line.strip())
1470 1470 if self.firstlinekey in updatedict:
1471 1471 e = _("%r can't be used as a key")
1472 1472 raise error.CorruptedState(e % self.firstlinekey)
1473 1473 d.update(updatedict)
1474 1474 except ValueError as e:
1475 1475 raise error.CorruptedState(str(e))
1476 1476 return d
1477 1477
1478 1478 def write(self, data, firstline=None):
1479 1479 """Write key=>value mapping to a file
1480 1480 data is a dict. Keys must be alphanumeric and start with a letter.
1481 1481 Values must not contain newline characters.
1482 1482
1483 1483 If 'firstline' is not None, it is written to file before
1484 1484 everything else, as it is, not in a key=value form"""
1485 1485 lines = []
1486 1486 if firstline is not None:
1487 1487 lines.append('%s\n' % firstline)
1488 1488
1489 1489 for k, v in data.items():
1490 1490 if k == self.firstlinekey:
1491 1491 e = "key name '%s' is reserved" % self.firstlinekey
1492 1492 raise error.ProgrammingError(e)
1493 1493 if not k[0:1].isalpha():
1494 1494 e = "keys must start with a letter in a key-value file"
1495 1495 raise error.ProgrammingError(e)
1496 1496 if not k.isalnum():
1497 1497 e = "invalid key name in a simple key-value file"
1498 1498 raise error.ProgrammingError(e)
1499 1499 if '\n' in v:
1500 1500 e = "invalid value in a simple key-value file"
1501 1501 raise error.ProgrammingError(e)
1502 1502 lines.append("%s=%s\n" % (k, v))
1503 1503 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1504 1504 fp.write(''.join(lines))
1505 1505
1506 1506 _reportobsoletedsource = [
1507 1507 'debugobsolete',
1508 1508 'pull',
1509 1509 'push',
1510 1510 'serve',
1511 1511 'unbundle',
1512 1512 ]
1513 1513
1514 1514 _reportnewcssource = [
1515 1515 'pull',
1516 1516 'unbundle',
1517 1517 ]
1518 1518
1519 1519 def prefetchfiles(repo, revs, match):
1520 1520 """Invokes the registered file prefetch functions, allowing extensions to
1521 1521 ensure the corresponding files are available locally, before the command
1522 1522 uses them."""
1523 1523 if match:
1524 1524 # The command itself will complain about files that don't exist, so
1525 1525 # don't duplicate the message.
1526 1526 match = matchmod.badmatch(match, lambda fn, msg: None)
1527 1527 else:
1528 1528 match = matchall(repo)
1529 1529
1530 1530 fileprefetchhooks(repo, revs, match)
1531 1531
1532 1532 # a list of (repo, revs, match) prefetch functions
1533 1533 fileprefetchhooks = util.hooks()
1534 1534
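# Illustrative sketch: an extension could register a prefetch function (the
# extension name and body are assumptions):
#
#   def _prefetch(repo, revs, match):
#       pass  # fetch contents of matched files for the given revs
#   fileprefetchhooks.add('myextension', _prefetch)
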
1535 1535 # A marker that tells the evolve extension to suppress its own reporting
1536 1536 _reportstroubledchangesets = True
1537 1537
1538 1538 def registersummarycallback(repo, otr, txnname=''):
1539 1539 """register a callback to issue a summary after the transaction is closed
1540 1540 """
1541 1541 def txmatch(sources):
1542 1542 return any(txnname.startswith(source) for source in sources)
1543 1543
1544 1544 categories = []
1545 1545
1546 1546 def reportsummary(func):
1547 1547 """decorator for report callbacks."""
1548 1548 # The repoview life cycle is shorter than the one of the actual
1549 1549 # underlying repository. So the filtered object can die before the
1550 1550 # weakref is used, leading to trouble. We keep a reference to the
1551 1551 # unfiltered object and restore the filtering when retrieving the
1552 1552 # repository through the weakref.
1553 1553 filtername = repo.filtername
1554 1554 reporef = weakref.ref(repo.unfiltered())
1555 1555 def wrapped(tr):
1556 1556 repo = reporef()
1557 1557 if filtername:
1558 1558 repo = repo.filtered(filtername)
1559 1559 func(repo, tr)
1560 1560 newcat = '%02i-txnreport' % len(categories)
1561 1561 otr.addpostclose(newcat, wrapped)
1562 1562 categories.append(newcat)
1563 1563 return wrapped
1564 1564
1565 1565 if txmatch(_reportobsoletedsource):
1566 1566 @reportsummary
1567 1567 def reportobsoleted(repo, tr):
1568 1568 obsoleted = obsutil.getobsoleted(repo, tr)
1569 1569 if obsoleted:
1570 1570 repo.ui.status(_('obsoleted %i changesets\n')
1571 1571 % len(obsoleted))
1572 1572
1573 1573 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1574 1574 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1575 1575 instabilitytypes = [
1576 1576 ('orphan', 'orphan'),
1577 1577 ('phase-divergent', 'phasedivergent'),
1578 1578 ('content-divergent', 'contentdivergent'),
1579 1579 ]
1580 1580
1581 1581 def getinstabilitycounts(repo):
1582 1582 filtered = repo.changelog.filteredrevs
1583 1583 counts = {}
1584 1584 for instability, revset in instabilitytypes:
1585 1585 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1586 1586 filtered)
1587 1587 return counts
1588 1588
1589 1589 oldinstabilitycounts = getinstabilitycounts(repo)
1590 1590 @reportsummary
1591 1591 def reportnewinstabilities(repo, tr):
1592 1592 newinstabilitycounts = getinstabilitycounts(repo)
1593 1593 for instability, revset in instabilitytypes:
1594 1594 delta = (newinstabilitycounts[instability] -
1595 1595 oldinstabilitycounts[instability])
1596 1596 msg = getinstabilitymessage(delta, instability)
1597 1597 if msg:
1598 1598 repo.ui.warn(msg)
1599 1599
1600 1600 if txmatch(_reportnewcssource):
1601 1601 @reportsummary
1602 1602 def reportnewcs(repo, tr):
1603 1603 """Report the range of new revisions pulled/unbundled."""
1604 1604 origrepolen = tr.changes.get('origrepolen', len(repo))
1605 1605 if origrepolen >= len(repo):
1606 1606 return
1607 1607
1608 1608 # Compute the bounds of new revisions' range, excluding obsoletes.
1609 1609 unfi = repo.unfiltered()
1610 1610 revs = unfi.revs('%d: and not obsolete()', origrepolen)
1611 1611 if not revs:
1612 1612 # Got only obsoletes.
1613 1613 return
1614 1614 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1615 1615
1616 1616 if minrev == maxrev:
1617 1617 revrange = minrev
1618 1618 else:
1619 1619 revrange = '%s:%s' % (minrev, maxrev)
1620 1620 draft = len(repo.revs('%ld and draft()', revs))
1621 1621 secret = len(repo.revs('%ld and secret()', revs))
1622 1622 if not (draft or secret):
1623 1623 msg = _('new changesets %s\n') % revrange
1624 1624 elif draft and secret:
1625 1625 msg = _('new changesets %s (%d drafts, %d secrets)\n')
1626 1626 msg %= (revrange, draft, secret)
1627 1627 elif draft:
1628 1628 msg = _('new changesets %s (%d drafts)\n')
1629 1629 msg %= (revrange, draft)
1630 1630 elif secret:
1631 1631 msg = _('new changesets %s (%d secrets)\n')
1632 1632 msg %= (revrange, secret)
1633 1633 else:
1634 1634 raise error.ProgrammingError('entered unreachable condition')
1635 1635 repo.ui.status(msg)
1636 1636
1637 1637 @reportsummary
1638 1638 def reportphasechanges(repo, tr):
1639 1639 """Report statistics of phase changes for changesets pre-existing
1640 1640 pull/unbundle.
1641 1641 """
1642 1642 origrepolen = tr.changes.get('origrepolen', len(repo))
1643 1643 phasetracking = tr.changes.get('phases', {})
1644 1644 if not phasetracking:
1645 1645 return
1646 1646 published = [
1647 1647 rev for rev, (old, new) in phasetracking.iteritems()
1648 1648 if new == phases.public and rev < origrepolen
1649 1649 ]
1650 1650 if not published:
1651 1651 return
1652 1652 repo.ui.status(_('%d local changesets published\n')
1653 1653 % len(published))
1654 1654
1655 1655 def getinstabilitymessage(delta, instability):
1656 1656 """function to return the message to show warning about new instabilities
1657 1657
1658 1658 exists as a separate function so that extension can wrap to show more
1659 1659 information like how to fix instabilities"""
1660 1660 if delta > 0:
1661 1661 return _('%i new %s changesets\n') % (delta, instability)
1662 1662
1663 1663 def nodesummaries(repo, nodes, maxnumnodes=4):
1664 1664 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1665 1665 return ' '.join(short(h) for h in nodes)
1666 1666 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1667 1667 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1668 1668
1669 1669 def enforcesinglehead(repo, tr, desc):
1670 1670 """check that no named branch has multiple heads"""
1671 1671 if desc in ('strip', 'repair'):
1672 1672 # skip the logic during strip
1673 1673 return
1674 1674 visible = repo.filtered('visible')
1675 1675 # possible improvement: we could restrict the check to affected branch
1676 1676 for name, heads in visible.branchmap().iteritems():
1677 1677 if len(heads) > 1:
1678 1678 msg = _('rejecting multiple heads on branch "%s"')
1679 1679 msg %= name
1680 1680 hint = _('%d heads: %s')
1681 1681 hint %= (len(heads), nodesummaries(repo, heads))
1682 1682 raise error.Abort(msg, hint=hint)
1683 1683
1684 1684 def wrapconvertsink(sink):
1685 1685 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1686 1686 before it is used, whether or not the convert extension was formally loaded.
1687 1687 """
1688 1688 return sink
1689 1689
1690 1690 def unhidehashlikerevs(repo, specs, hiddentype):
1691 1691 """parse the user specs and unhide changesets whose hash or revision number
1692 1692 is passed.
1693 1693
1694 1694 hiddentype can be: 1) 'warn': warn while unhiding changesets
1695 1695 2) 'nowarn': don't warn while unhiding changesets
1696 1696
1697 1697 returns a repo object with the required changesets unhidden
1698 1698 """
1699 1699 if not repo.filtername or not repo.ui.configbool('experimental',
1700 1700 'directaccess'):
1701 1701 return repo
1702 1702
1703 1703 if repo.filtername not in ('visible', 'visible-hidden'):
1704 1704 return repo
1705 1705
1706 1706 symbols = set()
1707 1707 for spec in specs:
1708 1708 try:
1709 1709 tree = revsetlang.parse(spec)
1710 1710 except error.ParseError: # will be reported by scmutil.revrange()
1711 1711 continue
1712 1712
1713 1713 symbols.update(revsetlang.gethashlikesymbols(tree))
1714 1714
1715 1715 if not symbols:
1716 1716 return repo
1717 1717
1718 1718 revs = _getrevsfromsymbols(repo, symbols)
1719 1719
1720 1720 if not revs:
1721 1721 return repo
1722 1722
1723 1723 if hiddentype == 'warn':
1724 1724 unfi = repo.unfiltered()
1725 1725 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1726 1726 repo.ui.warn(_("warning: accessing hidden changesets for write "
1727 1727 "operation: %s\n") % revstr)
1728 1728
1729 1729 # we have to use a new filtername to separate branch/tags caches until we
1730 1730 # can disable these caches when revisions are dynamically pinned.
1731 1731 return repo.filtered('visible-hidden', revs)
1732 1732
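The direct-access machinery above is gated on two experimental knobs read with ui.configbool(): 'experimental.directaccess' here and 'experimental.directaccess.revnums' in _getrevsfromsymbols() below. A sketch of the hgrc stanza that enables both:

[experimental]
directaccess = True
directaccess.revnums = True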
1733 1733 def _getrevsfromsymbols(repo, symbols):
1734 1734 """parse the list of symbols and returns a set of revision numbers of hidden
1735 1735 changesets present in symbols"""
1736 1736 revs = set()
1737 1737 unfi = repo.unfiltered()
1738 1738 unficl = unfi.changelog
1739 1739 cl = repo.changelog
1740 1740 tiprev = len(unficl)
1741 1741 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1742 1742 for s in symbols:
1743 1743 try:
1744 1744 n = int(s)
1745 1745 if n <= tiprev:
1746 1746 if not allowrevnums:
1747 1747 continue
1748 1748 else:
1749 1749 if n not in cl:
1750 1750 revs.add(n)
1751 1751 continue
1752 1752 except ValueError:
1753 1753 pass
1754 1754
1755 1755 try:
1756 1756 s = resolvehexnodeidprefix(unfi, s)
1757 1757 except (error.LookupError, error.WdirUnsupported):
1758 1758 s = None
1759 1759
1760 1760 if s is not None:
1761 1761 rev = unficl.rev(s)
1762 1762 if rev not in cl:
1763 1763 revs.add(rev)
1764 1764
1765 1765 return revs
1766 1766
1767 1767 def bookmarkrevs(repo, mark):
1768 1768 """
1769 1769 Select revisions reachable by a given bookmark
1770 1770 """
1771 1771 return repo.revs("ancestors(bookmark(%s)) - "
1772 1772 "ancestors(head() and not bookmark(%s)) - "
1773 1773 "ancestors(bookmark() and not bookmark(%s))",
1774 1774 mark, mark, mark)
@@ -1,469 +1,469 b''
1 1 # procutil.py - utility for managing processes and executable environment
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 from __future__ import absolute_import
11 11
12 12 import contextlib
13 13 import imp
14 14 import io
15 15 import os
16 16 import signal
17 17 import subprocess
18 18 import sys
19 19 import time
20 20
21 21 from ..i18n import _
22 22
23 23 from .. import (
24 24 encoding,
25 25 error,
26 26 policy,
27 27 pycompat,
28 28 )
29 29
30 30 osutil = policy.importmod(r'osutil')
31 31
32 32 stderr = pycompat.stderr
33 33 stdin = pycompat.stdin
34 34 stdout = pycompat.stdout
35 35
36 36 def isatty(fp):
37 37 try:
38 38 return fp.isatty()
39 39 except AttributeError:
40 40 return False
41 41
42 42 # glibc determines buffering on the first write to stdout - if we replace a
43 43 # TTY-destined stdout with a pipe-destined stdout (e.g. a pager), we want
44 44 # line buffering (or unbuffered, on Windows)
45 45 if isatty(stdout):
46 46 if pycompat.iswindows:
47 47 # Windows doesn't support line buffering
48 48 stdout = os.fdopen(stdout.fileno(), r'wb', 0)
49 49 else:
50 50 stdout = os.fdopen(stdout.fileno(), r'wb', 1)
51 51
52 52 if pycompat.iswindows:
53 53 from .. import windows as platform
54 54 stdout = platform.winstdout(stdout)
55 55 else:
56 56 from .. import posix as platform
57 57
58 58 findexe = platform.findexe
59 59 _gethgcmd = platform.gethgcmd
60 60 getuser = platform.getuser
61 61 getpid = os.getpid
62 62 hidewindow = platform.hidewindow
63 63 quotecommand = platform.quotecommand
64 64 readpipe = platform.readpipe
65 65 setbinary = platform.setbinary
66 66 setsignalhandler = platform.setsignalhandler
67 67 shellquote = platform.shellquote
68 68 shellsplit = platform.shellsplit
69 69 spawndetached = platform.spawndetached
70 70 sshargs = platform.sshargs
71 71 testpid = platform.testpid
72 72
73 73 try:
74 74 setprocname = osutil.setprocname
75 75 except AttributeError:
76 76 pass
77 77 try:
78 78 unblocksignal = osutil.unblocksignal
79 79 except AttributeError:
80 80 pass
81 81
82 82 closefds = pycompat.isposix
83 83
84 84 def explainexit(code):
85 85 """return a message describing a subprocess status
86 86 (codes from kill are negative - not os.system/wait encoding)"""
87 87 if code >= 0:
88 88 return _("exited with status %d") % code
89 89 return _("killed by signal %d") % -code
90 90
91 91 class _pfile(object):
92 92 """File-like wrapper for a stream opened by subprocess.Popen()"""
93 93
94 94 def __init__(self, proc, fp):
95 95 self._proc = proc
96 96 self._fp = fp
97 97
98 98 def close(self):
99 99 # unlike os.popen(), this returns an integer in subprocess coding
100 100 self._fp.close()
101 101 return self._proc.wait()
102 102
103 103 def __iter__(self):
104 104 return iter(self._fp)
105 105
106 106 def __getattr__(self, attr):
107 107 return getattr(self._fp, attr)
108 108
109 109 def __enter__(self):
110 110 return self
111 111
112 112 def __exit__(self, exc_type, exc_value, exc_tb):
113 113 self.close()
114 114
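A minimal usage sketch for popen() and the _pfile wrapper; the command is illustrative only. The point of _pfile is that close() hands back the child's exit status in subprocess coding:

from mercurial.utils import procutil

fp = procutil.popen(b'uname -s')   # read mode is the default
data = fp.read()                   # attribute access is delegated to p.stdout
status = fp.close()                # waits for the child; 0 on success

_pfile also implements the context-manager protocol, so "with procutil.popen(...) as fp:" works when the status code is not needed.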
115 115 def popen(cmd, mode='rb', bufsize=-1):
116 116 if mode == 'rb':
117 117 return _popenreader(cmd, bufsize)
118 118 elif mode == 'wb':
119 119 return _popenwriter(cmd, bufsize)
120 120 raise error.ProgrammingError('unsupported mode: %r' % mode)
121 121
122 122 def _popenreader(cmd, bufsize):
123 123 p = subprocess.Popen(tonativestr(quotecommand(cmd)),
124 124 shell=True, bufsize=bufsize,
125 125 close_fds=closefds,
126 126 stdout=subprocess.PIPE)
127 127 return _pfile(p, p.stdout)
128 128
129 129 def _popenwriter(cmd, bufsize):
130 130 p = subprocess.Popen(tonativestr(quotecommand(cmd)),
131 131 shell=True, bufsize=bufsize,
132 132 close_fds=closefds,
133 133 stdin=subprocess.PIPE)
134 134 return _pfile(p, p.stdin)
135 135
136 136 def popen2(cmd, env=None):
137 137 # Setting bufsize to -1 lets the system decide the buffer size.
138 138 # The default for bufsize is 0, meaning unbuffered. This leads to
139 139 # poor performance on Mac OS X: http://bugs.python.org/issue4194
140 p = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
140 p = subprocess.Popen(tonativestr(cmd),
141 141 shell=True, bufsize=-1,
142 142 close_fds=closefds,
143 143 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
144 144 env=tonativeenv(env))
145 145 return p.stdin, p.stdout
146 146
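This hunk is the point of the change: cmd is a single bytes string, so wrapping tonativestr in pycompat.rapply() - which exists to recurse through lists, tuples, and dicts - does nothing extra, and the direct call is clearer. The env dict, by contrast, still goes through rapply() via tonativeenv() below. A small sketch of rapply()'s behavior, using a stand-in function:

from mercurial import pycompat

def upper(s):
    return s.upper()

pycompat.rapply(upper, b'ls')                 # b'LS' -- same as upper(b'ls')
pycompat.rapply(upper, {b'k': [b'a', b'b']})  # {b'K': [b'A', b'B']}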
147 147 def popen3(cmd, env=None):
148 148 stdin, stdout, stderr, p = popen4(cmd, env)
149 149 return stdin, stdout, stderr
150 150
151 151 def popen4(cmd, env=None, bufsize=-1):
152 p = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
152 p = subprocess.Popen(tonativestr(cmd),
153 153 shell=True, bufsize=bufsize,
154 154 close_fds=closefds,
155 155 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
156 156 stderr=subprocess.PIPE,
157 157 env=tonativeenv(env))
158 158 return p.stdin, p.stdout, p.stderr, p
159 159
160 160 def pipefilter(s, cmd):
161 161 '''filter string S through command CMD, returning its output'''
162 p = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
162 p = subprocess.Popen(tonativestr(cmd),
163 163 shell=True, close_fds=closefds,
164 164 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
165 165 pout, perr = p.communicate(s)
166 166 return pout
167 167
168 168 def tempfilter(s, cmd):
169 169 '''filter string S through a pair of temporary files with CMD.
170 170 CMD is used as a template to create the real command to be run,
171 171 with the strings INFILE and OUTFILE replaced by the real names of
172 172 the temporary files generated.'''
173 173 inname, outname = None, None
174 174 try:
175 175 infd, inname = pycompat.mkstemp(prefix='hg-filter-in-')
176 176 fp = os.fdopen(infd, r'wb')
177 177 fp.write(s)
178 178 fp.close()
179 179 outfd, outname = pycompat.mkstemp(prefix='hg-filter-out-')
180 180 os.close(outfd)
181 181 cmd = cmd.replace('INFILE', inname)
182 182 cmd = cmd.replace('OUTFILE', outname)
183 183 code = system(cmd)
184 184 if pycompat.sysplatform == 'OpenVMS' and code & 1:
185 185 code = 0
186 186 if code:
187 187 raise error.Abort(_("command '%s' failed: %s") %
188 188 (cmd, explainexit(code)))
189 189 with open(outname, 'rb') as fp:
190 190 return fp.read()
191 191 finally:
192 192 try:
193 193 if inname:
194 194 os.unlink(inname)
195 195 except OSError:
196 196 pass
197 197 try:
198 198 if outname:
199 199 os.unlink(outname)
200 200 except OSError:
201 201 pass
202 202
203 203 _filtertable = {
204 204 'tempfile:': tempfilter,
205 205 'pipe:': pipefilter,
206 206 }
207 207
208 208 def filter(s, cmd):
209 209 "filter a string through a command that transforms its input to its output"
210 210 for name, fn in _filtertable.iteritems():
211 211 if cmd.startswith(name):
212 212 return fn(s, cmd[len(name):].lstrip())
213 213 return pipefilter(s, cmd)
214 214
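filter() dispatches on the command prefix: 'tempfile:' routes the data through a pair of temporary files, anything else is streamed through the command's stdin/stdout. A minimal sketch; the sort commands are illustrative and assume a POSIX shell:

from mercurial.utils import procutil

out = procutil.filter(b'b\na\n', b'pipe: sort')                       # streamed
out = procutil.filter(b'b\na\n', b'tempfile: sort INFILE > OUTFILE')  # via files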
215 215 def mainfrozen():
216 216 """return True if we are a frozen executable.
217 217
218 218 The code supports py2exe (most common, Windows only) and tools/freeze
219 219 (portable, not much used).
220 220 """
221 221 return (pycompat.safehasattr(sys, "frozen") or # new py2exe
222 222 pycompat.safehasattr(sys, "importers") or # old py2exe
223 223 imp.is_frozen(u"__main__")) # tools/freeze
224 224
225 225 _hgexecutable = None
226 226
227 227 def hgexecutable():
228 228 """return location of the 'hg' executable.
229 229
230 230 Defaults to $HG or 'hg' in the search path.
231 231 """
232 232 if _hgexecutable is None:
233 233 hg = encoding.environ.get('HG')
234 234 mainmod = sys.modules[r'__main__']
235 235 if hg:
236 236 _sethgexecutable(hg)
237 237 elif mainfrozen():
238 238 if getattr(sys, 'frozen', None) == 'macosx_app':
239 239 # Env variable set by py2app
240 240 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
241 241 else:
242 242 _sethgexecutable(pycompat.sysexecutable)
243 243 elif (os.path.basename(
244 244 pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
245 245 _sethgexecutable(pycompat.fsencode(mainmod.__file__))
246 246 else:
247 247 exe = findexe('hg') or os.path.basename(sys.argv[0])
248 248 _sethgexecutable(exe)
249 249 return _hgexecutable
250 250
251 251 def _sethgexecutable(path):
252 252 """set location of the 'hg' executable"""
253 253 global _hgexecutable
254 254 _hgexecutable = path
255 255
256 256 def _testfileno(f, stdf):
257 257 fileno = getattr(f, 'fileno', None)
258 258 try:
259 259 return fileno and fileno() == stdf.fileno()
260 260 except io.UnsupportedOperation:
261 261 return False # fileno() raised UnsupportedOperation
262 262
263 263 def isstdin(f):
264 264 return _testfileno(f, sys.__stdin__)
265 265
266 266 def isstdout(f):
267 267 return _testfileno(f, sys.__stdout__)
268 268
269 269 def protectstdio(uin, uout):
270 270 """Duplicate streams and redirect original if (uin, uout) are stdio
271 271
272 272 If uin is stdin, it's redirected to /dev/null. If uout is stdout, it's
273 273 redirected to stderr so the output is still readable.
274 274
275 275 Returns (fin, fout), which point to the original (uin, uout) fds but
276 276 may be copies of (uin, uout). The returned streams can be considered
277 277 "owned" in that print(), exec(), etc. never reach them.
278 278 """
279 279 uout.flush()
280 280 fin, fout = uin, uout
281 281 if uin is stdin:
282 282 newfd = os.dup(uin.fileno())
283 283 nullfd = os.open(os.devnull, os.O_RDONLY)
284 284 os.dup2(nullfd, uin.fileno())
285 285 os.close(nullfd)
286 286 fin = os.fdopen(newfd, r'rb')
287 287 if uout is stdout:
288 288 newfd = os.dup(uout.fileno())
289 289 os.dup2(stderr.fileno(), uout.fileno())
290 290 fout = os.fdopen(newfd, r'wb')
291 291 return fin, fout
292 292
293 293 def restorestdio(uin, uout, fin, fout):
294 294 """Restore (uin, uout) streams from possibly duplicated (fin, fout)"""
295 295 uout.flush()
296 296 for f, uif in [(fin, uin), (fout, uout)]:
297 297 if f is not uif:
298 298 os.dup2(f.fileno(), uif.fileno())
299 299 f.close()
300 300
301 301 @contextlib.contextmanager
302 302 def protectedstdio(uin, uout):
303 303 """Run code block with protected standard streams"""
304 304 fin, fout = protectstdio(uin, uout)
305 305 try:
306 306 yield fin, fout
307 307 finally:
308 308 restorestdio(uin, uout, fin, fout)
309 309
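A minimal sketch of protectedstdio() guarding a block that talks a protocol over the real stdio; reads and writes must go through the returned (fin, fout), while anything else that touches the process-level stdout lands on stderr instead:

from mercurial.utils import procutil

with procutil.protectedstdio(procutil.stdin, procutil.stdout) as (fin, fout):
    request = fin.readline()     # reads from the original stdin fd
    fout.write(b'response\n')    # writes to the original stdout fd
    # stray print()s inside this block go to stderr, not the protocol stream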
310 310 def shellenviron(environ=None):
311 311 """return environ with optional override, useful for shelling out"""
312 312 def py2shell(val):
313 313 'convert a python object into a string that is useful to the shell'
314 314 if val is None or val is False:
315 315 return '0'
316 316 if val is True:
317 317 return '1'
318 318 return pycompat.bytestr(val)
319 319 env = dict(encoding.environ)
320 320 if environ:
321 321 env.update((k, py2shell(v)) for k, v in environ.iteritems())
322 322 env['HG'] = hgexecutable()
323 323 return env
324 324
325 325 if pycompat.iswindows:
326 326 def shelltonative(cmd, env):
327 327 return platform.shelltocmdexe(cmd, shellenviron(env))
328 328
329 329 tonativestr = encoding.strfromlocal
330 330 else:
331 331 def shelltonative(cmd, env):
332 332 return cmd
333 333
334 334 tonativestr = pycompat.identity
335 335
336 336 def tonativeenv(env):
337 337 '''convert the environment from bytes to strings suitable for Popen(), etc.
338 338 '''
339 339 return pycompat.rapply(tonativestr, env)
340 340
341 341 def system(cmd, environ=None, cwd=None, out=None):
342 342 '''enhanced shell command execution.
343 343 run with the environment possibly modified, possibly in a different dir.
344 344
345 345 if out is specified, it is assumed to be a file-like object that has a
346 346 write() method. stdout and stderr will be redirected to out.'''
347 347 try:
348 348 stdout.flush()
349 349 except Exception:
350 350 pass
351 351 cmd = quotecommand(cmd)
352 352 env = shellenviron(environ)
353 353 if out is None or isstdout(out):
354 rc = subprocess.call(pycompat.rapply(tonativestr, cmd),
354 rc = subprocess.call(tonativestr(cmd),
355 355 shell=True, close_fds=closefds,
356 356 env=tonativeenv(env),
357 357 cwd=pycompat.rapply(tonativestr, cwd))
358 358 else:
359 proc = subprocess.Popen(pycompat.rapply(tonativestr, cmd),
359 proc = subprocess.Popen(tonativestr(cmd),
360 360 shell=True, close_fds=closefds,
361 361 env=tonativeenv(env),
362 362 cwd=pycompat.rapply(tonativestr, cwd),
363 363 stdout=subprocess.PIPE,
364 364 stderr=subprocess.STDOUT)
365 365 for line in iter(proc.stdout.readline, ''):
366 366 out.write(line)
367 367 proc.wait()
368 368 rc = proc.returncode
369 369 if pycompat.sysplatform == 'OpenVMS' and rc & 1:
370 370 rc = 0
371 371 return rc
372 372
373 373 def gui():
374 374 '''Are we running in a GUI?'''
375 375 if pycompat.isdarwin:
376 376 if 'SSH_CONNECTION' in encoding.environ:
377 377 # handle SSH access to a box where the user is logged in
378 378 return False
379 379 elif getattr(osutil, 'isgui', None):
380 380 # check if a CoreGraphics session is available
381 381 return osutil.isgui()
382 382 else:
383 383 # pure build; use a safe default
384 384 return True
385 385 else:
386 386 return pycompat.iswindows or encoding.environ.get("DISPLAY")
387 387
388 388 def hgcmd():
389 389 """Return the command used to execute current hg
390 390
391 391 This is different from hgexecutable() because on Windows we want
392 392 to avoid things opening new shell windows like batch files, so we
393 393 get either the python call or current executable.
394 394 """
395 395 if mainfrozen():
396 396 if getattr(sys, 'frozen', None) == 'macosx_app':
397 397 # Env variable set by py2app
398 398 return [encoding.environ['EXECUTABLEPATH']]
399 399 else:
400 400 return [pycompat.sysexecutable]
401 401 return _gethgcmd()
402 402
403 403 def rundetached(args, condfn):
404 404 """Execute the argument list in a detached process.
405 405
406 406 condfn is a callable which is called repeatedly and should return
407 407 True once the child process is known to have started successfully.
408 408 At this point, the child process PID is returned. If the child
409 409 process fails to start or finishes before condfn() evaluates to
410 410 True, return -1.
411 411 """
412 412 # Windows case is easier because the child process is either
413 413 # successfully starting and validating the condition or exiting
414 414 # on failure. We just poll on its PID. On Unix, if the child
415 415 # process fails to start, it will be left in a zombie state until
416 416 # the parent wait on it, which we cannot do since we expect a long
417 417 # running process on success. Instead we listen for SIGCHLD telling
418 418 # us our child process terminated.
419 419 terminated = set()
420 420 def handler(signum, frame):
421 421 terminated.add(os.wait())
422 422 prevhandler = None
423 423 SIGCHLD = getattr(signal, 'SIGCHLD', None)
424 424 if SIGCHLD is not None:
425 425 prevhandler = signal.signal(SIGCHLD, handler)
426 426 try:
427 427 pid = spawndetached(args)
428 428 while not condfn():
429 429 if ((pid in terminated or not testpid(pid))
430 430 and not condfn()):
431 431 return -1
432 432 time.sleep(0.1)
433 433 return pid
434 434 finally:
435 435 if prevhandler is not None:
436 436 signal.signal(signal.SIGCHLD, prevhandler)
437 437
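A usage sketch for rundetached(), assuming a daemon that announces readiness by writing a pidfile; the path and arguments are illustrative only:

import os
from mercurial.utils import procutil

pidfile = b'/tmp/hg-daemon.pid'
args = [procutil.hgexecutable(), b'serve', b'-d', b'--pid-file', pidfile]
pid = procutil.rundetached(args, lambda: os.path.exists(pidfile))
if pid < 0:
    raise RuntimeError('daemon failed to start')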
438 438 @contextlib.contextmanager
439 439 def uninterruptable(warn):
440 440 """Inhibit SIGINT handling on a region of code.
441 441
442 442 Note that if this is called in a non-main thread, it turns into a no-op.
443 443
444 444 Args:
445 445 warn: A callable which takes no arguments, and returns True if the
446 446 previous signal handling should be restored.
447 447 """
448 448
449 449 oldsiginthandler = [signal.getsignal(signal.SIGINT)]
450 450 shouldbail = []
451 451
452 452 def disabledsiginthandler(*args):
453 453 if warn():
454 454 signal.signal(signal.SIGINT, oldsiginthandler[0])
455 455 del oldsiginthandler[0]
456 456 shouldbail.append(True)
457 457
458 458 try:
459 459 try:
460 460 signal.signal(signal.SIGINT, disabledsiginthandler)
461 461 except ValueError:
462 462 # wrong thread, oh well, we tried
463 463 del oldsiginthandler[0]
464 464 yield
465 465 finally:
466 466 if oldsiginthandler:
467 467 signal.signal(signal.SIGINT, oldsiginthandler[0])
468 468 if shouldbail:
469 469 raise KeyboardInterrupt
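A minimal sketch of uninterruptable() shielding a critical section; returning True from the warn callback restores the previous SIGINT handler, so a second ^C interrupts immediately while the first is deferred until the block exits:

from mercurial.utils import procutil

def _warn():
    procutil.stderr.write(b'interrupt deferred; press ^C again to force\n')
    return True  # restore the old handler so a second ^C takes effect

with procutil.uninterruptable(_warn):
    # stand-in for a non-reentrant critical section
    with open('state.tmp', 'wb') as fp:
        fp.write(b'updated')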