##// END OF EJS Templates
py3: catch specific OSError subclasses instead of checking errno...
Manuel Jacob -
r50205:050dc873 default
parent child Browse files
Show More
@@ -1,1470 +1,1467 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import collections
10 10 import contextlib
11 import errno
12 11 import os
13 12 import stat
14 13 import uuid
15 14
16 15 from .i18n import _
17 16 from .pycompat import delattr
18 17
19 18 from hgdemandimport import tracing
20 19
21 20 from . import (
22 21 dirstatemap,
23 22 encoding,
24 23 error,
25 24 match as matchmod,
26 25 node,
27 26 pathutil,
28 27 policy,
29 28 pycompat,
30 29 scmutil,
31 30 sparse,
32 31 util,
33 32 )
34 33
35 34 from .dirstateutils import (
36 35 timestamp,
37 36 )
38 37
39 38 from .interfaces import (
40 39 dirstate as intdirstate,
41 40 util as interfaceutil,
42 41 )
43 42
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the Rust implementation of the dirstate is available; some
# callers use this to pick faster code paths for dirstate-v2.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# Convenience aliases for helpers used throughout this module.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
54 53
55 54
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname through the dirstate's opener, which is rooted at .hg
        return obj._opener.join(fname)
61 60
62 61
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the repository root (see dirstate._join)
        return obj._join(fname)
68 67
69 68
def requires_parents_change(func):
    """Decorator restricting `func` to run inside a parentchange context.

    Raises error.ProgrammingError when the wrapped method is invoked
    while no `dirstate.parentchange()` context is active.
    """

    def wrap(self, *args, **kwargs):
        # guard clause: reject calls made outside of a parentchange context
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a parentchange context'
        raise error.ProgrammingError(msg % func.__name__)

    return wrap
79 78
80 79
def requires_no_parents_change(func):
    """Decorator forbidding `func` from running inside a parentchange context.

    Raises error.ProgrammingError when the wrapped method is invoked
    while a `dirstate.parentchange()` context is active.
    """

    def wrap(self, *args, **kwargs):
        # guard clause: reject calls made from within a parentchange context
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a parentchange context'
        raise error.ProgrammingError(msg % func.__name__)

    return wrap
90 89
91 90
92 91 @interfaceutil.implementer(intdirstate.idirstate)
93 92 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.

        ui supplies configuration and warnings; validate is applied to
        parent nodes before they are returned; sparsematchfn returns the
        sparse-checkout matcher; nodeconstants provides nullid;
        use_dirstate_v2 selects the on-disk format; use_tracked_hint
        enables maintenance of the tracked-hint key file.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different from what is on disk
        self._dirty = False
        # True if the set of tracked file may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting depth of active parentchange() contexts
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        # category -> callback; see addparentchangecallback()
        self._plchangecallbacks = {}
        # parents as of the last write, used to notify the callbacks above
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
138 137
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        self._pl

    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def pendingparentchange(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        return self._parentwriters > 0
168 167
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # assigning to self._map replaces this propertycache entry, so the
        # map is only constructed once per dirstate instance
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
193 192
    @repocache(b'branch')
    def _branch(self):
        """Current branch name, read from .hg/branch (b"default" if absent
        or empty)."""
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except FileNotFoundError:
            return b"default"

    @property
    def _pl(self):
        # the two working-copy parent nodes, as stored in the map
        return self._map.parents()

    def hasdir(self, d):
        # delegate directory lookup to the dirstate map
        return self._map.hastrackeddir(d)
207 206
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher combining all configured ignore files (never-matching
        when there are none)."""
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
216 215
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the OS
        # separator differs (the ui.slash configuration)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        # whether the filesystem at the repository root supports symlinks
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # whether the filesystem at the repository root honors the exec bit
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed against '.hg')
        return not util.fscasesensitive(self._join(b'.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
237 236
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            # a file that cannot be stat'ed carries no flags
            try:
                st = os.lstat(self._join(x))
            except OSError:
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem has no symlink support: fall back to the
                # dirstate entry, then to the expensive fallback
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same layering for the exec bit; reuse any entry or
                # fallback value already computed above
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
292 291
    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()

    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # cwd is inside the repository: return the root-relative part
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        """Return a display path for f relative to cwd (default: getcwd())."""
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def get_entry(self, path):
        """return a DirstateItem for the associated path"""
        entry = self._map.get(path)
        if entry is None:
            # unknown path: hand back an empty item rather than None
            return DirstateItem()
        return entry
335 334
    def __contains__(self, key):
        # membership means "known to the dirstate map"
        return key in self._map

    def __iter__(self):
        # iterate filenames in sorted order
        return iter(sorted(self._map))

    def items(self):
        # delegate to the underlying map
        return self._map.items()

    # legacy alias kept for callers still using the Python 2 era name
    iteritems = items

    def parents(self):
        # both working-copy parents, run through the validate callback
        return [self._validate(p) for p in self._pl]

    def p1(self):
        # first working-copy parent
        return self._validate(self._pl[0])

    def p2(self):
        # second working-copy parent (nullid outside of a merge)
        return self._validate(self._pl[1])

    @property
    def in_merge(self):
        """True if a merge is in progress"""
        return self._pl[1] != self._nodeconstants.nullid

    def branch(self):
        # branch name converted to the local encoding
        return encoding.tolocal(self._branch)
363 362
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so _writedirstate can notify
            # the registered parent-change callbacks
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)

    def setbranch(self, branch):
        """Set the current branch and persist it to .hg/branch."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise

    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached properties so they are rebuilt on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        self._parentwriters = 0
        self._origpl = None
420 419
421 420 def copy(self, source, dest):
422 421 """Mark dest as a copy of source. Unmark dest if source is None."""
423 422 if source == dest:
424 423 return
425 424 self._dirty = True
426 425 if source is not None:
427 426 self._map.copymap[dest] = source
428 427 else:
429 428 self._map.copymap.pop(dest, None)
430 429
    def copied(self, file):
        # copy source recorded for `file`, or None
        return self._map.copymap.get(file, None)

    def copies(self):
        # the full dest -> source copy mapping
        return self._map.copymap
436 435
    @requires_no_parents_change
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True if the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # newly tracked: make sure the name does not clash with an
            # existing tracked file or directory
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            self._dirty_tracked_set = True
        return pre_tracked

    @requires_no_parents_change
    def set_untracked(self, filename):
        """a "public" method for generic code to mark a file as untracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg remove X`.

        return True if the file was previously tracked, False otherwise.
        """
        ret = self._map.set_untracked(filename)
        if ret:
            self._dirty = True
            self._dirty_tracked_set = True
        return ret
473 472
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean

        parentfiledata is a (mode, size, mtime) triple describing the
        on-disk state being recorded.
        """
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)

    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
488 487
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )

    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked set changed; the tracked-hint file must be rewritten
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
572 571
    def _check_new_tracked_filename(self, filename):
        """Abort when `filename` cannot become tracked because its name
        clashes with an existing tracked directory or file."""
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Compute the case-folded form of `path` and cache it in `storemap`
        under its case-normalized key `normed`."""
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
        storemap[normed] = folded

        return folded
614 613
615 614 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
616 615 normed = util.normcase(path)
617 616 folded = self._map.filefoldmap.get(normed, None)
618 617 if folded is None:
619 618 if isknown:
620 619 folded = path
621 620 else:
622 621 folded = self._discoverpath(
623 622 path, normed, ignoremissing, exists, self._map.filefoldmap
624 623 )
625 624 return folded
626 625
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Case-normalize `path`, consulting the file fold map first, then
        the directory fold map, then the filesystem."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        """
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        """

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path
664 663
    def clear(self):
        """Drop all dirstate contents and mark the dirstate dirty."""
        self._map.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries for `allfiles` against `parent`.

        When `changedfiles` is given, only those files are re-examined;
        otherwise the whole dirstate is rebuilt from scratch.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
708 707
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        return self._map.identity

    def write(self, tr):
        """Write pending dirstate changes, deferred to transaction `tr` when
        one is given, immediately otherwise."""
        if not self._dirty:
            return

        # the tracked-hint key only needs rewriting when the tracked set
        # actually changed
        write_key = self._use_tracked_hint and self._dirty_tracked_set
        if tr:
            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )
            if write_key:
                tr.addfilegenerator(
                    b'dirstate-2-key-post',
                    (self._filename_th,),
                    lambda f: self._write_tracked_hint(tr, f),
                    location=b'plain',
                    post_finalize=True,
                )
            return

        file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
        with file(self._filename) as f:
            self._writedirstate(tr, f)
        if write_key:
            # we update the key-file after writing to make sure reader have a
            # key that match the newly written content
            with file(self._filename_th) as f:
                self._write_tracked_hint(tr, f)
749 748
    def delete_tracked_hint(self):
        """remove the tracked_hint file

        To be used by format downgrades operation"""
        self._opener.unlink(self._filename_th)
        self._use_tracked_hint = False

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

    def _writedirstate(self, tr, st):
        """Serialize the dirstate map to stream `st`, notifying any
        parent-change callbacks first."""
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        self._map.write(tr, st)
        self._dirty = False
        self._dirty_tracked_set = False

    def _write_tracked_hint(self, tr, f):
        """Write a fresh random key to the tracked-hint file."""
        key = node.hex(uuid.uuid4().bytes)
        f.write(b"1\n%s\n" % key)  # 1 is the format version
781 780
782 781 def _dirignore(self, f):
783 782 if self._ignore(f):
784 783 return True
785 784 for p in pathutil.finddirs(f):
786 785 if self._ignore(p):
787 786 return True
788 787 return False
789 788
    def _ignorefiles(self):
        """Return the list of ignore-file paths: the repository's .hgignore
        (if present) plus any ui.ignore / ui.ignore.* configured files."""
        files = []
        if os.path.exists(self._join(b'.hgignore')):
            files.append(self._join(b'.hgignore'))
        for name, path in self._ui.configitems(b"ui"):
            if name == b'ignore' or name.startswith(b'ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files
800 799
    def _ignorefileandline(self, f):
        """Return (file, lineno, line) of the first ignore rule matching `f`,
        or (None, -1, b"") when nothing matches.

        Subinclude patterns are followed breadth-first; `visited` guards
        against processing the same ignore file twice.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
822 821
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for an unsupported on-disk file type
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        badfn = match.bad
        dmap = self._map
        # hoist frequently-used callables and constants into locals for the
        # loops below
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; both lists are
        # sorted so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
957 956
958 957 def walk(self, match, subrepos, unknown, ignored, full=True):
959 958 """
960 959 Walk recursively through the directory tree, finding all files
961 960 matched by match.
962 961
963 962 If full is False, maybe skip some known-clean files.
964 963
965 964 Return a dict mapping filename to stat-like object (either
966 965 mercurial.osutil.stat instance or return value of os.stat()).
967 966
968 967 """
969 968 # full is a flag that extensions that hook into walk can use -- this
970 969 # implementation doesn't use it at all. This satisfies the contract
971 970 # because we only guarantee a "maybe".
972 971
973 972 if ignored:
974 973 ignore = util.never
975 974 dirignore = util.never
976 975 elif unknown:
977 976 ignore = self._ignore
978 977 dirignore = self._dirignore
979 978 else:
980 979 # if not unknown and not ignored, drop dir recursion and step 2
981 980 ignore = util.always
982 981 dirignore = util.always
983 982
984 983 matchfn = match.matchfn
985 984 matchalways = match.always()
986 985 matchtdir = match.traversedir
987 986 dmap = self._map
988 987 listdir = util.listdir
989 988 lstat = os.lstat
990 989 dirkind = stat.S_IFDIR
991 990 regkind = stat.S_IFREG
992 991 lnkkind = stat.S_IFLNK
993 992 join = self._join
994 993
995 994 exact = skipstep3 = False
996 995 if match.isexact(): # match.exact
997 996 exact = True
998 997 dirignore = util.always # skip step 2
999 998 elif match.prefix(): # match.match, no patterns
1000 999 skipstep3 = True
1001 1000
1002 1001 if not exact and self._checkcase:
1003 1002 normalize = self._normalize
1004 1003 normalizefile = self._normalizefile
1005 1004 skipstep3 = False
1006 1005 else:
1007 1006 normalize = self._normalize
1008 1007 normalizefile = None
1009 1008
1010 1009 # step 1: find all explicit files
1011 1010 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1012 1011 if matchtdir:
1013 1012 for d in work:
1014 1013 matchtdir(d[0])
1015 1014 for d in dirsnotfound:
1016 1015 matchtdir(d)
1017 1016
1018 1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1019 1018 work = [d for d in work if not dirignore(d[0])]
1020 1019
1021 1020 # step 2: visit subdirectories
1022 1021 def traverse(work, alreadynormed):
1023 1022 wadd = work.append
1024 1023 while work:
1025 1024 tracing.counter('dirstate.walk work', len(work))
1026 1025 nd = work.pop()
1027 1026 visitentries = match.visitchildrenset(nd)
1028 1027 if not visitentries:
1029 1028 continue
1030 1029 if visitentries == b'this' or visitentries == b'all':
1031 1030 visitentries = None
1032 1031 skip = None
1033 1032 if nd != b'':
1034 1033 skip = b'.hg'
1035 1034 try:
1036 1035 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1037 1036 entries = listdir(join(nd), stat=True, skip=skip)
1038 except OSError as inst:
1039 if inst.errno in (errno.EACCES, errno.ENOENT):
1040 match.bad(
1041 self.pathto(nd), encoding.strtolocal(inst.strerror)
1042 )
1043 continue
1044 raise
1037 except (PermissionError, FileNotFoundError) as inst:
1038 match.bad(
1039 self.pathto(nd), encoding.strtolocal(inst.strerror)
1040 )
1041 continue
1045 1042 for f, kind, st in entries:
1046 1043 # Some matchers may return files in the visitentries set,
1047 1044 # instead of 'this', if the matcher explicitly mentions them
1048 1045 # and is not an exactmatcher. This is acceptable; we do not
1049 1046 # make any hard assumptions about file-or-directory below
1050 1047 # based on the presence of `f` in visitentries. If
1051 1048 # visitchildrenset returned a set, we can always skip the
1052 1049 # entries *not* in the set it provided regardless of whether
1053 1050 # they're actually a file or a directory.
1054 1051 if visitentries and f not in visitentries:
1055 1052 continue
1056 1053 if normalizefile:
1057 1054 # even though f might be a directory, we're only
1058 1055 # interested in comparing it to files currently in the
1059 1056 # dmap -- therefore normalizefile is enough
1060 1057 nf = normalizefile(
1061 1058 nd and (nd + b"/" + f) or f, True, True
1062 1059 )
1063 1060 else:
1064 1061 nf = nd and (nd + b"/" + f) or f
1065 1062 if nf not in results:
1066 1063 if kind == dirkind:
1067 1064 if not ignore(nf):
1068 1065 if matchtdir:
1069 1066 matchtdir(nf)
1070 1067 wadd(nf)
1071 1068 if nf in dmap and (matchalways or matchfn(nf)):
1072 1069 results[nf] = None
1073 1070 elif kind == regkind or kind == lnkkind:
1074 1071 if nf in dmap:
1075 1072 if matchalways or matchfn(nf):
1076 1073 results[nf] = st
1077 1074 elif (matchalways or matchfn(nf)) and not ignore(
1078 1075 nf
1079 1076 ):
1080 1077 # unknown file -- normalize if necessary
1081 1078 if not alreadynormed:
1082 1079 nf = normalize(nf, False, True)
1083 1080 results[nf] = st
1084 1081 elif nf in dmap and (matchalways or matchfn(nf)):
1085 1082 results[nf] = None
1086 1083
1087 1084 for nd, d in work:
1088 1085 # alreadynormed means that processwork doesn't have to do any
1089 1086 # expensive directory normalization
1090 1087 alreadynormed = not normalize or nd == d
1091 1088 traverse([d], alreadynormed)
1092 1089
1093 1090 for s in subrepos:
1094 1091 del results[s]
1095 1092 del results[b'.hg']
1096 1093
1097 1094 # step 3: visit remaining files from dmap
1098 1095 if not skipstep3 and not exact:
1099 1096 # If a dmap file is not in results yet, it was either
1100 1097 # a) not matching matchfn b) ignored, c) missing, or d) under a
1101 1098 # symlink directory.
1102 1099 if not results and matchalways:
1103 1100 visit = [f for f in dmap]
1104 1101 else:
1105 1102 visit = [f for f in dmap if f not in results and matchfn(f)]
1106 1103 visit.sort()
1107 1104
1108 1105 if unknown:
1109 1106 # unknown == True means we walked all dirs under the roots
1110 1107 # that wasn't ignored, and everything that matched was stat'ed
1111 1108 # and is already in results.
1112 1109 # The rest must thus be ignored or under a symlink.
1113 1110 audit_path = pathutil.pathauditor(self._root, cached=True)
1114 1111
1115 1112 for nf in iter(visit):
1116 1113 # If a stat for the same file was already added with a
1117 1114 # different case, don't add one for this, since that would
1118 1115 # make it appear as if the file exists under both names
1119 1116 # on disk.
1120 1117 if (
1121 1118 normalizefile
1122 1119 and normalizefile(nf, True, True) in results
1123 1120 ):
1124 1121 results[nf] = None
1125 1122 # Report ignored items in the dmap as long as they are not
1126 1123 # under a symlink directory.
1127 1124 elif audit_path.check(nf):
1128 1125 try:
1129 1126 results[nf] = lstat(join(nf))
1130 1127 # file was just ignored, no links, and exists
1131 1128 except OSError:
1132 1129 # file doesn't exist
1133 1130 results[nf] = None
1134 1131 else:
1135 1132 # It's either missing or under a symlink directory
1136 1133 # which we in this case report as missing
1137 1134 results[nf] = None
1138 1135 else:
1139 1136 # We may not have walked the full directory tree above,
1140 1137 # so stat and check everything we missed.
1141 1138 iv = iter(visit)
1142 1139 for st in util.statfiles([join(i) for i in visit]):
1143 1140 results[next(iv)] = st
1144 1141 return results
1145 1142
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status via the Rust fast path.

        Returns a ``(lookup, status)`` pair like the pure-Python path in
        ``status()``; raises ``rustmod.FallbackError`` (handled by the
        caller) when the Rust implementation cannot handle the request.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # single-threaded Rayon when workers are disabled in config
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust side may have updated the dirstate map in place
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: a bad pattern in an ignore file
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: an ignore file that could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1223 1220
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # keep the caller's flags; the local names below are reused as the
        # result accumulators
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # fall through to the pure-Python implementation below
                pass

        def noop(f):
            pass

        # bind per-list append methods once; lists not requested by the
        # caller get the no-op sink
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # file on disk but not in the dirstate: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1369 1366
1370 1367 def matches(self, match):
1371 1368 """
1372 1369 return files in the dirstate (in whatever state) filtered by match
1373 1370 """
1374 1371 dmap = self._map
1375 1372 if rustmod is not None:
1376 1373 dmap = self._map._map
1377 1374
1378 1375 if match.always():
1379 1376 return dmap.keys()
1380 1377 files = match.files()
1381 1378 if match.isexact():
1382 1379 # fast path -- filter the other way around, since typically files is
1383 1380 # much smaller than dmap
1384 1381 return [f for f in files if f in dmap]
1385 1382 if match.prefix() and all(fn in dmap for fn in files):
1386 1383 # fast path -- all the values are known to be files, so just return
1387 1384 # that
1388 1385 return list(files)
1389 1386 return [f for f in dmap if match(f)]
1390 1387
1391 1388 def _actualfilename(self, tr):
1392 1389 if tr:
1393 1390 return self._pendingfilename
1394 1391 else:
1395 1392 return self._filename
1396 1393
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1436 1433
1437 1434 def restorebackup(self, tr, backupname):
1438 1435 '''Restore dirstate by backup file'''
1439 1436 # this "invalidate()" prevents "wlock.release()" from writing
1440 1437 # changes of dirstate out after restoring from backup file
1441 1438 self.invalidate()
1442 1439 filename = self._actualfilename(tr)
1443 1440 o = self._opener
1444 1441 if util.samefile(o.join(backupname), o.join(filename)):
1445 1442 o.unlink(backupname)
1446 1443 else:
1447 1444 o.rename(backupname, filename, checkambig=True)
1448 1445
    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        # the backup created by savebackup() is simply removed
        self._opener.unlink(backupname)
1452 1449
1453 1450 def verify(self, m1, m2):
1454 1451 """check the dirstate content again the parent manifest and yield errors"""
1455 1452 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1456 1453 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1457 1454 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1458 1455 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1459 1456 for f, entry in self.items():
1460 1457 state = entry.state
1461 1458 if state in b"nr" and f not in m1:
1462 1459 yield (missing_from_p1, f, state)
1463 1460 if state in b"a" and f in m1:
1464 1461 yield (unexpected_in_p1, f, state)
1465 1462 if state in b"m" and f not in m1 and f not in m2:
1466 1463 yield (missing_from_ps, f, state)
1467 1464 for f in m1:
1468 1465 state = self.get_entry(f).state
1469 1466 if state not in b"nrm":
1470 1467 yield (missing_from_ds, f, state)
@@ -1,621 +1,618 b''
1 1 # fileset.py - file set queries for mercurial
2 2 #
3 3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 import errno
10 9 import re
11 10
12 11 from .i18n import _
13 12 from .pycompat import getattr
14 13 from . import (
15 14 error,
16 15 filesetlang,
17 16 match as matchmod,
18 17 mergestate as mergestatemod,
19 18 pycompat,
20 19 registrar,
21 20 scmutil,
22 21 util,
23 22 )
24 23 from .utils import stringutil
25 24
26 25 # common weight constants
27 26 _WEIGHT_CHECK_FILENAME = filesetlang.WEIGHT_CHECK_FILENAME
28 27 _WEIGHT_READ_CONTENTS = filesetlang.WEIGHT_READ_CONTENTS
29 28 _WEIGHT_STATUS = filesetlang.WEIGHT_STATUS
30 29 _WEIGHT_STATUS_THOROUGH = filesetlang.WEIGHT_STATUS_THOROUGH
31 30
32 31 # helpers for processing parsed tree
33 32 getsymbol = filesetlang.getsymbol
34 33 getstring = filesetlang.getstring
35 34 _getkindpat = filesetlang.getkindpat
36 35 getpattern = filesetlang.getpattern
37 36 getargs = filesetlang.getargs
38 37
39 38
def getmatch(mctx, x):
    """Evaluate parsed tree ``x`` into a matcher via the methods table."""
    if not x:
        raise error.ParseError(_(b"missing argument"))
    op = x[0]
    return methods[op](mctx, *x[1:])
44 43
45 44
def getmatchwithstatus(mctx, x, hint):
    """Like getmatch(), but evaluate with status information preloaded for
    the keys named in the ``hint`` string."""
    hint_keys = set(getstring(hint, b'status hint must be a string').split())
    return getmatch(mctx.withstatus(hint_keys), x)
49 48
50 49
def stringmatch(mctx, x):
    # build a matcher from a single bare string pattern
    return mctx.matcher([x])
53 52
54 53
def kindpatmatch(mctx, x, y):
    """Match a ``kind:pattern`` pair given as two parsed arguments."""
    pat = _getkindpat(
        x, y, matchmod.allpatternkinds, _(b"pattern must be a string")
    )
    return stringmatch(mctx, pat)
62 61
63 62
def patternsmatch(mctx, *xs):
    """Build a single matcher from any number of pattern arguments."""
    allkinds = matchmod.allpatternkinds
    err = _(b"pattern must be a string")
    patterns = [getpattern(tree, allkinds, err) for tree in xs]
    return mctx.matcher(patterns)
70 69
71 70
def andmatch(mctx, x, y):
    """Intersection of two filesets; the right operand is evaluated in a
    context narrowed by the left one."""
    left = getmatch(mctx, x)
    right = getmatch(mctx.narrowed(left), y)
    return matchmod.intersectmatchers(left, right)
76 75
77 76
def ormatch(mctx, *xs):
    """Union of any number of filesets."""
    matchers = [getmatch(mctx, tree) for tree in xs]
    return matchmod.unionmatcher(matchers)
81 80
82 81
def notmatch(mctx, x):
    """Complement of a fileset."""
    inner = getmatch(mctx, x)
    return mctx.predicate(
        lambda f: not inner(f), predrepr=(b'<not %r>', inner)
    )
86 85
87 86
def minusmatch(mctx, x, y):
    """Set difference: files matched by x but not by y."""
    left = getmatch(mctx, x)
    right = getmatch(mctx.narrowed(left), y)
    return matchmod.differencematcher(left, right)
92 91
93 92
def listmatch(mctx, *xs):
    # a bare list is never valid here; lists only appear as function arguments
    raise error.ParseError(
        _(b"can't use a list in this context"),
        hint=_(b'see \'hg help "filesets.x or y"\''),
    )
99 98
100 99
def func(mctx, a, b):
    """Dispatch a parsed function-call node to the registered fileset symbol."""
    funcname = getsymbol(a)
    if funcname in symbols:
        return symbols[funcname](mctx, b)

    # suggest only documented symbols when reporting an unknown name
    documented = [
        s for (s, fn) in symbols.items()
        if getattr(fn, '__doc__', None) is not None
    ]
    raise error.UnknownIdentifier(funcname, documented)
110 109
111 110
112 111 # symbols are callable like:
113 112 # fun(mctx, x)
114 113 # with:
115 114 # mctx - current matchctx instance
116 115 # x - argument in tree form
117 116 symbols = filesetlang.symbols
118 117
119 118 predicate = registrar.filesetpredicate(symbols)
120 119
121 120
@predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
def modified(mctx, x):
    """File that is modified according to :hg:`status`."""
    # i18n: "modified" is a keyword
    getargs(x, 0, 0, _(b"modified takes no arguments"))
    wanted = set(mctx.status().modified)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'modified')
129 128
130 129
@predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
def added(mctx, x):
    """File that is added according to :hg:`status`."""
    # i18n: "added" is a keyword
    getargs(x, 0, 0, _(b"added takes no arguments"))
    wanted = set(mctx.status().added)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'added')
138 137
139 138
@predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
def removed(mctx, x):
    """File that is removed according to :hg:`status`."""
    # i18n: "removed" is a keyword
    getargs(x, 0, 0, _(b"removed takes no arguments"))
    wanted = set(mctx.status().removed)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'removed')
147 146
148 147
@predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
def deleted(mctx, x):
    """Alias for ``missing()``."""
    # i18n: "deleted" is a keyword
    getargs(x, 0, 0, _(b"deleted takes no arguments"))
    wanted = set(mctx.status().deleted)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'deleted')
156 155
157 156
@predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
def missing(mctx, x):
    """File that is missing according to :hg:`status`."""
    # i18n: "missing" is a keyword
    getargs(x, 0, 0, _(b"missing takes no arguments"))
    # "missing" and "deleted" report the same status bucket
    wanted = set(mctx.status().deleted)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'deleted')
165 164
166 165
@predicate(b'unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
def unknown(mctx, x):
    """File that is unknown according to :hg:`status`."""
    # i18n: "unknown" is a keyword
    getargs(x, 0, 0, _(b"unknown takes no arguments"))
    wanted = set(mctx.status().unknown)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'unknown')
174 173
175 174
@predicate(b'ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
def ignored(mctx, x):
    """File that is ignored according to :hg:`status`."""
    # i18n: "ignored" is a keyword
    getargs(x, 0, 0, _(b"ignored takes no arguments"))
    wanted = set(mctx.status().ignored)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'ignored')
183 182
184 183
@predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
def clean(mctx, x):
    """File that is clean according to :hg:`status`."""
    # i18n: "clean" is a keyword
    getargs(x, 0, 0, _(b"clean takes no arguments"))
    wanted = set(mctx.status().clean)
    return mctx.predicate(lambda f: f in wanted, predrepr=b'clean')
192 191
193 192
@predicate(b'tracked()')
def tracked(mctx, x):
    """File that is under Mercurial control."""
    # i18n: "tracked" is a keyword
    getargs(x, 0, 0, _(b"tracked takes no arguments"))
    ctx = mctx.ctx
    return mctx.predicate(lambda f: f in ctx, predrepr=b'tracked')
200 199
201 200
@predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
def binary(mctx, x):
    """File that appears to be binary (contains NUL bytes)."""
    # i18n: "binary" is a keyword
    getargs(x, 0, 0, _(b"binary takes no arguments"))

    def is_binary(fctx):
        return fctx.isbinary()

    return mctx.fpredicate(is_binary, predrepr=b'binary', cache=True)
210 209
211 210
@predicate(b'exec()')
def exec_(mctx, x):
    """File that is marked as executable."""
    # i18n: "exec" is a keyword
    getargs(x, 0, 0, _(b"exec takes no arguments"))
    ctx = mctx.ctx

    def has_exec_flag(f):
        return ctx.flags(f) == b'x'

    return mctx.predicate(has_exec_flag, predrepr=b'exec')
219 218
220 219
@predicate(b'symlink()')
def symlink(mctx, x):
    """File that is marked as a symlink."""
    # i18n: "symlink" is a keyword
    getargs(x, 0, 0, _(b"symlink takes no arguments"))
    ctx = mctx.ctx

    def has_link_flag(f):
        return ctx.flags(f) == b'l'

    return mctx.predicate(has_link_flag, predrepr=b'symlink')
228 227
229 228
@predicate(b'resolved()', weight=_WEIGHT_STATUS)
def resolved(mctx, x):
    """File that is marked resolved according to :hg:`resolve -l`."""
    # i18n: "resolved" is a keyword
    getargs(x, 0, 0, _(b"resolved takes no arguments"))
    if mctx.ctx.rev() is not None:
        # merge state only exists in the working directory
        return mctx.never()
    ms = mergestatemod.mergestate.read(mctx.ctx.repo())

    def is_resolved(f):
        return f in ms and ms[f] == b'r'

    return mctx.predicate(is_resolved, predrepr=b'resolved')
241 240
242 241
@predicate(b'unresolved()', weight=_WEIGHT_STATUS)
def unresolved(mctx, x):
    """File that is marked unresolved according to :hg:`resolve -l`."""
    # i18n: "unresolved" is a keyword
    getargs(x, 0, 0, _(b"unresolved takes no arguments"))
    if mctx.ctx.rev() is not None:
        # merge state only exists in the working directory
        return mctx.never()
    ms = mergestatemod.mergestate.read(mctx.ctx.repo())

    def is_unresolved(f):
        return f in ms and ms[f] == b'u'

    return mctx.predicate(is_unresolved, predrepr=b'unresolved')
254 253
255 254
@predicate(b'hgignore()', weight=_WEIGHT_STATUS)
def hgignore(mctx, x):
    """File that matches the active .hgignore pattern."""
    # i18n: "hgignore" is a keyword
    getargs(x, 0, 0, _(b"hgignore takes no arguments"))
    # the dirstate's ignore matcher is itself used directly as the result
    return mctx.ctx.repo().dirstate._ignore
262 261
263 262
@predicate(b'portable()', weight=_WEIGHT_CHECK_FILENAME)
def portable(mctx, x):
    """File that has a portable name. (This doesn't include filenames with case
    collisions.)
    """
    # i18n: "portable" is a keyword
    getargs(x, 0, 0, _(b"portable takes no arguments"))

    def is_portable(f):
        # checkwinfilename returns None when the name is acceptable
        return util.checkwinfilename(f) is None

    return mctx.predicate(is_portable, predrepr=b'portable')
274 273
275 274
@predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
def grep(mctx, x):
    """File contains the given regular expression."""
    # i18n: "grep" is a keyword
    pattern = getstring(x, _(b"grep requires a pattern"))
    try:
        regex = re.compile(pattern)
    except re.error as e:
        raise error.ParseError(
            _(b'invalid match pattern: %s') % stringutil.forcebytestr(e)
        )
    return mctx.fpredicate(
        lambda fctx: regex.search(fctx.data()),
        predrepr=(b'grep(%r)', regex.pattern),
        cache=True,
    )
291 290
292 291
def _sizetomax(s):
    """Return the largest byte count (inclusive) covered by size string ``s``.

    max(4k) = 5k - 1, max(4.5k) = 4.6k - 1: a value with a unit suffix
    covers everything up to one unit in its last decimal place.
    """
    try:
        s = s.strip().lower()
        for suffix, multiplier in util._sizeunits:
            if not s.endswith(suffix):
                continue
            number = s[: -len(suffix)]
            # one unit in the last decimal place the user wrote
            increment = 1.0
            if b"." in number:
                increment /= 10 ** len(number.split(b".")[1])
            return int((float(number) + increment) * multiplier) - 1
        # no suffix, this is a precise byte count
        return int(s)
    except ValueError:
        raise error.ParseError(_(b"couldn't parse size: %s") % s)
308 307
309 308
def sizematcher(expr):
    """Return a function(size) -> bool from the ``size()`` expression"""
    expr = expr.strip()
    if b'-' in expr:  # explicit "a-b" range
        lo_text, hi_text = expr.split(b'-', 1)
        lo = util.sizetoint(lo_text)
        hi = util.sizetoint(hi_text)
        return lambda x: lo <= x <= hi
    # NOTE: two-character operators must be tested before their
    # one-character prefixes
    if expr.startswith(b"<="):
        bound = util.sizetoint(expr[2:])
        return lambda x: x <= bound
    if expr.startswith(b"<"):
        bound = util.sizetoint(expr[1:])
        return lambda x: x < bound
    if expr.startswith(b">="):
        bound = util.sizetoint(expr[2:])
        return lambda x: x >= bound
    if expr.startswith(b">"):
        bound = util.sizetoint(expr[1:])
        return lambda x: x > bound
    # a bare value covers everything that rounds to it
    lo = util.sizetoint(expr)
    hi = _sizetomax(expr)
    return lambda x: lo <= x <= hi
334 333
335 334
@predicate(b'size(expression)', weight=_WEIGHT_STATUS)
def size(mctx, x):
    """File size matches the given expression. Examples:

    - size('1k') - files from 1024 to 2047 bytes
    - size('< 20k') - files less than 20480 bytes
    - size('>= .5MB') - files at least 524288 bytes
    - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
    """
    # i18n: "size" is a keyword
    expr = getstring(x, _(b"size requires an expression"))
    matches_size = sizematcher(expr)
    return mctx.fpredicate(
        lambda fctx: matches_size(fctx.size()),
        predrepr=(b'size(%r)', expr),
        cache=True,
    )
351 350
352 351
@predicate(b'encoding(name)', weight=_WEIGHT_READ_CONTENTS)
def encoding(mctx, x):
    """File can be successfully decoded with the given character
    encoding. May not be useful for encodings other than ASCII and
    UTF-8.
    """

    # i18n: "encoding" is a keyword
    enc = getstring(x, _(b"encoding requires an encoding name"))

    def decodable(fctx):
        try:
            fctx.data().decode(pycompat.sysstr(enc))
        except LookupError:
            # the encoding name itself is unknown to Python
            raise error.Abort(_(b"unknown encoding '%s'") % enc)
        except UnicodeDecodeError:
            return False
        return True

    return mctx.fpredicate(decodable, predrepr=(b'encoding(%r)', enc), cache=True)
374 373
375 374
@predicate(b'eol(style)', weight=_WEIGHT_READ_CONTENTS)
def eol(mctx, x):
    """File contains newlines of the given style (dos, unix, mac). Binary
    files are excluded, files with mixed line endings match multiple
    styles.
    """

    # i18n: "eol" is a keyword
    enc = getstring(x, _(b"eol requires a style name"))

    def eolp(fctx):
        if fctx.isbinary():
            return False
        data = fctx.data()
        if enc in (b'dos', b'win'):
            return b'\r\n' in data
        if enc == b'unix':
            # a newline not preceded by a carriage return
            return bool(re.search(b'(?<!\r)\n', data))
        if enc == b'mac':
            # a carriage return not followed by a newline
            return bool(re.search(b'\r(?!\n)', data))
        return False

    return mctx.fpredicate(eolp, predrepr=(b'eol(%r)', enc), cache=True)
399 398
400 399
@predicate(b'copied()')
def copied(mctx, x):
    """File that is recorded as being copied."""
    # i18n: "copied" is a keyword
    getargs(x, 0, 0, _(b"copied takes no arguments"))

    def copiedp(fctx):
        parents = fctx.parents()
        # a copy is recorded when the first parent has a different path
        return parents and parents[0].path() != fctx.path()

    return mctx.fpredicate(copiedp, predrepr=b'copied', cache=True)
412 411
413 412
414 413 @predicate(b'revs(revs, pattern)', weight=_WEIGHT_STATUS)
415 414 def revs(mctx, x):
416 415 """Evaluate set in the specified revisions. If the revset match multiple
417 416 revs, this will return file matching pattern in any of the revision.
418 417 """
419 418 # i18n: "revs" is a keyword
420 419 r, x = getargs(x, 2, 2, _(b"revs takes two arguments"))
421 420 # i18n: "revs" is a keyword
422 421 revspec = getstring(r, _(b"first argument to revs must be a revision"))
423 422 repo = mctx.ctx.repo()
424 423 revs = scmutil.revrange(repo, [revspec])
425 424
426 425 matchers = []
427 426 for r in revs:
428 427 ctx = repo[r]
429 428 mc = mctx.switch(ctx.p1(), ctx)
430 429 matchers.append(getmatch(mc, x))
431 430 if not matchers:
432 431 return mctx.never()
433 432 if len(matchers) == 1:
434 433 return matchers[0]
435 434 return matchmod.unionmatcher(matchers)
436 435
437 436
438 437 @predicate(b'status(base, rev, pattern)', weight=_WEIGHT_STATUS)
439 438 def status(mctx, x):
440 439 """Evaluate predicate using status change between ``base`` and
441 440 ``rev``. Examples:
442 441
443 442 - ``status(3, 7, added())`` - matches files added from "3" to "7"
444 443 """
445 444 repo = mctx.ctx.repo()
446 445 # i18n: "status" is a keyword
447 446 b, r, x = getargs(x, 3, 3, _(b"status takes three arguments"))
448 447 # i18n: "status" is a keyword
449 448 baseerr = _(b"first argument to status must be a revision")
450 449 baserevspec = getstring(b, baseerr)
451 450 if not baserevspec:
452 451 raise error.ParseError(baseerr)
453 452 reverr = _(b"second argument to status must be a revision")
454 453 revspec = getstring(r, reverr)
455 454 if not revspec:
456 455 raise error.ParseError(reverr)
457 456 basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
458 457 mc = mctx.switch(basectx, ctx)
459 458 return getmatch(mc, x)
460 459
461 460
462 461 @predicate(b'subrepo([pattern])')
463 462 def subrepo(mctx, x):
464 463 """Subrepositories whose paths match the given pattern."""
465 464 # i18n: "subrepo" is a keyword
466 465 getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
467 466 ctx = mctx.ctx
468 467 sstate = ctx.substate
469 468 if x:
470 469 pat = getpattern(
471 470 x,
472 471 matchmod.allpatternkinds,
473 472 # i18n: "subrepo" is a keyword
474 473 _(b"subrepo requires a pattern or no arguments"),
475 474 )
476 475 fast = not matchmod.patkind(pat)
477 476 if fast:
478 477
479 478 def m(s):
480 479 return s == pat
481 480
482 481 else:
483 482 m = matchmod.match(ctx.repo().root, b'', [pat], ctx=ctx)
484 483 return mctx.predicate(
485 484 lambda f: f in sstate and m(f), predrepr=(b'subrepo(%r)', pat)
486 485 )
487 486 else:
488 487 return mctx.predicate(sstate.__contains__, predrepr=b'subrepo')
489 488
490 489
491 490 methods = {
492 491 b'withstatus': getmatchwithstatus,
493 492 b'string': stringmatch,
494 493 b'symbol': stringmatch,
495 494 b'kindpat': kindpatmatch,
496 495 b'patterns': patternsmatch,
497 496 b'and': andmatch,
498 497 b'or': ormatch,
499 498 b'minus': minusmatch,
500 499 b'list': listmatch,
501 500 b'not': notmatch,
502 501 b'func': func,
503 502 }
504 503
505 504
506 505 class matchctx:
507 506 def __init__(self, basectx, ctx, cwd, badfn=None):
508 507 self._basectx = basectx
509 508 self.ctx = ctx
510 509 self._badfn = badfn
511 510 self._match = None
512 511 self._status = None
513 512 self.cwd = cwd
514 513
515 514 def narrowed(self, match):
516 515 """Create matchctx for a sub-tree narrowed by the given matcher"""
517 516 mctx = matchctx(self._basectx, self.ctx, self.cwd, self._badfn)
518 517 mctx._match = match
519 518 # leave wider status which we don't have to care
520 519 mctx._status = self._status
521 520 return mctx
522 521
523 522 def switch(self, basectx, ctx):
524 523 mctx = matchctx(basectx, ctx, self.cwd, self._badfn)
525 524 mctx._match = self._match
526 525 return mctx
527 526
528 527 def withstatus(self, keys):
529 528 """Create matchctx which has precomputed status specified by the keys"""
530 529 mctx = matchctx(self._basectx, self.ctx, self.cwd, self._badfn)
531 530 mctx._match = self._match
532 531 mctx._buildstatus(keys)
533 532 return mctx
534 533
535 534 def _buildstatus(self, keys):
536 535 self._status = self._basectx.status(
537 536 self.ctx,
538 537 self._match,
539 538 listignored=b'ignored' in keys,
540 539 listclean=b'clean' in keys,
541 540 listunknown=b'unknown' in keys,
542 541 )
543 542
544 543 def status(self):
545 544 return self._status
546 545
547 546 def matcher(self, patterns):
548 547 return self.ctx.match(patterns, badfn=self._badfn, cwd=self.cwd)
549 548
550 549 def predicate(self, predfn, predrepr=None, cache=False):
551 550 """Create a matcher to select files by predfn(filename)"""
552 551 if cache:
553 552 predfn = util.cachefunc(predfn)
554 553 return matchmod.predicatematcher(
555 554 predfn, predrepr=predrepr, badfn=self._badfn
556 555 )
557 556
558 557 def fpredicate(self, predfn, predrepr=None, cache=False):
559 558 """Create a matcher to select files by predfn(fctx) at the current
560 559 revision
561 560
562 561 Missing files are ignored.
563 562 """
564 563 ctx = self.ctx
565 564 if ctx.rev() is None:
566 565
567 566 def fctxpredfn(f):
568 567 try:
569 568 fctx = ctx[f]
570 569 except error.LookupError:
571 570 return False
572 571 try:
573 572 fctx.audit()
574 573 except error.Abort:
575 574 return False
576 575 try:
577 576 return predfn(fctx)
578 except (IOError, OSError) as e:
579 # open()-ing a directory fails with EACCES on Windows
580 if e.errno in (
581 errno.ENOENT,
582 errno.EACCES,
583 errno.ENOTDIR,
584 errno.EISDIR,
585 ):
586 return False
587 raise
577 # open()-ing a directory fails with PermissionError on Windows
578 except (
579 FileNotFoundError,
580 PermissionError,
581 NotADirectoryError,
582 IsADirectoryError,
583 ):
584 return False
588 585
589 586 else:
590 587
591 588 def fctxpredfn(f):
592 589 try:
593 590 fctx = ctx[f]
594 591 except error.LookupError:
595 592 return False
596 593 return predfn(fctx)
597 594
598 595 return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)
599 596
600 597 def never(self):
601 598 """Create a matcher to select nothing"""
602 599 return matchmod.never(badfn=self._badfn)
603 600
604 601
605 602 def match(ctx, cwd, expr, badfn=None):
606 603 """Create a matcher for a single fileset expression"""
607 604 tree = filesetlang.parse(expr)
608 605 tree = filesetlang.analyze(tree)
609 606 tree = filesetlang.optimize(tree)
610 607 mctx = matchctx(ctx.p1(), ctx, cwd, badfn=badfn)
611 608 return getmatch(mctx, tree)
612 609
613 610
614 611 def loadpredicate(ui, extname, registrarobj):
615 612 """Load fileset predicates from specified registrarobj"""
616 613 for name, func in registrarobj._table.items():
617 614 symbols[name] = func
618 615
619 616
620 617 # tell hggettext to extract docstrings from these functions:
621 618 i18nfunctions = symbols.values()
@@ -1,394 +1,393 b''
1 1 # lock.py - simple advisory locking scheme for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import contextlib
10 10 import errno
11 11 import os
12 12 import signal
13 13 import socket
14 14 import time
15 15 import warnings
16 16
17 17 from .i18n import _
18 18 from .pycompat import getattr
19 19
20 20 from . import (
21 21 encoding,
22 22 error,
23 23 pycompat,
24 24 util,
25 25 )
26 26
27 27 from .utils import procutil
28 28
29 29
30 30 def _getlockprefix():
31 31 """Return a string which is used to differentiate pid namespaces
32 32
33 33 It's useful to detect "dead" processes and remove stale locks with
34 34 confidence. Typically it's just hostname. On modern linux, we include an
35 35 extra Linux-specific pid namespace identifier.
36 36 """
37 37 result = encoding.strtolocal(socket.gethostname())
38 38 if pycompat.sysplatform.startswith(b'linux'):
39 39 try:
40 40 result += b'/%x' % os.stat(b'/proc/self/ns/pid').st_ino
41 except OSError as ex:
42 if ex.errno not in (errno.ENOENT, errno.EACCES, errno.ENOTDIR):
43 raise
41 except (FileNotFoundError, PermissionError, NotADirectoryError):
42 pass
44 43 return result
45 44
46 45
47 46 @contextlib.contextmanager
48 47 def _delayedinterrupt():
49 48 """Block signal interrupt while doing something critical
50 49
51 50 This makes sure that the code block wrapped by this context manager won't
52 51 be interrupted.
53 52
54 53 For Windows developers: It appears not possible to guard time.sleep()
55 54 from CTRL_C_EVENT, so please don't use time.sleep() to test if this is
56 55 working.
57 56 """
58 57 assertedsigs = []
59 58 blocked = False
60 59 orighandlers = {}
61 60
62 61 def raiseinterrupt(num):
63 62 if num == getattr(signal, 'SIGINT', None) or num == getattr(
64 63 signal, 'CTRL_C_EVENT', None
65 64 ):
66 65 raise KeyboardInterrupt
67 66 else:
68 67 raise error.SignalInterrupt
69 68
70 69 def catchterm(num, frame):
71 70 if blocked:
72 71 assertedsigs.append(num)
73 72 else:
74 73 raiseinterrupt(num)
75 74
76 75 try:
77 76 # save handlers first so they can be restored even if a setup is
78 77 # interrupted between signal.signal() and orighandlers[] =.
79 78 for name in [
80 79 b'CTRL_C_EVENT',
81 80 b'SIGINT',
82 81 b'SIGBREAK',
83 82 b'SIGHUP',
84 83 b'SIGTERM',
85 84 ]:
86 85 num = getattr(signal, name, None)
87 86 if num and num not in orighandlers:
88 87 orighandlers[num] = signal.getsignal(num)
89 88 try:
90 89 for num in orighandlers:
91 90 signal.signal(num, catchterm)
92 91 except ValueError:
93 92 pass # in a thread? no luck
94 93
95 94 blocked = True
96 95 yield
97 96 finally:
98 97 # no simple way to reliably restore all signal handlers because
99 98 # any loops, recursive function calls, except blocks, etc. can be
100 99 # interrupted. so instead, make catchterm() raise interrupt.
101 100 blocked = False
102 101 try:
103 102 for num, handler in orighandlers.items():
104 103 signal.signal(num, handler)
105 104 except ValueError:
106 105 pass # in a thread?
107 106
108 107 # re-raise interrupt exception if any, which may be shadowed by a new
109 108 # interrupt occurred while re-raising the first one
110 109 if assertedsigs:
111 110 raiseinterrupt(assertedsigs[0])
112 111
113 112
114 113 def trylock(ui, vfs, lockname, timeout, warntimeout, *args, **kwargs):
115 114 """return an acquired lock or raise an a LockHeld exception
116 115
117 116 This function is responsible to issue warnings and or debug messages about
118 117 the held lock while trying to acquires it."""
119 118
120 119 def printwarning(printer, locker):
121 120 """issue the usual "waiting on lock" message through any channel"""
122 121 # show more details for new-style locks
123 122 if b':' in locker:
124 123 host, pid = locker.split(b":", 1)
125 124 msg = _(
126 125 b"waiting for lock on %s held by process %r on host %r\n"
127 126 ) % (
128 127 pycompat.bytestr(l.desc),
129 128 pycompat.bytestr(pid),
130 129 pycompat.bytestr(host),
131 130 )
132 131 else:
133 132 msg = _(b"waiting for lock on %s held by %r\n") % (
134 133 l.desc,
135 134 pycompat.bytestr(locker),
136 135 )
137 136 printer(msg)
138 137
139 138 l = lock(vfs, lockname, 0, *args, dolock=False, **kwargs)
140 139
141 140 debugidx = 0 if (warntimeout and timeout) else -1
142 141 warningidx = 0
143 142 if not timeout:
144 143 warningidx = -1
145 144 elif warntimeout:
146 145 warningidx = warntimeout
147 146
148 147 delay = 0
149 148 while True:
150 149 try:
151 150 l._trylock()
152 151 break
153 152 except error.LockHeld as inst:
154 153 if delay == debugidx:
155 154 printwarning(ui.debug, inst.locker)
156 155 if delay == warningidx:
157 156 printwarning(ui.warn, inst.locker)
158 157 if timeout <= delay:
159 158 raise error.LockHeld(
160 159 errno.ETIMEDOUT, inst.filename, l.desc, inst.locker
161 160 )
162 161 time.sleep(1)
163 162 delay += 1
164 163
165 164 l.delay = delay
166 165 if l.delay:
167 166 if 0 <= warningidx <= l.delay:
168 167 ui.warn(_(b"got lock after %d seconds\n") % l.delay)
169 168 else:
170 169 ui.debug(b"got lock after %d seconds\n" % l.delay)
171 170 if l.acquirefn:
172 171 l.acquirefn()
173 172 return l
174 173
175 174
176 175 class lock:
177 176 """An advisory lock held by one process to control access to a set
178 177 of files. Non-cooperating processes or incorrectly written scripts
179 178 can ignore Mercurial's locking scheme and stomp all over the
180 179 repository, so don't do that.
181 180
182 181 Typically used via localrepository.lock() to lock the repository
183 182 store (.hg/store/) or localrepository.wlock() to lock everything
184 183 else under .hg/."""
185 184
186 185 # lock is symlink on platforms that support it, file on others.
187 186
188 187 # symlink is used because create of directory entry and contents
189 188 # are atomic even over nfs.
190 189
191 190 # old-style lock: symlink to pid
192 191 # new-style lock: symlink to hostname:pid
193 192
194 193 _host = None
195 194
196 195 def __init__(
197 196 self,
198 197 vfs,
199 198 fname,
200 199 timeout=-1,
201 200 releasefn=None,
202 201 acquirefn=None,
203 202 desc=None,
204 203 signalsafe=True,
205 204 dolock=True,
206 205 ):
207 206 self.vfs = vfs
208 207 self.f = fname
209 208 self.held = 0
210 209 self.timeout = timeout
211 210 self.releasefn = releasefn
212 211 self.acquirefn = acquirefn
213 212 self.desc = desc
214 213 if signalsafe:
215 214 self._maybedelayedinterrupt = _delayedinterrupt
216 215 else:
217 216 self._maybedelayedinterrupt = util.nullcontextmanager
218 217 self.postrelease = []
219 218 self.pid = self._getpid()
220 219 if dolock:
221 220 self.delay = self.lock()
222 221 if self.acquirefn:
223 222 self.acquirefn()
224 223
225 224 def __enter__(self):
226 225 return self
227 226
228 227 def __exit__(self, exc_type, exc_value, exc_tb):
229 228 success = all(a is None for a in (exc_type, exc_value, exc_tb))
230 229 self.release(success=success)
231 230
232 231 def __del__(self):
233 232 if self.held:
234 233 warnings.warn(
235 234 "use lock.release instead of del lock",
236 235 category=DeprecationWarning,
237 236 stacklevel=2,
238 237 )
239 238
240 239 # ensure the lock will be removed
241 240 # even if recursive locking did occur
242 241 self.held = 1
243 242
244 243 self.release()
245 244
246 245 def _getpid(self):
247 246 # wrapper around procutil.getpid() to make testing easier
248 247 return procutil.getpid()
249 248
250 249 def lock(self):
251 250 timeout = self.timeout
252 251 while True:
253 252 try:
254 253 self._trylock()
255 254 return self.timeout - timeout
256 255 except error.LockHeld as inst:
257 256 if timeout != 0:
258 257 time.sleep(1)
259 258 if timeout > 0:
260 259 timeout -= 1
261 260 continue
262 261 raise error.LockHeld(
263 262 errno.ETIMEDOUT, inst.filename, self.desc, inst.locker
264 263 )
265 264
266 265 def _trylock(self):
267 266 if self.held:
268 267 self.held += 1
269 268 return
270 269 if lock._host is None:
271 270 lock._host = _getlockprefix()
272 271 lockname = b'%s:%d' % (lock._host, self.pid)
273 272 retry = 5
274 273 while not self.held and retry:
275 274 retry -= 1
276 275 try:
277 276 with self._maybedelayedinterrupt():
278 277 self.vfs.makelock(lockname, self.f)
279 278 self.held = 1
280 279 except (OSError, IOError) as why:
281 280 if why.errno == errno.EEXIST:
282 281 locker = self._readlock()
283 282 if locker is None:
284 283 continue
285 284
286 285 locker = self._testlock(locker)
287 286 if locker is not None:
288 287 raise error.LockHeld(
289 288 errno.EAGAIN,
290 289 self.vfs.join(self.f),
291 290 self.desc,
292 291 locker,
293 292 )
294 293 else:
295 294 raise error.LockUnavailable(
296 295 why.errno, why.strerror, why.filename, self.desc
297 296 )
298 297
299 298 if not self.held:
300 299 # use empty locker to mean "busy for frequent lock/unlock
301 300 # by many processes"
302 301 raise error.LockHeld(
303 302 errno.EAGAIN, self.vfs.join(self.f), self.desc, b""
304 303 )
305 304
306 305 def _readlock(self):
307 306 """read lock and return its value
308 307
309 308 Returns None if no lock exists, pid for old-style locks, and host:pid
310 309 for new-style locks.
311 310 """
312 311 try:
313 312 return self.vfs.readlock(self.f)
314 313 except FileNotFoundError:
315 314 return None
316 315
317 316 def _lockshouldbebroken(self, locker):
318 317 if locker is None:
319 318 return False
320 319 try:
321 320 host, pid = locker.split(b":", 1)
322 321 except ValueError:
323 322 return False
324 323 if host != lock._host:
325 324 return False
326 325 try:
327 326 pid = int(pid)
328 327 except ValueError:
329 328 return False
330 329 if procutil.testpid(pid):
331 330 return False
332 331 return True
333 332
334 333 def _testlock(self, locker):
335 334 if not self._lockshouldbebroken(locker):
336 335 return locker
337 336
338 337 # if locker dead, break lock. must do this with another lock
339 338 # held, or can race and break valid lock.
340 339 try:
341 340 with lock(self.vfs, self.f + b'.break', timeout=0):
342 341 locker = self._readlock()
343 342 if not self._lockshouldbebroken(locker):
344 343 return locker
345 344 self.vfs.unlink(self.f)
346 345 except error.LockError:
347 346 return locker
348 347
349 348 def testlock(self):
350 349 """return id of locker if lock is valid, else None.
351 350
352 351 If old-style lock, we cannot tell what machine locker is on.
353 352 with new-style lock, if locker is on this machine, we can
354 353 see if locker is alive. If locker is on this machine but
355 354 not alive, we can safely break lock.
356 355
357 356 The lock file is only deleted when None is returned.
358 357
359 358 """
360 359 locker = self._readlock()
361 360 return self._testlock(locker)
362 361
363 362 def release(self, success=True):
364 363 """release the lock and execute callback function if any
365 364
366 365 If the lock has been acquired multiple times, the actual release is
367 366 delayed to the last release call."""
368 367 if self.held > 1:
369 368 self.held -= 1
370 369 elif self.held == 1:
371 370 self.held = 0
372 371 if self._getpid() != self.pid:
373 372 # we forked, and are not the parent
374 373 return
375 374 try:
376 375 if self.releasefn:
377 376 self.releasefn()
378 377 finally:
379 378 try:
380 379 self.vfs.unlink(self.f)
381 380 except OSError:
382 381 pass
383 382 # The postrelease functions typically assume the lock is not held
384 383 # at all.
385 384 for callback in self.postrelease:
386 385 callback(success)
387 386 # Prevent double usage and help clear cycles.
388 387 self.postrelease = None
389 388
390 389
391 390 def release(*locks):
392 391 for lock in locks:
393 392 if lock is not None:
394 393 lock.release()
@@ -1,755 +1,753 b''
1 1 # posix.py - Posix utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import errno
10 10 import fcntl
11 11 import getpass
12 12 import grp
13 13 import os
14 14 import pwd
15 15 import re
16 16 import select
17 17 import stat
18 18 import sys
19 19 import tempfile
20 20 import unicodedata
21 21
22 22 from .i18n import _
23 23 from .pycompat import (
24 24 getattr,
25 25 open,
26 26 )
27 27 from . import (
28 28 encoding,
29 29 error,
30 30 policy,
31 31 pycompat,
32 32 )
33 33
34 34 osutil = policy.importmod('osutil')
35 35
36 36 normpath = os.path.normpath
37 37 samestat = os.path.samestat
38 38 abspath = os.path.abspath # re-exports
39 39
40 40 try:
41 41 oslink = os.link
42 42 except AttributeError:
43 43 # Some platforms build Python without os.link on systems that are
44 44 # vaguely unix-like but don't have hardlink support. For those
45 45 # poor souls, just say we tried and that it failed so we fall back
46 46 # to copies.
47 47 def oslink(src, dst):
48 48 raise OSError(
49 49 errno.EINVAL, b'hardlinks not supported: %s to %s' % (src, dst)
50 50 )
51 51
52 52
53 53 readlink = os.readlink
54 54 unlink = os.unlink
55 55 rename = os.rename
56 56 removedirs = os.removedirs
57 57 expandglobs = False
58 58
59 59 umask = os.umask(0)
60 60 os.umask(umask)
61 61
62 62 posixfile = open
63 63
64 64
65 65 def split(p):
66 66 """Same as posixpath.split, but faster
67 67
68 68 >>> import posixpath
69 69 >>> for f in [b'/absolute/path/to/file',
70 70 ... b'relative/path/to/file',
71 71 ... b'file_alone',
72 72 ... b'path/to/directory/',
73 73 ... b'/multiple/path//separators',
74 74 ... b'/file_at_root',
75 75 ... b'///multiple_leading_separators_at_root',
76 76 ... b'']:
77 77 ... assert split(f) == posixpath.split(f), f
78 78 """
79 79 ht = p.rsplit(b'/', 1)
80 80 if len(ht) == 1:
81 81 return b'', p
82 82 nh = ht[0].rstrip(b'/')
83 83 if nh:
84 84 return nh, ht[1]
85 85 return ht[0] + b'/', ht[1]
86 86
87 87
88 88 def openhardlinks():
89 89 '''return true if it is safe to hold open file handles to hardlinks'''
90 90 return True
91 91
92 92
93 93 def nlinks(name):
94 94 '''return number of hardlinks for the given file'''
95 95 return os.lstat(name).st_nlink
96 96
97 97
98 98 def parsepatchoutput(output_line):
99 99 """parses the output produced by patch and returns the filename"""
100 100 pf = output_line[14:]
101 101 if pycompat.sysplatform == b'OpenVMS':
102 102 if pf[0] == b'`':
103 103 pf = pf[1:-1] # Remove the quotes
104 104 else:
105 105 if pf.startswith(b"'") and pf.endswith(b"'") and b" " in pf:
106 106 pf = pf[1:-1] # Remove the quotes
107 107 return pf
108 108
109 109
110 110 def sshargs(sshcmd, host, user, port):
111 111 '''Build argument list for ssh'''
112 112 args = user and (b"%s@%s" % (user, host)) or host
113 113 if b'-' in args[:1]:
114 114 raise error.Abort(
115 115 _(b'illegal ssh hostname or username starting with -: %s') % args
116 116 )
117 117 args = shellquote(args)
118 118 if port:
119 119 args = b'-p %s %s' % (shellquote(port), args)
120 120 return args
121 121
122 122
123 123 def isexec(f):
124 124 """check whether a file is executable"""
125 125 return os.lstat(f).st_mode & 0o100 != 0
126 126
127 127
128 128 def setflags(f, l, x):
129 129 st = os.lstat(f)
130 130 s = st.st_mode
131 131 if l:
132 132 if not stat.S_ISLNK(s):
133 133 # switch file to link
134 134 with open(f, b'rb') as fp:
135 135 data = fp.read()
136 136 unlink(f)
137 137 try:
138 138 os.symlink(data, f)
139 139 except OSError:
140 140 # failed to make a link, rewrite file
141 141 with open(f, b"wb") as fp:
142 142 fp.write(data)
143 143
144 144 # no chmod needed at this point
145 145 return
146 146 if stat.S_ISLNK(s):
147 147 # switch link to file
148 148 data = os.readlink(f)
149 149 unlink(f)
150 150 with open(f, b"wb") as fp:
151 151 fp.write(data)
152 152 s = 0o666 & ~umask # avoid restatting for chmod
153 153
154 154 sx = s & 0o100
155 155 if st.st_nlink > 1 and bool(x) != bool(sx):
156 156 # the file is a hardlink, break it
157 157 with open(f, b"rb") as fp:
158 158 data = fp.read()
159 159 unlink(f)
160 160 with open(f, b"wb") as fp:
161 161 fp.write(data)
162 162
163 163 if x and not sx:
164 164 # Turn on +x for every +r bit when making a file executable
165 165 # and obey umask.
166 166 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
167 167 elif not x and sx:
168 168 # Turn off all +x bits
169 169 os.chmod(f, s & 0o666)
170 170
171 171
172 172 def copymode(src, dst, mode=None, enforcewritable=False):
173 173 """Copy the file mode from the file at path src to dst.
174 174 If src doesn't exist, we're using mode instead. If mode is None, we're
175 175 using umask."""
176 176 try:
177 177 st_mode = os.lstat(src).st_mode & 0o777
178 178 except FileNotFoundError:
179 179 st_mode = mode
180 180 if st_mode is None:
181 181 st_mode = ~umask
182 182 st_mode &= 0o666
183 183
184 184 new_mode = st_mode
185 185
186 186 if enforcewritable:
187 187 new_mode |= stat.S_IWUSR
188 188
189 189 os.chmod(dst, new_mode)
190 190
191 191
192 192 def checkexec(path):
193 193 """
194 194 Check whether the given path is on a filesystem with UNIX-like exec flags
195 195
196 196 Requires a directory (like /foo/.hg)
197 197 """
198 198
199 199 # VFAT on some Linux versions can flip mode but it doesn't persist
200 200 # a FS remount. Frequently we can detect it if files are created
201 201 # with exec bit on.
202 202
203 203 try:
204 204 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
205 205 basedir = os.path.join(path, b'.hg')
206 206 cachedir = os.path.join(basedir, b'wcache')
207 207 storedir = os.path.join(basedir, b'store')
208 208 if not os.path.exists(cachedir):
209 209 try:
210 210 # we want to create the 'cache' directory, not the '.hg' one.
211 211 # Automatically creating '.hg' directory could silently spawn
212 212 # invalid Mercurial repositories. That seems like a bad idea.
213 213 os.mkdir(cachedir)
214 214 if os.path.exists(storedir):
215 215 copymode(storedir, cachedir)
216 216 else:
217 217 copymode(basedir, cachedir)
218 218 except (IOError, OSError):
219 219 # we other fallback logic triggers
220 220 pass
221 221 if os.path.isdir(cachedir):
222 222 checkisexec = os.path.join(cachedir, b'checkisexec')
223 223 checknoexec = os.path.join(cachedir, b'checknoexec')
224 224
225 225 try:
226 226 m = os.stat(checkisexec).st_mode
227 227 except FileNotFoundError:
228 228 # checkisexec does not exist - fall through ...
229 229 pass
230 230 else:
231 231 # checkisexec exists, check if it actually is exec
232 232 if m & EXECFLAGS != 0:
233 233 # ensure checkisexec exists, check it isn't exec
234 234 try:
235 235 m = os.stat(checknoexec).st_mode
236 236 except FileNotFoundError:
237 237 open(checknoexec, b'w').close() # might fail
238 238 m = os.stat(checknoexec).st_mode
239 239 if m & EXECFLAGS == 0:
240 240 # check-exec is exec and check-no-exec is not exec
241 241 return True
242 242 # checknoexec exists but is exec - delete it
243 243 unlink(checknoexec)
244 244 # checkisexec exists but is not exec - delete it
245 245 unlink(checkisexec)
246 246
247 247 # check using one file, leave it as checkisexec
248 248 checkdir = cachedir
249 249 else:
250 250 # check directly in path and don't leave checkisexec behind
251 251 checkdir = path
252 252 checkisexec = None
253 253 fh, fn = pycompat.mkstemp(dir=checkdir, prefix=b'hg-checkexec-')
254 254 try:
255 255 os.close(fh)
256 256 m = os.stat(fn).st_mode
257 257 if m & EXECFLAGS == 0:
258 258 os.chmod(fn, m & 0o777 | EXECFLAGS)
259 259 if os.stat(fn).st_mode & EXECFLAGS != 0:
260 260 if checkisexec is not None:
261 261 os.rename(fn, checkisexec)
262 262 fn = None
263 263 return True
264 264 finally:
265 265 if fn is not None:
266 266 unlink(fn)
267 267 except (IOError, OSError):
268 268 # we don't care, the user probably won't be able to commit anyway
269 269 return False
270 270
271 271
272 272 def checklink(path):
273 273 """check whether the given path is on a symlink-capable filesystem"""
274 274 # mktemp is not racy because symlink creation will fail if the
275 275 # file already exists
276 276 while True:
277 277 cachedir = os.path.join(path, b'.hg', b'wcache')
278 278 checklink = os.path.join(cachedir, b'checklink')
279 279 # try fast path, read only
280 280 if os.path.islink(checklink):
281 281 return True
282 282 if os.path.isdir(cachedir):
283 283 checkdir = cachedir
284 284 else:
285 285 checkdir = path
286 286 cachedir = None
287 287 name = tempfile.mktemp(
288 288 dir=pycompat.fsdecode(checkdir), prefix=r'checklink-'
289 289 )
290 290 name = pycompat.fsencode(name)
291 291 try:
292 292 fd = None
293 293 if cachedir is None:
294 294 fd = pycompat.namedtempfile(
295 295 dir=checkdir, prefix=b'hg-checklink-'
296 296 )
297 297 target = os.path.basename(fd.name)
298 298 else:
299 299 # create a fixed file to link to; doesn't matter if it
300 300 # already exists.
301 301 target = b'checklink-target'
302 302 try:
303 303 fullpath = os.path.join(cachedir, target)
304 304 open(fullpath, b'w').close()
305 305 except PermissionError:
306 306 # If we can't write to cachedir, just pretend
307 307 # that the fs is readonly and by association
308 308 # that the fs won't support symlinks. This
309 309 # seems like the least dangerous way to avoid
310 310 # data loss.
311 311 return False
312 312 try:
313 313 os.symlink(target, name)
314 314 if cachedir is None:
315 315 unlink(name)
316 316 else:
317 317 try:
318 318 os.rename(name, checklink)
319 319 except OSError:
320 320 unlink(name)
321 321 return True
322 322 except FileExistsError:
323 323 # link creation might race, try again
324 324 continue
325 325 finally:
326 326 if fd is not None:
327 327 fd.close()
328 328 except AttributeError:
329 329 return False
330 330 except OSError as inst:
331 331 # sshfs might report failure while successfully creating the link
332 332 if inst.errno == errno.EIO and os.path.exists(name):
333 333 unlink(name)
334 334 return False
335 335
336 336
337 337 def checkosfilename(path):
338 338 """Check that the base-relative path is a valid filename on this platform.
339 339 Returns None if the path is ok, or a UI string describing the problem."""
340 340 return None # on posix platforms, every path is ok
341 341
342 342
343 343 def getfsmountpoint(dirpath):
344 344 """Get the filesystem mount point from a directory (best-effort)
345 345
346 346 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
347 347 """
348 348 return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
349 349
350 350
351 351 def getfstype(dirpath):
352 352 """Get the filesystem type name from a directory (best-effort)
353 353
354 354 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
355 355 """
356 356 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
357 357
358 358
359 359 def get_password():
360 360 return encoding.strtolocal(getpass.getpass(''))
361 361
362 362
363 363 def setbinary(fd):
364 364 pass
365 365
366 366
367 367 def pconvert(path):
368 368 return path
369 369
370 370
371 371 def localpath(path):
372 372 return path
373 373
374 374
375 375 def samefile(fpath1, fpath2):
376 376 """Returns whether path1 and path2 refer to the same file. This is only
377 377 guaranteed to work for files, not directories."""
378 378 return os.path.samefile(fpath1, fpath2)
379 379
380 380
381 381 def samedevice(fpath1, fpath2):
382 382 """Returns whether fpath1 and fpath2 are on the same device. This is only
383 383 guaranteed to work for files, not directories."""
384 384 st1 = os.lstat(fpath1)
385 385 st2 = os.lstat(fpath2)
386 386 return st1.st_dev == st2.st_dev
387 387
388 388
389 389 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
390 390 def normcase(path):
391 391 return path.lower()
392 392
393 393
394 394 # what normcase does to ASCII strings
395 395 normcasespec = encoding.normcasespecs.lower
396 396 # fallback normcase function for non-ASCII strings
397 397 normcasefallback = normcase
398 398
399 399 if pycompat.isdarwin:
400 400
401 401 def normcase(path):
402 402 """
403 403 Normalize a filename for OS X-compatible comparison:
404 404 - escape-encode invalid characters
405 405 - decompose to NFD
406 406 - lowercase
407 407 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
408 408
409 409 >>> normcase(b'UPPER')
410 410 'upper'
411 411 >>> normcase(b'Caf\\xc3\\xa9')
412 412 'cafe\\xcc\\x81'
413 413 >>> normcase(b'\\xc3\\x89')
414 414 'e\\xcc\\x81'
415 415 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
416 416 '%b8%ca%c3\\xca\\xbe%c8.jpg'
417 417 """
418 418
419 419 try:
420 420 return encoding.asciilower(path) # exception for non-ASCII
421 421 except UnicodeDecodeError:
422 422 return normcasefallback(path)
423 423
424 424 normcasespec = encoding.normcasespecs.lower
425 425
426 426 def normcasefallback(path):
427 427 try:
428 428 u = path.decode('utf-8')
429 429 except UnicodeDecodeError:
430 430 # OS X percent-encodes any bytes that aren't valid utf-8
431 431 s = b''
432 432 pos = 0
433 433 l = len(path)
434 434 while pos < l:
435 435 try:
436 436 c = encoding.getutf8char(path, pos)
437 437 pos += len(c)
438 438 except ValueError:
439 439 c = b'%%%02X' % ord(path[pos : pos + 1])
440 440 pos += 1
441 441 s += c
442 442
443 443 u = s.decode('utf-8')
444 444
445 445 # Decompose then lowercase (HFS+ technote specifies lower)
446 446 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
447 447 # drop HFS+ ignored characters
448 448 return encoding.hfsignoreclean(enc)
449 449
450 450
451 451 if pycompat.sysplatform == b'cygwin':
452 452 # workaround for cygwin, in which mount point part of path is
453 453 # treated as case sensitive, even though underlying NTFS is case
454 454 # insensitive.
455 455
456 456 # default mount points
457 457 cygwinmountpoints = sorted(
458 458 [
459 459 b"/usr/bin",
460 460 b"/usr/lib",
461 461 b"/cygdrive",
462 462 ],
463 463 reverse=True,
464 464 )
465 465
466 466 # use upper-ing as normcase as same as NTFS workaround
467 467 def normcase(path):
468 468 pathlen = len(path)
469 469 if (pathlen == 0) or (path[0] != pycompat.ossep):
470 470 # treat as relative
471 471 return encoding.upper(path)
472 472
473 473 # to preserve case of mountpoint part
474 474 for mp in cygwinmountpoints:
475 475 if not path.startswith(mp):
476 476 continue
477 477
478 478 mplen = len(mp)
479 479 if mplen == pathlen: # mount point itself
480 480 return mp
481 481 if path[mplen] == pycompat.ossep:
482 482 return mp + encoding.upper(path[mplen:])
483 483
484 484 return encoding.upper(path)
485 485
486 486 normcasespec = encoding.normcasespecs.other
487 487 normcasefallback = normcase
488 488
489 489 # Cygwin translates native ACLs to POSIX permissions,
490 490 # but these translations are not supported by native
491 491 # tools, so the exec bit tends to be set erroneously.
492 492 # Therefore, disable executable bit access on Cygwin.
493 493 def checkexec(path):
494 494 return False
495 495
496 496 # Similarly, Cygwin's symlink emulation is likely to create
497 497 # problems when Mercurial is used from both Cygwin and native
498 498 # Windows, with other native tools, or on shared volumes
499 499 def checklink(path):
500 500 return False
501 501
502 502
503 503 _needsshellquote = None
504 504
505 505
506 506 def shellquote(s):
507 507 if pycompat.sysplatform == b'OpenVMS':
508 508 return b'"%s"' % s
509 509 global _needsshellquote
510 510 if _needsshellquote is None:
511 511 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
512 512 if s and not _needsshellquote(s):
513 513 # "s" shouldn't have to be quoted
514 514 return s
515 515 else:
516 516 return b"'%s'" % s.replace(b"'", b"'\\''")
517 517
518 518
519 519 def shellsplit(s):
520 520 """Parse a command string in POSIX shell way (best-effort)"""
521 521 return pycompat.shlexsplit(s, posix=True)
522 522
523 523
524 524 def testpid(pid):
525 525 '''return False if pid dead, True if running or not sure'''
526 526 if pycompat.sysplatform == b'OpenVMS':
527 527 return True
528 528 try:
529 529 os.kill(pid, 0)
530 530 return True
531 531 except OSError as inst:
532 532 return inst.errno != errno.ESRCH
533 533
534 534
535 535 def isowner(st):
536 536 """Return True if the stat object st is from the current user."""
537 537 return st.st_uid == os.getuid()
538 538
539 539
540 540 def findexe(command):
541 541 """Find executable for command searching like which does.
542 542 If command is a basename then PATH is searched for command.
543 543 PATH isn't searched if command is an absolute or relative path.
544 544 If command isn't found None is returned."""
545 545 if pycompat.sysplatform == b'OpenVMS':
546 546 return command
547 547
548 548 def findexisting(executable):
549 549 b'Will return executable if existing file'
550 550 if os.path.isfile(executable) and os.access(executable, os.X_OK):
551 551 return executable
552 552 return None
553 553
554 554 if pycompat.ossep in command:
555 555 return findexisting(command)
556 556
557 557 if pycompat.sysplatform == b'plan9':
558 558 return findexisting(os.path.join(b'/bin', command))
559 559
560 560 for path in encoding.environ.get(b'PATH', b'').split(pycompat.ospathsep):
561 561 executable = findexisting(os.path.join(path, command))
562 562 if executable is not None:
563 563 return executable
564 564 return None
565 565
566 566
567 567 def setsignalhandler():
568 568 pass
569 569
570 570
571 571 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
572 572
573 573
574 574 def statfiles(files):
575 575 """Stat each file in files. Yield each stat, or None if a file does not
576 576 exist or has a type we don't care about."""
577 577 lstat = os.lstat
578 578 getkind = stat.S_IFMT
579 579 for nf in files:
580 580 try:
581 581 st = lstat(nf)
582 582 if getkind(st.st_mode) not in _wantedkinds:
583 583 st = None
584 except OSError as err:
585 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
586 raise
584 except (FileNotFoundError, NotADirectoryError):
587 585 st = None
588 586 yield st
589 587
590 588
591 589 def getuser():
592 590 '''return name of current user'''
593 591 return pycompat.fsencode(getpass.getuser())
594 592
595 593
596 594 def username(uid=None):
597 595 """Return the name of the user with the given uid.
598 596
599 597 If uid is None, return the name of the current user."""
600 598
601 599 if uid is None:
602 600 uid = os.getuid()
603 601 try:
604 602 return pycompat.fsencode(pwd.getpwuid(uid)[0])
605 603 except KeyError:
606 604 return b'%d' % uid
607 605
608 606
609 607 def groupname(gid=None):
610 608 """Return the name of the group with the given gid.
611 609
612 610 If gid is None, return the name of the current group."""
613 611
614 612 if gid is None:
615 613 gid = os.getgid()
616 614 try:
617 615 return pycompat.fsencode(grp.getgrgid(gid)[0])
618 616 except KeyError:
619 617 return pycompat.bytestr(gid)
620 618
621 619
622 620 def groupmembers(name):
623 621 """Return the list of members of the group with the given
624 622 name, KeyError if the group does not exist.
625 623 """
626 624 name = pycompat.fsdecode(name)
627 625 return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
628 626
629 627
630 628 def spawndetached(args):
631 629 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args)
632 630
633 631
634 632 def gethgcmd():
635 633 return sys.argv[:1]
636 634
637 635
638 636 def makedir(path, notindexed):
639 637 os.mkdir(path)
640 638
641 639
642 640 def lookupreg(key, name=None, scope=None):
643 641 return None
644 642
645 643
646 644 def hidewindow():
647 645 """Hide current shell window.
648 646
649 647 Used to hide the window opened when starting asynchronous
650 648 child process under Windows, unneeded on other systems.
651 649 """
652 650 pass
653 651
654 652
655 653 class cachestat:
656 654 def __init__(self, path):
657 655 self.stat = os.stat(path)
658 656
659 657 def cacheable(self):
660 658 return bool(self.stat.st_ino)
661 659
662 660 __hash__ = object.__hash__
663 661
664 662 def __eq__(self, other):
665 663 try:
666 664 # Only dev, ino, size, mtime and atime are likely to change. Out
667 665 # of these, we shouldn't compare atime but should compare the
668 666 # rest. However, one of the other fields changing indicates
669 667 # something fishy going on, so return False if anything but atime
670 668 # changes.
671 669 return (
672 670 self.stat.st_mode == other.stat.st_mode
673 671 and self.stat.st_ino == other.stat.st_ino
674 672 and self.stat.st_dev == other.stat.st_dev
675 673 and self.stat.st_nlink == other.stat.st_nlink
676 674 and self.stat.st_uid == other.stat.st_uid
677 675 and self.stat.st_gid == other.stat.st_gid
678 676 and self.stat.st_size == other.stat.st_size
679 677 and self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME]
680 678 and self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME]
681 679 )
682 680 except AttributeError:
683 681 return False
684 682
685 683 def __ne__(self, other):
686 684 return not self == other
687 685
688 686
689 687 def statislink(st):
690 688 '''check whether a stat result is a symlink'''
691 689 return st and stat.S_ISLNK(st.st_mode)
692 690
693 691
694 692 def statisexec(st):
695 693 '''check whether a stat result is an executable file'''
696 694 return st and (st.st_mode & 0o100 != 0)
697 695
698 696
699 697 def poll(fds):
700 698 """block until something happens on any file descriptor
701 699
702 700 This is a generic helper that will check for any activity
703 701 (read, write. exception) and return the list of touched files.
704 702
705 703 In unsupported cases, it will raise a NotImplementedError"""
706 704 try:
707 705 res = select.select(fds, fds, fds)
708 706 except ValueError: # out of range file descriptor
709 707 raise NotImplementedError()
710 708 return sorted(list(set(sum(res, []))))
711 709
712 710
713 711 def readpipe(pipe):
714 712 """Read all available data from a pipe."""
715 713 # We can't fstat() a pipe because Linux will always report 0.
716 714 # So, we set the pipe to non-blocking mode and read everything
717 715 # that's available.
718 716 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
719 717 flags |= os.O_NONBLOCK
720 718 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
721 719
722 720 try:
723 721 chunks = []
724 722 while True:
725 723 try:
726 724 s = pipe.read()
727 725 if not s:
728 726 break
729 727 chunks.append(s)
730 728 except IOError:
731 729 break
732 730
733 731 return b''.join(chunks)
734 732 finally:
735 733 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
736 734
737 735
738 736 def bindunixsocket(sock, path):
739 737 """Bind the UNIX domain socket to the specified path"""
740 738 # use relative path instead of full path at bind() if possible, since
741 739 # AF_UNIX path has very small length limit (107 chars) on common
742 740 # platforms (see sys/un.h)
743 741 dirname, basename = os.path.split(path)
744 742 bakwdfd = None
745 743
746 744 try:
747 745 if dirname:
748 746 bakwdfd = os.open(b'.', os.O_DIRECTORY)
749 747 os.chdir(dirname)
750 748 sock.bind(basename)
751 749 if bakwdfd:
752 750 os.fchdir(bakwdfd)
753 751 finally:
754 752 if bakwdfd:
755 753 os.close(bakwdfd)
@@ -1,1781 +1,1777 b''
1 1 #
2 2 # This is the mercurial setup script.
3 3 #
4 4 # 'python setup.py install', or
5 5 # 'python setup.py --help' for more options
6 6 import os
7 7
8 8 # Mercurial can't work on 3.6.0 or 3.6.1 due to a bug in % formatting
9 9 # in bytestrings.
10 10 supportedpy = ','.join(
11 11 [
12 12 '>=3.6.2',
13 13 ]
14 14 )
15 15
16 16 import sys, platform
17 17 import sysconfig
18 18
19 19
20 20 def sysstr(s):
21 21 return s.decode('latin-1')
22 22
23 23
24 24 import ssl
25 25
26 26 # ssl.HAS_TLSv1* are preferred to check support but they were added in Python
27 27 # 3.7. Prior to CPython commit 6e8cda91d92da72800d891b2fc2073ecbc134d98
28 28 # (backported to the 3.7 branch), ssl.PROTOCOL_TLSv1_1 / ssl.PROTOCOL_TLSv1_2
29 29 # were defined only if compiled against a OpenSSL version with TLS 1.1 / 1.2
30 30 # support. At the mentioned commit, they were unconditionally defined.
31 31 _notset = object()
32 32 has_tlsv1_1 = getattr(ssl, 'HAS_TLSv1_1', _notset)
33 33 if has_tlsv1_1 is _notset:
34 34 has_tlsv1_1 = getattr(ssl, 'PROTOCOL_TLSv1_1', _notset) is not _notset
35 35 has_tlsv1_2 = getattr(ssl, 'HAS_TLSv1_2', _notset)
36 36 if has_tlsv1_2 is _notset:
37 37 has_tlsv1_2 = getattr(ssl, 'PROTOCOL_TLSv1_2', _notset) is not _notset
38 38 if not (has_tlsv1_1 or has_tlsv1_2):
39 39 error = """
40 40 The `ssl` module does not advertise support for TLS 1.1 or TLS 1.2.
41 41 Please make sure that your Python installation was compiled against an OpenSSL
42 42 version enabling these features (likely this requires the OpenSSL version to
43 43 be at least 1.0.1).
44 44 """
45 45 print(error, file=sys.stderr)
46 46 sys.exit(1)
47 47
48 48 DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
49 49
50 50 # Solaris Python packaging brain damage
51 51 try:
52 52 import hashlib
53 53
54 54 sha = hashlib.sha1()
55 55 except ImportError:
56 56 try:
57 57 import sha
58 58
59 59 sha.sha # silence unused import warning
60 60 except ImportError:
61 61 raise SystemExit(
62 62 "Couldn't import standard hashlib (incomplete Python install)."
63 63 )
64 64
65 65 try:
66 66 import zlib
67 67
68 68 zlib.compressobj # silence unused import warning
69 69 except ImportError:
70 70 raise SystemExit(
71 71 "Couldn't import standard zlib (incomplete Python install)."
72 72 )
73 73
74 74 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
75 75 isironpython = False
76 76 try:
77 77 isironpython = (
78 78 platform.python_implementation().lower().find("ironpython") != -1
79 79 )
80 80 except AttributeError:
81 81 pass
82 82
83 83 if isironpython:
84 84 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
85 85 else:
86 86 try:
87 87 import bz2
88 88
89 89 bz2.BZ2Compressor # silence unused import warning
90 90 except ImportError:
91 91 raise SystemExit(
92 92 "Couldn't import standard bz2 (incomplete Python install)."
93 93 )
94 94
95 95 ispypy = "PyPy" in sys.version
96 96
97 97 import ctypes
98 import errno
99 98 import stat, subprocess, time
100 99 import re
101 100 import shutil
102 101 import tempfile
103 102
104 103 # We have issues with setuptools on some platforms and builders. Until
105 104 # those are resolved, setuptools is opt-in except for platforms where
106 105 # we don't have issues.
107 106 issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ
108 107 if issetuptools:
109 108 from setuptools import setup
110 109 else:
111 110 from distutils.core import setup
112 111 from distutils.ccompiler import new_compiler
113 112 from distutils.core import Command, Extension
114 113 from distutils.dist import Distribution
115 114 from distutils.command.build import build
116 115 from distutils.command.build_ext import build_ext
117 116 from distutils.command.build_py import build_py
118 117 from distutils.command.build_scripts import build_scripts
119 118 from distutils.command.install import install
120 119 from distutils.command.install_lib import install_lib
121 120 from distutils.command.install_scripts import install_scripts
122 121 from distutils import log
123 122 from distutils.spawn import spawn, find_executable
124 123 from distutils import file_util
125 124 from distutils.errors import (
126 125 CCompilerError,
127 126 DistutilsError,
128 127 DistutilsExecError,
129 128 )
130 129 from distutils.sysconfig import get_python_inc, get_config_var
131 130 from distutils.version import StrictVersion
132 131
133 132 # Explain to distutils.StrictVersion how our release candidates are versioned
134 133 StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$')
135 134
136 135
137 136 def write_if_changed(path, content):
138 137 """Write content to a file iff the content hasn't changed."""
139 138 if os.path.exists(path):
140 139 with open(path, 'rb') as fh:
141 140 current = fh.read()
142 141 else:
143 142 current = b''
144 143
145 144 if current != content:
146 145 with open(path, 'wb') as fh:
147 146 fh.write(content)
148 147
149 148
150 149 scripts = ['hg']
151 150 if os.name == 'nt':
152 151 # We remove hg.bat if we are able to build hg.exe.
153 152 scripts.append('contrib/win32/hg.bat')
154 153
155 154
156 155 def cancompile(cc, code):
157 156 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
158 157 devnull = oldstderr = None
159 158 try:
160 159 fname = os.path.join(tmpdir, 'testcomp.c')
161 160 f = open(fname, 'w')
162 161 f.write(code)
163 162 f.close()
164 163 # Redirect stderr to /dev/null to hide any error messages
165 164 # from the compiler.
166 165 # This will have to be changed if we ever have to check
167 166 # for a function on Windows.
168 167 devnull = open('/dev/null', 'w')
169 168 oldstderr = os.dup(sys.stderr.fileno())
170 169 os.dup2(devnull.fileno(), sys.stderr.fileno())
171 170 objects = cc.compile([fname], output_dir=tmpdir)
172 171 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
173 172 return True
174 173 except Exception:
175 174 return False
176 175 finally:
177 176 if oldstderr is not None:
178 177 os.dup2(oldstderr, sys.stderr.fileno())
179 178 if devnull is not None:
180 179 devnull.close()
181 180 shutil.rmtree(tmpdir)
182 181
183 182
184 183 # simplified version of distutils.ccompiler.CCompiler.has_function
185 184 # that actually removes its temporary files.
186 185 def hasfunction(cc, funcname):
187 186 code = 'int main(void) { %s(); }\n' % funcname
188 187 return cancompile(cc, code)
189 188
190 189
191 190 def hasheader(cc, headername):
192 191 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
193 192 return cancompile(cc, code)
194 193
195 194
196 195 # py2exe needs to be installed to work
197 196 try:
198 197 import py2exe
199 198
200 199 py2exe.patch_distutils()
201 200 py2exeloaded = True
202 201 # import py2exe's patched Distribution class
203 202 from distutils.core import Distribution
204 203 except ImportError:
205 204 py2exeloaded = False
206 205
207 206
208 207 def runcmd(cmd, env, cwd=None):
209 208 p = subprocess.Popen(
210 209 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd
211 210 )
212 211 out, err = p.communicate()
213 212 return p.returncode, out, err
214 213
215 214
216 215 class hgcommand:
217 216 def __init__(self, cmd, env):
218 217 self.cmd = cmd
219 218 self.env = env
220 219
221 220 def run(self, args):
222 221 cmd = self.cmd + args
223 222 returncode, out, err = runcmd(cmd, self.env)
224 223 err = filterhgerr(err)
225 224 if err or returncode != 0:
226 225 print("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
227 226 print(err, file=sys.stderr)
228 227 return b''
229 228 return out
230 229
231 230
232 231 def filterhgerr(err):
233 232 # If root is executing setup.py, but the repository is owned by
234 233 # another user (as in "sudo python setup.py install") we will get
235 234 # trust warnings since the .hg/hgrc file is untrusted. That is
236 235 # fine, we don't want to load it anyway. Python may warn about
237 236 # a missing __init__.py in mercurial/locale, we also ignore that.
238 237 err = [
239 238 e
240 239 for e in err.splitlines()
241 240 if (
242 241 not e.startswith(b'not trusting file')
243 242 and not e.startswith(b'warning: Not importing')
244 243 and not e.startswith(b'obsolete feature not enabled')
245 244 and not e.startswith(b'*** failed to import extension')
246 245 and not e.startswith(b'devel-warn:')
247 246 and not (
248 247 e.startswith(b'(third party extension')
249 248 and e.endswith(b'or newer of Mercurial; disabling)')
250 249 )
251 250 )
252 251 ]
253 252 return b'\n'.join(b' ' + e for e in err)
254 253
255 254
256 255 def findhg():
257 256 """Try to figure out how we should invoke hg for examining the local
258 257 repository contents.
259 258
260 259 Returns an hgcommand object."""
261 260 # By default, prefer the "hg" command in the user's path. This was
262 261 # presumably the hg command that the user used to create this repository.
263 262 #
264 263 # This repository may require extensions or other settings that would not
265 264 # be enabled by running the hg script directly from this local repository.
266 265 hgenv = os.environ.copy()
267 266 # Use HGPLAIN to disable hgrc settings that would change output formatting,
268 267 # and disable localization for the same reasons.
269 268 hgenv['HGPLAIN'] = '1'
270 269 hgenv['LANGUAGE'] = 'C'
271 270 hgcmd = ['hg']
272 271 # Run a simple "hg log" command just to see if using hg from the user's
273 272 # path works and can successfully interact with this repository. Windows
274 273 # gives precedence to hg.exe in the current directory, so fall back to the
275 274 # python invocation of local hg, where pythonXY.dll can always be found.
276 275 check_cmd = ['log', '-r.', '-Ttest']
277 276 if os.name != 'nt' or not os.path.exists("hg.exe"):
278 277 try:
279 278 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
280 279 except EnvironmentError:
281 280 retcode = -1
282 281 if retcode == 0 and not filterhgerr(err):
283 282 return hgcommand(hgcmd, hgenv)
284 283
285 284 # Fall back to trying the local hg installation.
286 285 hgenv = localhgenv()
287 286 hgcmd = [sys.executable, 'hg']
288 287 try:
289 288 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
290 289 except EnvironmentError:
291 290 retcode = -1
292 291 if retcode == 0 and not filterhgerr(err):
293 292 return hgcommand(hgcmd, hgenv)
294 293
295 294 raise SystemExit(
296 295 'Unable to find a working hg binary to extract the '
297 296 'version from the repository tags'
298 297 )
299 298
300 299
301 300 def localhgenv():
302 301 """Get an environment dictionary to use for invoking or importing
303 302 mercurial from the local repository."""
304 303 # Execute hg out of this directory with a custom environment which takes
305 304 # care to not use any hgrc files and do no localization.
306 305 env = {
307 306 'HGMODULEPOLICY': 'py',
308 307 'HGRCPATH': '',
309 308 'LANGUAGE': 'C',
310 309 'PATH': '',
311 310 } # make pypi modules that use os.environ['PATH'] happy
312 311 if 'LD_LIBRARY_PATH' in os.environ:
313 312 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
314 313 if 'SystemRoot' in os.environ:
315 314 # SystemRoot is required by Windows to load various DLLs. See:
316 315 # https://bugs.python.org/issue13524#msg148850
317 316 env['SystemRoot'] = os.environ['SystemRoot']
318 317 return env
319 318
320 319
321 320 version = ''
322 321
323 322 if os.path.isdir('.hg'):
324 323 hg = findhg()
325 324 cmd = ['log', '-r', '.', '--template', '{tags}\n']
326 325 numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()]
327 326 hgid = sysstr(hg.run(['id', '-i'])).strip()
328 327 if not hgid:
329 328 # Bail out if hg is having problems interacting with this repository,
330 329 # rather than falling through and producing a bogus version number.
331 330 # Continuing with an invalid version number will break extensions
332 331 # that define minimumhgversion.
333 332 raise SystemExit('Unable to determine hg version from local repository')
334 333 if numerictags: # tag(s) found
335 334 version = numerictags[-1]
336 335 if hgid.endswith('+'): # propagate the dirty status to the tag
337 336 version += '+'
338 337 else: # no tag found
339 338 ltagcmd = ['parents', '--template', '{latesttag}']
340 339 ltag = sysstr(hg.run(ltagcmd))
341 340 changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
342 341 changessince = len(hg.run(changessincecmd).splitlines())
343 342 version = '%s+hg%s.%s' % (ltag, changessince, hgid)
344 343 if version.endswith('+'):
345 344 version = version[:-1] + 'local' + time.strftime('%Y%m%d')
346 345 elif os.path.exists('.hg_archival.txt'):
347 346 kw = dict(
348 347 [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')]
349 348 )
350 349 if 'tag' in kw:
351 350 version = kw['tag']
352 351 elif 'latesttag' in kw:
353 352 if 'changessincelatesttag' in kw:
354 353 version = (
355 354 '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw
356 355 )
357 356 else:
358 357 version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw
359 358 else:
360 359 version = '0+hg' + kw.get('node', '')[:12]
361 360 elif os.path.exists('mercurial/__version__.py'):
362 361 with open('mercurial/__version__.py') as f:
363 362 data = f.read()
364 363 version = re.search('version = b"(.*)"', data).group(1)
365 364
366 365 if version:
367 366 versionb = version
368 367 if not isinstance(versionb, bytes):
369 368 versionb = versionb.encode('ascii')
370 369
371 370 write_if_changed(
372 371 'mercurial/__version__.py',
373 372 b''.join(
374 373 [
375 374 b'# this file is autogenerated by setup.py\n'
376 375 b'version = b"%s"\n' % versionb,
377 376 ]
378 377 ),
379 378 )
380 379
381 380
382 381 class hgbuild(build):
383 382 # Insert hgbuildmo first so that files in mercurial/locale/ are found
384 383 # when build_py is run next.
385 384 sub_commands = [('build_mo', None)] + build.sub_commands
386 385
387 386
388 387 class hgbuildmo(build):
389 388
390 389 description = "build translations (.mo files)"
391 390
392 391 def run(self):
393 392 if not find_executable('msgfmt'):
394 393 self.warn(
395 394 "could not find msgfmt executable, no translations "
396 395 "will be built"
397 396 )
398 397 return
399 398
400 399 podir = 'i18n'
401 400 if not os.path.isdir(podir):
402 401 self.warn("could not find %s/ directory" % podir)
403 402 return
404 403
405 404 join = os.path.join
406 405 for po in os.listdir(podir):
407 406 if not po.endswith('.po'):
408 407 continue
409 408 pofile = join(podir, po)
410 409 modir = join('locale', po[:-3], 'LC_MESSAGES')
411 410 mofile = join(modir, 'hg.mo')
412 411 mobuildfile = join('mercurial', mofile)
413 412 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
414 413 if sys.platform != 'sunos5':
415 414 # msgfmt on Solaris does not know about -c
416 415 cmd.append('-c')
417 416 self.mkpath(join('mercurial', modir))
418 417 self.make_file([pofile], mobuildfile, spawn, (cmd,))
419 418
420 419
421 420 class hgdist(Distribution):
422 421 pure = False
423 422 rust = False
424 423 no_rust = False
425 424 cffi = ispypy
426 425
427 426 global_options = Distribution.global_options + [
428 427 ('pure', None, "use pure (slow) Python code instead of C extensions"),
429 428 ('rust', None, "use Rust extensions additionally to C extensions"),
430 429 (
431 430 'no-rust',
432 431 None,
433 432 "do not use Rust extensions additionally to C extensions",
434 433 ),
435 434 ]
436 435
437 436 negative_opt = Distribution.negative_opt.copy()
438 437 boolean_options = ['pure', 'rust', 'no-rust']
439 438 negative_opt['no-rust'] = 'rust'
440 439
441 440 def _set_command_options(self, command_obj, option_dict=None):
442 441 # Not all distutils versions in the wild have boolean_options.
443 442 # This should be cleaned up when we're Python 3 only.
444 443 command_obj.boolean_options = (
445 444 getattr(command_obj, 'boolean_options', []) + self.boolean_options
446 445 )
447 446 return Distribution._set_command_options(
448 447 self, command_obj, option_dict=option_dict
449 448 )
450 449
451 450 def parse_command_line(self):
452 451 ret = Distribution.parse_command_line(self)
453 452 if not (self.rust or self.no_rust):
454 453 hgrustext = os.environ.get('HGWITHRUSTEXT')
455 454 # TODO record it for proper rebuild upon changes
456 455 # (see mercurial/__modulepolicy__.py)
457 456 if hgrustext != 'cpython' and hgrustext is not None:
458 457 if hgrustext:
459 458 msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
460 459 print(msg, file=sys.stderr)
461 460 hgrustext = None
462 461 self.rust = hgrustext is not None
463 462 self.no_rust = not self.rust
464 463 return ret
465 464
466 465 def has_ext_modules(self):
467 466 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
468 467 # too late for some cases
469 468 return not self.pure and Distribution.has_ext_modules(self)
470 469
471 470
472 471 # This is ugly as a one-liner. So use a variable.
473 472 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
474 473 buildextnegops['no-zstd'] = 'zstd'
475 474 buildextnegops['no-rust'] = 'rust'
476 475
477 476
478 477 class hgbuildext(build_ext):
479 478 user_options = build_ext.user_options + [
480 479 ('zstd', None, 'compile zstd bindings [default]'),
481 480 ('no-zstd', None, 'do not compile zstd bindings'),
482 481 (
483 482 'rust',
484 483 None,
485 484 'compile Rust extensions if they are in use '
486 485 '(requires Cargo) [default]',
487 486 ),
488 487 ('no-rust', None, 'do not compile Rust extensions'),
489 488 ]
490 489
491 490 boolean_options = build_ext.boolean_options + ['zstd', 'rust']
492 491 negative_opt = buildextnegops
493 492
494 493 def initialize_options(self):
495 494 self.zstd = True
496 495 self.rust = True
497 496
498 497 return build_ext.initialize_options(self)
499 498
500 499 def finalize_options(self):
501 500 # Unless overridden by the end user, build extensions in parallel.
502 501 # Only influences behavior on Python 3.5+.
503 502 if getattr(self, 'parallel', None) is None:
504 503 self.parallel = True
505 504
506 505 return build_ext.finalize_options(self)
507 506
508 507 def build_extensions(self):
509 508 ruststandalones = [
510 509 e for e in self.extensions if isinstance(e, RustStandaloneExtension)
511 510 ]
512 511 self.extensions = [
513 512 e for e in self.extensions if e not in ruststandalones
514 513 ]
515 514 # Filter out zstd if disabled via argument.
516 515 if not self.zstd:
517 516 self.extensions = [
518 517 e for e in self.extensions if e.name != 'mercurial.zstd'
519 518 ]
520 519
521 520 # Build Rust standalone extensions if it'll be used
522 521 # and its build is not explicitly disabled (for external build
523 522 # as Linux distributions would do)
524 523 if self.distribution.rust and self.rust:
525 524 if not sys.platform.startswith('linux'):
526 525 self.warn(
527 526 "rust extensions have only been tested on Linux "
528 527 "and may not behave correctly on other platforms"
529 528 )
530 529
531 530 for rustext in ruststandalones:
532 531 rustext.build('' if self.inplace else self.build_lib)
533 532
534 533 return build_ext.build_extensions(self)
535 534
536 535 def build_extension(self, ext):
537 536 if (
538 537 self.distribution.rust
539 538 and self.rust
540 539 and isinstance(ext, RustExtension)
541 540 ):
542 541 ext.rustbuild()
543 542 try:
544 543 build_ext.build_extension(self, ext)
545 544 except CCompilerError:
546 545 if not getattr(ext, 'optional', False):
547 546 raise
548 547 log.warn(
549 548 "Failed to build optional extension '%s' (skipping)", ext.name
550 549 )
551 550
552 551
553 552 class hgbuildscripts(build_scripts):
554 553 def run(self):
555 554 if os.name != 'nt' or self.distribution.pure:
556 555 return build_scripts.run(self)
557 556
558 557 exebuilt = False
559 558 try:
560 559 self.run_command('build_hgexe')
561 560 exebuilt = True
562 561 except (DistutilsError, CCompilerError):
563 562 log.warn('failed to build optional hg.exe')
564 563
565 564 if exebuilt:
566 565 # Copying hg.exe to the scripts build directory ensures it is
567 566 # installed by the install_scripts command.
568 567 hgexecommand = self.get_finalized_command('build_hgexe')
569 568 dest = os.path.join(self.build_dir, 'hg.exe')
570 569 self.mkpath(self.build_dir)
571 570 self.copy_file(hgexecommand.hgexepath, dest)
572 571
573 572 # Remove hg.bat because it is redundant with hg.exe.
574 573 self.scripts.remove('contrib/win32/hg.bat')
575 574
576 575 return build_scripts.run(self)
577 576
578 577
class hgbuildpy(build_py):
    """build_py variant that wires up extension modules and module policy."""

    def finalize_options(self):
        build_py.finalize_options(self)

        if self.distribution.pure:
            # Pure build: drop all compiled extensions.
            self.distribution.ext_modules = []
        elif self.distribution.cffi:
            from mercurial.cffi import (
                bdiffbuild,
                mpatchbuild,
            )

            extensions = [
                mpatchbuild.ffi.distutils_extension(),
                bdiffbuild.ffi.distutils_extension(),
            ]
            # cffi modules go here
            if sys.platform == 'darwin':
                from mercurial.cffi import osutilbuild

                extensions.append(osutilbuild.ffi.distutils_extension())
            self.distribution.ext_modules = extensions
        else:
            header = os.path.join(get_python_inc(), 'Python.h')
            if not os.path.exists(header):
                raise SystemExit(
                    'Python headers are required to build '
                    'Mercurial but weren\'t found in %s' % header
                )

    def run(self):
        basepath = os.path.join(self.build_lib, 'mercurial')
        self.mkpath(basepath)

        rust = self.distribution.rust
        if self.distribution.pure:
            modulepolicy = 'py'
        elif self.build_lib == '.':
            # in-place build should run without rebuilding and Rust extensions
            modulepolicy = 'rust+c-allow' if rust else 'allow'
        else:
            modulepolicy = 'rust+c' if rust else 'c'

        content = b''.join(
            [
                b'# this file is autogenerated by setup.py\n',
                b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
            ]
        )
        write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content)

        build_py.run(self)
631 630
632 631
class buildhgextindex(Command):
    """Generate a prebuilt index of hgext, for use in frozen packages."""

    description = 'generate prebuilt index of hgext (for frozen package)'
    user_options = []
    _indexfilename = 'hgext/__index__.py'

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Reset any stale index so the subprocess below does not pick it up.
        if os.path.exists(self._indexfilename):
            with open(self._indexfilename, 'w') as fobj:
                fobj.write('# empty\n')

        # here no extension enabled, disabled() lists up everything
        code = (
            'import pprint; from mercurial import extensions; '
            'ext = extensions.disabled();'
            'ext.pop("__index__", None);'
            'pprint.pprint(ext)'
        )
        returncode, out, err = runcmd(
            [sys.executable, '-c', code], localhgenv()
        )
        if err or returncode != 0:
            raise DistutilsExecError(err)

        with open(self._indexfilename, 'wb') as fobj:
            fobj.write(b'# this file is autogenerated by setup.py\n')
            fobj.write(b'docs = ')
            fobj.write(out)
666 665
667 666
class buildhgexe(build_ext):
    """Compile hg.exe from mercurial/exewrapper.c (Windows only)."""

    description = 'compile hg.exe from mercurial/exewrapper.c'
    user_options = build_ext.user_options + [
        (
            'long-paths-support',
            None,
            'enable support for long paths on '
            'Windows (off by default and '
            'experimental)',
        ),
    ]

    LONG_PATHS_MANIFEST = """
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<application>
<windowsSettings
xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
<ws2:longPathAware>true</ws2:longPathAware>
</windowsSettings>
</application>
</assembly>"""

    def initialize_options(self):
        build_ext.initialize_options(self)
        self.long_paths_support = False

    def build_extensions(self):
        # hg.exe is only meaningful on Windows.
        if os.name != 'nt':
            return
        if isinstance(self.compiler, HackedMingw32CCompiler):
            self.compiler.compiler_so = self.compiler.compiler  # no -mdll
            self.compiler.dll_libraries = []  # no -lmsrvc90

        pythonlib = None

        outdir = os.path.dirname(self.get_ext_fullpath('dummy'))
        self.hgtarget = os.path.join(outdir, 'hg')

        if getattr(sys, 'dllhandle', None):
            # Different Python installs can have different Python library
            # names. e.g. the official CPython distribution uses pythonXY.dll
            # and MinGW uses libpythonX.Y.dll.
            _kernel32 = ctypes.windll.kernel32
            _kernel32.GetModuleFileNameA.argtypes = [
                ctypes.c_void_p,
                ctypes.c_void_p,
                ctypes.c_ulong,
            ]
            _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
            size = 1000
            namebuf = ctypes.create_string_buffer(size + 1)
            namelen = _kernel32.GetModuleFileNameA(
                sys.dllhandle, ctypes.byref(namebuf), size
            )

            if namelen > 0 and namelen != size:
                dllbasename = os.path.basename(namebuf.value)
                if not dllbasename.lower().endswith(b'.dll'):
                    raise SystemExit(
                        'Python DLL does not end with .dll: %s' % dllbasename
                    )
                pythonlib = dllbasename[:-4]

                # Copy the pythonXY.dll next to the binary so that it runs
                # without tampering with PATH.
                dll_dest = os.path.join(
                    os.path.dirname(self.hgtarget),
                    os.fsdecode(dllbasename),
                )

                if not os.path.exists(dll_dest):
                    shutil.copy(namebuf.value, dll_dest)

                # Also overwrite python3.dll so that hgext.git is usable.
                # TODO: also handle the MSYS flavor
                python_x = os.path.join(
                    os.path.dirname(os.fsdecode(namebuf.value)),
                    "python3.dll",
                )

                if os.path.exists(python_x):
                    x_dest = os.path.join(
                        os.path.dirname(self.hgtarget),
                        os.path.basename(python_x),
                    )

                    shutil.copy(python_x, x_dest)

        if not pythonlib:
            log.warn(
                'could not determine Python DLL filename; assuming pythonXY'
            )

            hv = sys.hexversion
            pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF)

        log.info('using %s as Python library name' % pythonlib)
        with open('mercurial/hgpythonlib.h', 'wb') as fobj:
            fobj.write(b'/* this file is autogenerated by setup.py */\n')
            fobj.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)

        objects = self.compiler.compile(
            ['mercurial/exewrapper.c'],
            output_dir=self.build_temp,
            macros=[('_UNICODE', None), ('UNICODE', None)],
        )
        self.compiler.link_executable(
            objects, self.hgtarget, libraries=[], output_dir=self.build_temp
        )
        if self.long_paths_support:
            self.addlongpathsmanifest()

    def addlongpathsmanifest(self):
        r"""Add manifest pieces so that hg.exe understands long paths

        This is an EXPERIMENTAL feature, use with care.
        To enable long paths support, one needs to do two things:
        - build Mercurial with --long-paths-support option
        - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\
          LongPathsEnabled to have value 1.

        Please ignore 'warning 81010002: Unrecognized Element "longPathAware"';
        it happens because Mercurial uses mt.exe circa 2008, which is not
        yet aware of long paths support in the manifest (I think so at least).
        This does not stop mt.exe from embedding/merging the XML properly.

        Why resource #1 should be used for .exe manifests? I don't know and
        wasn't able to find an explanation for mortals. But it seems to work.
        """
        exefname = self.compiler.executable_filename(self.hgtarget)
        fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
        os.close(fdauto)
        with open(manfname, 'w') as fobj:
            fobj.write(self.LONG_PATHS_MANIFEST)
        log.info("long paths manifest is written to '%s'" % manfname)
        inputresource = '-inputresource:%s;#1' % exefname
        outputresource = '-outputresource:%s;#1' % exefname
        log.info("running mt.exe to update hg.exe's manifest in-place")
        # supplying both -manifest and -inputresource to mt.exe makes
        # it merge the embedded and supplied manifests in the -outputresource
        self.spawn(
            [
                'mt.exe',
                '-nologo',
                '-manifest',
                manfname,
                inputresource,
                outputresource,
            ]
        )
        log.info("done updating hg.exe's manifest")
        os.remove(manfname)

    @property
    def hgexepath(self):
        outdir = os.path.dirname(self.get_ext_fullpath('dummy'))
        return os.path.join(self.build_temp, outdir, 'hg.exe')
826 825
827 826
class hgbuilddoc(Command):
    """Build the man pages and/or HTML documentation."""

    description = 'build documentation'
    user_options = [
        ('man', None, 'generate man pages'),
        ('html', None, 'generate html pages'),
    ]

    def initialize_options(self):
        self.man = None
        self.html = None

    def finalize_options(self):
        # If --man or --html are set, only generate what we're told to.
        # Otherwise generate everything.
        have_subset = self.man is not None or self.html is not None

        if have_subset:
            self.man = bool(self.man)
            self.html = bool(self.html)
        else:
            self.man = True
            self.html = True

    def run(self):
        def normalizecrlf(path):
            with open(path, 'rb') as fobj:
                orig = fobj.read()

            if b'\r\n' not in orig:
                return

            log.info('normalizing %s to LF line endings' % path)
            with open(path, 'wb') as fobj:
                fobj.write(orig.replace(b'\r\n', b'\n'))

        def fail(prefix, out, err):
            # Shared error reporting for all doc subprocesses.
            raise SystemExit(
                '%s: %s' % (prefix, '\n'.join([sysstr(out), sysstr(err)]))
            )

        def gentxt(root):
            txt = 'doc/%s.txt' % root
            log.info('generating %s' % txt)
            status, out, err = runcmd(
                [sys.executable, 'gendoc.py', root], os.environ, cwd='doc'
            )
            if status:
                fail('error running gendoc.py', out, err)

            with open(txt, 'wb') as fobj:
                fobj.write(out)

        def gengendoc(root):
            gendoc = 'doc/%s.gendoc.txt' % root

            log.info('generating %s' % gendoc)
            status, out, err = runcmd(
                [sys.executable, 'gendoc.py', '%s.gendoc' % root],
                os.environ,
                cwd='doc',
            )
            if status:
                fail('error running gendoc', out, err)

            with open(gendoc, 'wb') as fobj:
                fobj.write(out)

        def genman(root):
            log.info('generating doc/%s' % root)
            status, out, err = runcmd(
                [
                    sys.executable,
                    'runrst',
                    'hgmanpage',
                    '--halt',
                    'warning',
                    '--strip-elements-with-class',
                    'htmlonly',
                    '%s.txt' % root,
                    root,
                ],
                os.environ,
                cwd='doc',
            )
            if status:
                fail('error running runrst', out, err)

            normalizecrlf('doc/%s' % root)

        def genhtml(root):
            log.info('generating doc/%s.html' % root)
            status, out, err = runcmd(
                [
                    sys.executable,
                    'runrst',
                    'html',
                    '--halt',
                    'warning',
                    '--link-stylesheet',
                    '--stylesheet-path',
                    'style.css',
                    '%s.txt' % root,
                    '%s.html' % root,
                ],
                os.environ,
                cwd='doc',
            )
            if status:
                fail('error running runrst', out, err)

            normalizecrlf('doc/%s.html' % root)

        # This logic is duplicated in doc/Makefile.
        sources = {
            f
            for f in os.listdir('mercurial/helptext')
            if re.search(r'[0-9]\.txt$', f)
        }

        # common.txt is a one-off.
        gentxt('common')

        for source in sorted(sources):
            assert source.endswith('.txt')
            root = source[:-4]

            gentxt(root)
            gengendoc(root)

            if self.man:
                genman(root)
            if self.html:
                genhtml(root)
968 967
969 968
class hginstall(install):
    """install command that refuses to build eggs and adds completion."""

    user_options = install.user_options + [
        (
            'old-and-unmanageable',
            None,
            'noop, present for eggless setuptools compat',
        ),
        (
            'single-version-externally-managed',
            None,
            'noop, present for eggless setuptools compat',
        ),
    ]

    sub_commands = install.sub_commands + [
        ('install_completion', lambda self: True)
    ]

    # Also helps setuptools not be sad while we refuse to create eggs.
    single_version_externally_managed = True

    def get_sub_commands(self):
        # Screen out egg related commands to prevent egg generation. But allow
        # mercurial.egg-info generation, since that is part of modern
        # packaging.
        excluded = {'bdist_egg'}
        return filter(
            lambda cmd: cmd not in excluded, install.get_sub_commands(self)
        )
998 997
999 998
class hginstalllib(install_lib):
    """
    Specialization of install_lib that sets the mode of installed files
    explicitly after copying, instead of preserving the source mode. With a
    restrictive umask (e.g. 027), preserving the original permissions would
    produce a broken install.

    Note that just passing keep_permissions=False to copy_file would be
    insufficient, as it might still be applying a umask.
    """

    def run(self):
        orig_copy_file = file_util.copy_file

        def copy_and_fix_mode(*args, **kwargs):
            src, dst = args[0], args[1]
            dst, copied = orig_copy_file(*args, **kwargs)
            if copied:
                st = os.stat(src)
                # Persist executable bit (apply it to group and other if user
                # has it)
                wanted = 0o755 if st[stat.ST_MODE] & stat.S_IXUSR else 0o644
                mode = stat.S_IMODE(st[stat.ST_MODE])
                os.chmod(dst, (mode & ~0o777) | wanted)

        file_util.copy_file = copy_and_fix_mode
        try:
            install_lib.run(self)
        finally:
            # Always restore the stock copy_file, even on failure.
            file_util.copy_file = orig_copy_file
1035 1034
1036 1035
class hginstallscripts(install_scripts):
    """
    Specialization of install_scripts that rewrites @LIBDIR@ in installed
    scripts to the configured module directory. When possible the path is
    made relative to the scripts directory.
    """

    def initialize_options(self):
        install_scripts.initialize_options(self)

        self.install_lib = None

    def finalize_options(self):
        install_scripts.finalize_options(self)
        self.set_undefined_options('install', ('install_lib', 'install_lib'))

    def run(self):
        install_scripts.run(self)

        # It only makes sense to replace @LIBDIR@ with the install path if
        # the install path is known. For wheels, the logic below calculates
        # the libdir to be "../..". This is because the internal layout of a
        # wheel archive looks like:
        #
        #   mercurial-3.6.1.data/scripts/hg
        #   mercurial/__init__.py
        #
        # When installing wheels, the subdirectories of the "<pkg>.data"
        # directory are translated to system local paths and files therein
        # are copied in place. The mercurial/* files are installed into the
        # site-packages directory. However, the site-packages directory
        # isn't known until wheel install time. This means we have no clue
        # at wheel generation time what the installed site-packages directory
        # will be. And, wheels don't appear to provide the ability to register
        # custom code to run during wheel installation. This all means that
        # we can't reliably set the libdir in wheels: the default behavior
        # of looking in sys.path must do.

        same_drive = (
            os.path.splitdrive(self.install_dir)[0]
            == os.path.splitdrive(self.install_lib)[0]
        )
        if same_drive:
            libdir = os.path.relpath(self.install_lib, self.install_dir)
        else:
            # can't make relative paths from one drive to another, so use an
            # absolute path instead
            libdir = self.install_lib

        for outfile in self.outfiles:
            with open(outfile, 'rb') as fobj:
                data = fobj.read()

            # skip binary files
            if b'\0' in data:
                continue

            # During local installs, the shebang will be rewritten to the final
            # install path. During wheel packaging, the shebang has a special
            # value.
            if data.startswith(b'#!python'):
                log.info(
                    'not rewriting @LIBDIR@ in %s because install path '
                    'not known' % outfile
                )
                continue

            data = data.replace(b'@LIBDIR@', libdir.encode('unicode_escape'))
            with open(outfile, 'wb') as fobj:
                fobj.write(data)
1106 1105
1107 1106
class hginstallcompletion(Command):
    """Install shell completion files for bash and zsh."""

    description = 'Install shell completion'

    def initialize_options(self):
        self.install_dir = None
        self.outputs = []

    def finalize_options(self):
        self.set_undefined_options(
            'install_data', ('install_dir', 'install_dir')
        )

    def get_outputs(self):
        return self.outputs

    def run(self):
        targets = (
            (
                'bash_completion',
                ('share', 'bash-completion', 'completions'),
                'hg',
            ),
            ('zsh_completion', ('share', 'zsh', 'site-functions'), '_hg'),
        )
        for src, dir_path, dest in targets:
            out_dir = os.path.join(self.install_dir, *dir_path)
            self.mkpath(out_dir)

            target = os.path.join(out_dir, dest)
            self.outputs.append(target)
            self.copy_file(os.path.join('contrib', src), target)
1138 1137
1139 1138
1140 1139 # virtualenv installs custom distutils/__init__.py and
1141 1140 # distutils/distutils.cfg files which essentially proxy back to the
1142 1141 # "real" distutils in the main Python install. The presence of this
1143 1142 # directory causes py2exe to pick up the "hacked" distutils package
1144 1143 # from the virtualenv and "import distutils" will fail from the py2exe
1145 1144 # build because the "real" distutils files can't be located.
1146 1145 #
1147 1146 # We work around this by monkeypatching the py2exe code finding Python
1148 1147 # modules to replace the found virtualenv distutils modules with the
1149 1148 # original versions via filesystem scanning. This is a bit hacky. But
1150 1149 # it allows us to use virtualenvs for py2exe packaging, which is more
1151 1150 # deterministic and reproducible.
1152 1151 #
1153 1152 # It's worth noting that the common StackOverflow suggestions for this
1154 1153 # problem involve copying the original distutils files into the
1155 1154 # virtualenv or into the staging directory after setup() is invoked.
1156 1155 # The former is very brittle and can easily break setup(). Our hacking
1157 1156 # of the found modules routine has a similar result as copying the files
1158 1157 # manually. But it makes fewer assumptions about how py2exe works and
1159 1158 # is less brittle.
1160 1159
# This only catches virtualenvs made with virtualenv (as opposed to
# venv, which is likely what Python 3 uses).
py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None

if py2exehacked:
    from distutils.command.py2exe import py2exe as buildpy2exe
    from py2exe.mf import Module as py2exemodule

    class hgbuildpy2exe(buildpy2exe):
        def find_needed_modules(self, mf, files, modules):
            res = buildpy2exe.find_needed_modules(self, mf, files, modules)

            # Replace virtualenv's distutils modules with the real ones.
            res.modules = {
                k: v
                for k, v in res.modules.items()
                if k != 'distutils' and not k.startswith('distutils.')
            }

            import opcode

            # Locate the "real" distutils next to a known stdlib module.
            distutilsreal = os.path.join(
                os.path.dirname(opcode.__file__), 'distutils'
            )

            for root, dirs, files in os.walk(distutilsreal):
                for f in sorted(files):
                    if not f.endswith('.py'):
                        continue

                    full = os.path.join(root, f)

                    parents = ['distutils']
                    if root != distutilsreal:
                        rel = os.path.relpath(root, distutilsreal)
                        parents.extend(rel.split(os.sep))

                    modname = '%s.%s' % ('.'.join(parents), f[:-3])

                    if modname.startswith('distutils.tests.'):
                        continue

                    if modname.endswith('.__init__'):
                        modname = modname[: -len('.__init__')]
                        path = os.path.dirname(full)
                    else:
                        path = None

                    res.modules[modname] = py2exemodule(
                        modname, full, path=path
                    )

            if 'distutils' not in res.modules:
                raise SystemExit('could not find distutils modules')

            return res
1219 1218
1220 1219
# Registry mapping distutils command names to the Mercurial-specific
# command implementations defined above.
cmdclass = {
    'build': hgbuild,
    'build_doc': hgbuilddoc,
    'build_mo': hgbuildmo,
    'build_ext': hgbuildext,
    'build_py': hgbuildpy,
    'build_scripts': hgbuildscripts,
    'build_hgextindex': buildhgextindex,
    'install': hginstall,
    'install_completion': hginstallcompletion,
    'install_lib': hginstalllib,
    'install_scripts': hginstallscripts,
    'build_hgexe': buildhgexe,
}

# Only register the py2exe override when the virtualenv workaround applies.
if py2exehacked:
    cmdclass['py2exe'] = hgbuildpy2exe
1238 1237
# Python packages shipped with Mercurial.
packages = [
    'mercurial',
    'mercurial.cext',
    'mercurial.cffi',
    'mercurial.defaultrc',
    'mercurial.dirstateutils',
    'mercurial.helptext',
    'mercurial.helptext.internals',
    'mercurial.hgweb',
    'mercurial.interfaces',
    'mercurial.pure',
    'mercurial.templates',
    'mercurial.thirdparty',
    'mercurial.thirdparty.attr',
    'mercurial.thirdparty.zope',
    'mercurial.thirdparty.zope.interface',
    'mercurial.upgrade_utils',
    'mercurial.utils',
    'mercurial.revlogutils',
    'mercurial.testing',
    'hgext',
    'hgext.convert',
    'hgext.fsmonitor',
    'hgext.fastannotate',
    'hgext.fsmonitor.pywatchman',
    'hgext.git',
    'hgext.highlight',
    'hgext.hooklib',
    'hgext.infinitepush',
    'hgext.largefiles',
    'hgext.lfs',
    'hgext.narrow',
    'hgext.remotefilelog',
    'hgext.zeroconf',
    'hgext3rd',
    'hgdemandimport',
]

# Each template theme is its own package.
_templates_dir = os.path.join('mercurial', 'templates')
for name in os.listdir(_templates_dir):
    if name == '__pycache__':
        continue
    if os.path.isdir(os.path.join(_templates_dir, name)):
        packages.append('mercurial.templates.%s' % name)

if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
    # py2exe can't cope with namespace packages very well, so we have to
    # install any hgext3rd.* extensions that we want in the final py2exe
    # image here. This is gross, but you gotta do what you gotta do.
    packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1288 1287
# Headers every C extension depends on.
common_depends = [
    'mercurial/bitmanipulation.h',
    'mercurial/compat.h',
    'mercurial/cext/util.h',
]
common_include_dirs = ['mercurial']

common_cflags = []

# MSVC 2008 still needs declarations at the top of the scope, but Python 3.9
# makes declarations not at the top of a scope in the headers.
if os.name != 'nt' and sys.version_info[1] < 9:
    common_cflags = ['-Werror=declaration-after-statement']

osutil_cflags = []
osutil_ldflags = []

# platform specific macros
for plat, func in [('bsd', 'setproctitle')]:
    if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
        osutil_cflags.append('-DHAVE_%s' % func.upper())

# Probe for statfs flavors by compiling tiny C programs.
for plat, macro, code in [
    (
        'bsd|darwin',
        'BSD_STATFS',
        '''
        #include <sys/param.h>
        #include <sys/mount.h>
        int main() { struct statfs s; return sizeof(s.f_fstypename); }
        ''',
    ),
    (
        'linux',
        'LINUX_STATFS',
        '''
        #include <linux/magic.h>
        #include <sys/vfs.h>
        int main() { struct statfs s; return sizeof(s.f_type); }
        ''',
    ),
]:
    if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
        osutil_cflags.append('-DHAVE_%s' % macro)

if sys.platform == 'darwin':
    osutil_ldflags += ['-framework', 'ApplicationServices']

if sys.platform == 'sunos5':
    osutil_ldflags += ['-lsocket']

xdiff_srcs = [
    'mercurial/thirdparty/xdiff/xdiffi.c',
    'mercurial/thirdparty/xdiff/xprepare.c',
    'mercurial/thirdparty/xdiff/xutils.c',
]

xdiff_headers = [
    'mercurial/thirdparty/xdiff/xdiff.h',
    'mercurial/thirdparty/xdiff/xdiffi.h',
    'mercurial/thirdparty/xdiff/xinclude.h',
    'mercurial/thirdparty/xdiff/xmacros.h',
    'mercurial/thirdparty/xdiff/xprepare.h',
    'mercurial/thirdparty/xdiff/xtypes.h',
    'mercurial/thirdparty/xdiff/xutils.h',
]
1355 1354
1356 1355
class RustCompilationError(CCompilerError):
    """Raised when building a Rust extension with cargo fails."""
1359 1358
1360 1359
class RustExtension(Extension):
    """Base classes for concrete Rust Extension classes.

    Subclasses describe a Python extension whose implementation lives in
    the Rust crate under ``rust/<subcrate>``; ``rustbuild()`` drives
    ``cargo`` to compile it in release mode.
    """

    rusttargetdir = os.path.join('rust', 'target', 'release')

    def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
        Extension.__init__(self, mpath, sources, **kw)
        srcdir = self.rustsrcdir = os.path.join('rust', subcrate)

        # adding Rust source and control files to depends so that the extension
        # gets rebuilt if they've changed
        self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
        cargo_lock = os.path.join(srcdir, 'Cargo.lock')
        if os.path.exists(cargo_lock):
            self.depends.append(cargo_lock)
        for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
            self.depends.extend(
                os.path.join(dirpath, fname)
                for fname in fnames
                if os.path.splitext(fname)[1] == '.rs'
            )

    @staticmethod
    def rustdylibsuffix():
        """Return the suffix for shared libraries produced by rustc.

        See also: https://doc.rust-lang.org/reference/linkage.html
        """
        if sys.platform == 'darwin':
            return '.dylib'
        elif os.name == 'nt':
            return '.dll'
        else:
            return '.so'

    def rustbuild(self):
        """Build the crate with ``cargo rustc --release``.

        Raises RustCompilationError when cargo is missing, not
        executable, or fails to build the crate.
        """
        env = os.environ.copy()
        if 'HGTEST_RESTOREENV' in env:
            # Mercurial tests change HOME to a temporary directory,
            # but, if installed with rustup, the Rust toolchain needs
            # HOME to be correct (otherwise the 'no default toolchain'
            # error message is issued and the build fails).
            # This happens currently with test-hghave.t, which does
            # invoke this build.

            # Unix only fix (os.path.expanduser not really reliable if
            # HOME is shadowed like this)
            import pwd

            env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir

        cargocmd = ['cargo', 'rustc', '--release']

        rust_features = env.get("HG_RUST_FEATURES")
        if rust_features:
            cargocmd.extend(('--features', rust_features))

        cargocmd.append('--')
        if sys.platform == 'darwin':
            cargocmd.extend(
                ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup")
            )
        try:
            subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
        # Catch the specific OSError subclasses rather than inspecting
        # exc.errno: the errno-based handler required the removed
        # `import errno` (NameError) and shadowed these clauses.
        except FileNotFoundError:
            raise RustCompilationError("Cargo not found")
        except PermissionError:
            raise RustCompilationError(
                "Cargo found, but permission to execute it is denied"
            )
        except subprocess.CalledProcessError:
            raise RustCompilationError(
                "Cargo failed. Working directory: %r, "
                "command: %r, environment: %r"
                % (self.rustsrcdir, cargocmd, env)
            )
1440 1436
1441 1437
class RustStandaloneExtension(RustExtension):
    """A Rust extension shipped as a standalone dynamic library."""

    def __init__(self, pydottedname, rustcrate, dylibname, **kw):
        RustExtension.__init__(
            self, pydottedname, [], dylibname, rustcrate, **kw
        )
        self.dylibname = dylibname

    def build(self, target_dir):
        # Compile the crate, then copy the produced library to the Python
        # module location derived from the dotted name under target_dir.
        self.rustbuild()
        dest_parts = [target_dir] + self.name.split('.')
        dest_parts[-1] += DYLIB_SUFFIX
        built = os.path.join(
            self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
        )
        shutil.copy2(built, os.path.join(*dest_parts))
1460 1456
1461 1457
# C (and one Rust) extension modules built for the non-pure installation.
extmodules = [
    Extension(
        'mercurial.cext.base85',
        ['mercurial/cext/base85.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.cext.bdiff',
        ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
    ),
    Extension(
        'mercurial.cext.mpatch',
        ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.cext.parsers',
        [
            'mercurial/cext/charencode.c',
            'mercurial/cext/dirs.c',
            'mercurial/cext/manifest.c',
            'mercurial/cext/parsers.c',
            'mercurial/cext/pathencode.c',
            'mercurial/cext/revlog.c',
        ],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends
        + [
            'mercurial/cext/charencode.h',
            'mercurial/cext/revlog.h',
        ],
    ),
    Extension(
        'mercurial.cext.osutil',
        ['mercurial/cext/osutil.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags + osutil_cflags,
        extra_link_args=osutil_ldflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
        [
            'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
        ],
        extra_compile_args=common_cflags,
    ),
    Extension(
        'mercurial.thirdparty.sha1dc',
        [
            'mercurial/thirdparty/sha1dc/cext.c',
            'mercurial/thirdparty/sha1dc/lib/sha1.c',
            'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
        ],
        extra_compile_args=common_cflags,
    ),
    Extension(
        'hgext.fsmonitor.pywatchman.bser',
        ['hgext/fsmonitor/pywatchman/bser.c'],
        extra_compile_args=common_cflags,
    ),
    RustStandaloneExtension(
        'mercurial.rustext',
        'hg-cpython',
        'librusthg',
    ),
]
1537 1533
1538 1534
# Build the vendored python-zstandard C extension and register it with the
# extension-module list.  The build helper lives under contrib/, so it has
# to be made importable first.
sys.path.insert(0, 'contrib/python-zstandard')
import setup_zstd

_zstd_root = os.path.abspath(os.path.dirname(__file__))
zstd = setup_zstd.get_c_extension(name='mercurial.zstd', root=_zstd_root)
zstd.extra_compile_args += common_cflags
extmodules.append(zstd)
1547 1543
try:
    from distutils import cygwinccompiler

    # The -mno-cygwin option has been deprecated for years; strip it from
    # every compiler/linker command line the Mingw32 compiler would run.
    mingw32compilerclass = cygwinccompiler.Mingw32CCompiler

    class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            for attr in ('compiler', 'compiler_so', 'linker_exe', 'linker_so'):
                cmdline = getattr(self, attr)
                if '-mno-cygwin' in cmdline:
                    cmdline.remove('-mno-cygwin')

    cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
except ImportError:
    # the cygwinccompiler package is not available on some Python
    # distributions like the ones from the optware project for Synology
    # DiskStation boxes
    class HackedMingw32CCompiler:
        pass
1570 1566
1571 1567
if os.name == 'nt':
    # Allow compiler/linker flags to be added to Visual Studio builds. Passing
    # extra_link_args to distutils.extensions.Extension() doesn't have any
    # effect.
    from distutils import msvccompiler

    msvccompilerclass = msvccompiler.MSVCCompiler

    class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
        def initialize(self):
            super().initialize()
            # "warning LNK4197: export 'func' specified multiple times"
            for flags in (self.ldflags_shared, self.ldflags_shared_debug):
                flags.append('/ignore:4197')

    msvccompiler.MSVCCompiler = HackedMSVCCompiler
1588 1584
# Non-Python files shipped inside the listed packages (fed to setup() as
# package_data).
packagedata = {
    'mercurial': ['locale/*/LC_MESSAGES/hg.mo', 'dummycert.pem'],
    'mercurial.defaultrc': ['*.rc'],
    'mercurial.helptext': ['*.txt'],
    'mercurial.helptext.internals': ['*.txt'],
}
1604 1600
1605 1601
def ordinarypath(p):
    """Report whether *p* names an ordinary file worth packaging.

    Falsy names are returned as-is; otherwise names starting with '.'
    (hidden files) or ending with '~' (editor backups) are rejected.
    """
    if not p:
        return p
    return p[0] != '.' and p[-1] != '~'
1608 1604
1609 1605
# Ship every ordinary file under mercurial/templates as package data,
# one pseudo-package per directory.
for root in ('templates',):
    for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
        packagename = curdir.replace(os.sep, '.')
        packagedata[packagename] = [f for f in files if ordinarypath(f)]
1614 1610
datafiles = []

# distutils/setuptools expect the version to be a native str.
assert isinstance(version, str)
setupversion = version

# Keyword arguments conditionally added to the setup() call below.
extra = {}
1625 1621
# py2exe (Windows standalone executable) configuration.  These lists may be
# supplemented at build time via HG_PY2EXE_EXTRA_* environment variables.
py2exepackages = [
    'hgdemandimport',
    'hgext3rd',
    'hgext',
    'email',
    # implicitly imported per module policy
    # (cffi wouldn't be used as a frozen exe)
    'mercurial.cext',
    #'mercurial.cffi',
    'mercurial.pure',
]

py2exe_includes = []
py2exeexcludes = []
py2exedllexcludes = ['crypt32.dll']
1642 1638
if issetuptools:
    extra['python_requires'] = supportedpy

if py2exeloaded:
    hg_console_entry = {
        'script': 'hg',
        'copyright': 'Copyright (C) 2005-2022 Olivia Mackall and others',
        'product_version': version,
    }
    extra['console'] = [hg_console_entry]
    # Sub command of 'build' because 'py2exe' does not handle sub_commands.
    # Need to override hgbuild because it has a private copy of
    # build.sub_commands.
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
    # put dlls in sub directory so that they won't pollute PATH
    extra['zipfile'] = 'lib/library.zip'
1660 1656
# We allow some configuration to be supplemented via environment
# variables. This is better than setup.cfg files because it allows
# supplementing configs instead of replacing them.
for _envvar, _target in [
    ('HG_PY2EXE_EXTRA_PACKAGES', py2exepackages),
    ('HG_PY2EXE_EXTRA_INCLUDES', py2exe_includes),
    ('HG_PY2EXE_EXTRA_EXCLUDES', py2exeexcludes),
    ('HG_PY2EXE_EXTRA_DLL_EXCLUDES', py2exedllexcludes),
]:
    _value = os.environ.get(_envvar)
    if _value:
        _target.extend(_value.split(' '))

if os.environ.get('PYOXIDIZER'):
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1682 1678
if os.name == 'nt':
    # Windows binary file versions for exe/dll files must have the
    # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535,
    # so drop any '+'-separated local-version suffix.
    setupversion = setupversion.split('+', 1)[0]
1687 1683
if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
    # Probe the installed Xcode version to decide which workarounds apply.
    # NOTE: this rebinds the module-level ``version`` to the xcodebuild
    # banner; nothing after this point reads the Mercurial version from it.
    version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
    if version:
        version = version[0].decode('utf-8')
        xcode4 = version.startswith('Xcode') and StrictVersion(
            version.split()[1]
        ) >= StrictVersion('4.0')
        xcode51 = bool(re.match(r'^Xcode\s+5\.1', version))
    else:
        # xcodebuild returns empty on OS X Lion with XCode 4.3 not
        # installed, but instead with only command-line tools. Assume
        # that only happens on >= Lion, thus no PPC support.
        xcode4 = True
        xcode51 = False

    # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
    # distutils.sysconfig
    if xcode4:
        os.environ['ARCHFLAGS'] = ''

    # XCode 5.1 changes clang such that it now fails to compile if the
    # -mno-fused-madd flag is passed, but the version of Python shipped with
    # OS X 10.9 Mavericks includes this flag. This causes problems in all
    # C extension modules, and a bug has been filed upstream at
    # http://bugs.python.org/issue21244. We also need to patch this here
    # so Mercurial can continue to compile in the meantime.
    if xcode51:
        cflags = get_config_var('CFLAGS')
        if cflags and re.search(r'-mno-fused-madd\b', cflags):
            os.environ['CFLAGS'] = (
                os.environ.get('CFLAGS', '') + ' -Qunused-arguments'
            )
1720 1716
setup(
    name='mercurial',
    version=setupversion,
    author='Olivia Mackall and many others',
    author_email='mercurial@mercurial-scm.org',
    url='https://mercurial-scm.org/',
    download_url='https://mercurial-scm.org/release/',
    description=(
        'Fast scalable distributed SCM (revision control, version control) system'
    ),
    long_description=(
        'Mercurial is a distributed SCM tool written in Python. It is used'
        ' by a number of large projects that require fast, reliable'
        ' distributed revision control, such as Mozilla.'
    ),
    license='GNU GPLv2 or any later version',
    classifiers=[
        'Development Status :: 6 - Mature',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Natural Language :: Danish',
        'Natural Language :: English',
        'Natural Language :: German',
        'Natural Language :: Italian',
        'Natural Language :: Japanese',
        'Natural Language :: Portuguese (Brazilian)',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: OS Independent',
        'Operating System :: POSIX',
        'Programming Language :: C',
        'Programming Language :: Python',
        'Topic :: Software Development :: Version Control',
    ],
    scripts=scripts,
    packages=packages,
    ext_modules=extmodules,
    data_files=datafiles,
    package_data=packagedata,
    cmdclass=cmdclass,
    distclass=hgdist,
    options={
        # py2exe is only consulted for Windows standalone builds.
        'py2exe': {
            'bundle_files': 3,
            'dll_excludes': py2exedllexcludes,
            'includes': py2exe_includes,
            'excludes': py2exeexcludes,
            'packages': py2exepackages,
        },
        'bdist_mpkg': {
            'zipdist': False,
            'license': 'COPYING',
            'readme': 'contrib/packaging/macosx/Readme.html',
            'welcome': 'contrib/packaging/macosx/Welcome.html',
        },
    },
    **extra
)
General Comments 0
You need to be logged in to leave comments. Login now