##// END OF EJS Templates
codemod: use pycompat.iswindows...
Jun Wu -
r34646:75979c8d default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,1353 +1,1353
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4 from __future__ import absolute_import
5 5
6 6 import os
7 7 import re
8 8 import tempfile
9 9 import xml.dom.minidom
10 10
11 11 from mercurial.i18n import _
12 12 from mercurial import (
13 13 encoding,
14 14 error,
15 15 pycompat,
16 16 util,
17 17 vfs as vfsmod,
18 18 )
19 19
20 20 from . import common
21 21
# Short local aliases for names re-exported by mercurial.util and by the
# sibling convert.common module; the rest of this file refers to these.
pickle = util.pickle
stringio = util.stringio
propertycache = util.propertycache
urlerr = util.urlerr
urlreq = util.urlreq

commandline = common.commandline
commit = common.commit
converter_sink = common.converter_sink
converter_source = common.converter_source
decodeargs = common.decodeargs
encodeargs = common.encodeargs
makedatetimestamp = common.makedatetimestamp
mapfile = common.mapfile
MissingTool = common.MissingTool
NoRepo = common.NoRepo
38 38
39 39 # Subversion stuff. Works best with very recent Python SVN bindings
40 40 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
41 41 # these bindings.
42 42
# Import the Subversion Python bindings lazily: conversions from other
# sources must keep working when the bindings are absent, so failure
# only sets svn to None and is reported later by the svn classes.
try:
    import svn
    import svn.client
    import svn.core
    import svn.ra
    import svn.delta
    from . import transport
    import warnings
    # svn.core emits DeprecationWarnings on recent Pythons; silence them
    # so conversion output is not drowned in warnings.
    warnings.filterwarnings('ignore',
            module='svn.core',
            category=DeprecationWarning)
    svn.core.SubversionException # trigger import to catch error

except ImportError:
    # Bindings unavailable; users get a MissingTool error on actual use.
    svn = None
58 58
class SvnPathNotFound(Exception):
    """Raised when a path cannot be found at or before a given revision."""
    pass
61 61
def revsplit(rev):
    """Parse a revision string and return (uuid, path, revnum).
    >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
    ...          b'/proj%20B/mytrunk/mytrunk@1')
    ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
    >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
    ('', '', 1)
    >>> revsplit(b'@7')
    ('', '', 7)
    >>> revsplit(b'7')
    ('', '', 0)
    >>> revsplit(b'bad')
    ('', '', 0)
    """
    # Split off the trailing '@revnum' part, if any.
    base, at, tail = rev.rpartition('@')
    if at:
        revnum = int(tail)
    else:
        base, revnum = rev, 0
    uuid = ''
    mod = ''
    # The remainder must look like 'svn:<uuid>/<module>'.
    pieces = base.split('/', 1)
    if len(pieces) > 1 and pieces[0].startswith('svn:'):
        uuid = pieces[0][4:]
        mod = '/' + pieces[1]
    return uuid, mod, revnum
87 87
def quote(s):
    """URL-quote s the way Subversion does for canonical URLs.

    Since svn 1.7 many svn calls expect "canonical" paths. Rather than
    calling svn.core.*canonicalize() everywhere, we assume the base url
    is canonical and mimic svn's own URL encoding so new components can
    be appended safely. The safe set below mirrors the
    "svn_uri__char_validity" table in libsvn_subr/path.c.
    """
    safe = "!$&'()*+,-./:=@_~"
    return urlreq.quote(s, safe)
97 97
def geturl(path):
    """Return the canonical svn URL for path, building a file:// URL
    by hand for local directories when the API cannot."""
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except svn.core.SubversionException:
        # url_from_path() raises for plain local repository paths; fall
        # through and construct the URL ourselves.
        pass
    if os.path.isdir(path):
        abspath = os.path.normpath(os.path.abspath(path))
        if pycompat.iswindows:
            abspath = '/' + util.normpath(abspath)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = 'file://%s' % quote(encoding.tolocal(abspath))
    return svn.core.svn_path_canonicalize(path)
113 113
def optrev(number):
    """Wrap a plain revision number into an svn_opt_revision_t."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
119 119
class changedpath(object):
    """Detached copy of the fields we need from an svn changed-path
    entry, so the pool-allocated svn object can be released."""
    def __init__(self, p):
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
125 125
def get_log_child(fp, url, paths, start, end, limit=0,
                  discover_changed_paths=True, strict_node_history=False):
    # Child-process half of the svn log protocol: fetch the log for
    # paths between start and end and stream each entry to the parent
    # as a pickled (paths, revnum, author, date, message) tuple over fp.
    # A pickled None is the end-of-stream sentinel; a pickled string is
    # an error message the parent aborts with.
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        # Convert pool-allocated svn path objects into plain ones before
        # pickling.
        paths = {}
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                paths[k] = changedpath(v)
        pickle.dump((paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    except Exception as inst:
        pickle.dump(str(inst), fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
157 157
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    if svn is None:
        raise error.Abort(_('debugsvnlog could not load Subversion python '
                            'bindings'))

    # Arguments arrive encoded on stdin; results are streamed to stdout.
    get_log_child(ui.fout, *decodeargs(ui.fin.read()))
168 168
class logstream(object):
    """Interruptible revision log iterator."""
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        # Unpickle entries from the child process until the None
        # sentinel; a string entry is an error forwarded by the child.
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise error.Abort(_('Mercurial failed to run itself, check'
                                    ' hg executable is in PATH'))
            if entry is None:
                break
            try:
                orig_paths, revnum, author, date, message = entry
            except (TypeError, ValueError):
                raise error.Abort(_("log stream exception '%s'") % entry)
            yield entry

    def close(self):
        stream = self._stdout
        if stream is not None:
            self._stdout = None
            stream.close()
193 193
class directlogstream(list):
    """Direct revision log iterator.
    This can be used for debugging and development but it will probably leak
    memory and is not suitable for real conversions."""
    def __init__(self, url, paths, start, end, limit=0,
                 discover_changed_paths=True, strict_node_history=False):

        def receiver(orig_paths, revnum, author, date, message, pool):
            # Detach the svn path objects and accumulate entries in self.
            changes = {}
            if orig_paths is not None:
                for k, v in orig_paths.iteritems():
                    changes[k] = changedpath(v)
            self.append((changes, revnum, author, date, message))

        # A private ra keeps our queries from confusing the server state
        # seen by our caller.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)

    def close(self):
        pass
218 218
219 219 # Check to see if the given path is a local Subversion repo. Verify this by
220 220 # looking for several svn-specific files and directories in the given
221 221 # directory.
def filecheck(ui, path, proto):
    """Return True if path looks like a local svn repository root,
    i.e. it contains every svn-specific entry we probe for."""
    return all(os.path.exists(os.path.join(path, x))
               for x in ('locks', 'hooks', 'format', 'db'))
227 227
228 228 # Check to see if a given path is the root of an svn repo over http. We verify
229 229 # this by requesting a version-controlled URL we know can't exist and looking
230 230 # for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    # Probe '<proto>://<path>/!svn/ver/0/.svn', a versioned URL that can
    # never exist: a real svn server answers 404 with a distinctive XML
    # error body. Returns True when the server looks like (or cannot be
    # ruled out as) an svn repository, False otherwise.
    try:
        opener = urlreq.buildopener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urlerr.httperror as inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        data = inst.fp.read()
    except Exception:
        # Could be urlerr.urlerror if the URL is invalid or anything else.
        return False
    return '<m:human-readable errcode="160013">' in data
248 248
# Map URL scheme to the probe function able to recognize an svn
# repository served over it.
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
def issvnurl(ui, url):
    # Tell whether url points at an svn repository, probing over HTTP(S)
    # for remote URLs and the filesystem for file:// URLs or bare paths.
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            # On Windows, 'file:///c%3a/foo' keeps an URL-encoded drive
            # letter; turn '/c%3a/' back into 'c:/' before decoding.
            if (pycompat.iswindows and path[:1] == '/'
                  and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
                path = path[:2] + ':/' + path[6:]
            path = urlreq.url2pathname(path)
    except ValueError:
        # No '://' separator: treat url as a plain local path.
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = util.pconvert(path)
    check = protomap.get(proto, lambda *args: False)
    # Walk up the path components until a repository root answers.
    while '/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
272 272
273 273 # SVN conversion code stolen from bzr-svn and tailor
274 274 #
275 275 # Subversion looks like a versioned filesystem, branches structures
276 276 # are defined by conventions and not enforced by the tool. First,
277 277 # we define the potential branches (modules) as "trunk" and "branches"
278 278 # children directories. Revisions are then identified by their
279 279 # module and revision number (and a repository identifier).
280 280 #
281 281 # The revision graph is really a tree (or a forest). By default, a
282 282 # revision parent is the previous revision in the same module. If the
283 283 # module directory is copied/moved from another module then the
284 284 # revision is the module root and its parent the source revision in
285 285 # the parent module. A revision has at most one parent.
286 286 #
287 287 class svn_source(converter_source):
    def __init__(self, ui, url, revs=None):
        """Open url as a Subversion conversion source.

        Validates that url is an svn repository, that the Python
        bindings are recent enough, then connects and computes the
        module, head revision and start revision. Raises NoRepo,
        MissingTool or error.Abort on failure.
        """
        super(svn_source, self).__init__(ui, url, revs=revs)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Very old bindings do not even define the version constants.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urlreq.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except svn.core.SubversionException:
            ui.traceback()
            svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
                                       svn.core.SVN_VER_MINOR,
                                       svn.core.SVN_VER_MICRO)
            raise NoRepo(_("%s does not look like a Subversion repository "
                           "to libsvn version %s")
                         % (self.url, svnversion))

        if revs:
            # Only a single start revision is supported (--rev N).
            if len(revs) > 1:
                raise error.Abort(_('subversion source does not support '
                                    'specifying multiple revisions'))
            try:
                latest = int(revs[0])
            except ValueError:
                raise error.Abort(_('svn: revision %s is not an integer') %
                                  revs[0])

        self.trunkname = self.ui.config('convert', 'svn.trunk',
                                        'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev')
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise error.Abort(_('svn: start revision %s is not an integer')
                              % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise error.Abort(_('no revision found in module %s')
                              % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = (None, None)

        # Remember the working copy location, if any, for converted().
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
382 382
383 383 def setrevmap(self, revmap):
384 384 lastrevs = {}
385 385 for revid in revmap.iterkeys():
386 386 uuid, module, revnum = revsplit(revid)
387 387 lastrevnum = lastrevs.setdefault(module, revnum)
388 388 if revnum > lastrevnum:
389 389 lastrevs[module] = revnum
390 390 self.lastrevs = lastrevs
391 391
392 392 def exists(self, path, optrev):
393 393 try:
394 394 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
395 395 optrev, False, self.ctx)
396 396 return True
397 397 except svn.core.SubversionException:
398 398 return False
399 399
    def getheads(self):
        """Return the revision ids to convert: the module head first,
        then one head per non-empty branch. Also resolves the
        trunk/tags/branches layout and updates self.module/self.tags."""

        def isdir(path, revnum):
            # True when path was a directory at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve the convert.svn.<name> layout option to an
            # existing repository path, or None when disabled/absent.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == 'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    raise error.Abort(_('expected %s to be at %r, but not found'
                                       ) % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise error.Abort(_('no revision found in module %s')
                                  % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + quote(branches),
                                        rev, False, self.ctx)
            for branch in sorted(branchnames):
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            # A start revision only makes sense with a single head.
            if len(self.heads) > 1:
                raise error.Abort(_('svn: start revision is not supported '
                                    'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise error.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
471 471
    def _getchanges(self, rev, full):
        # Compute ([(file, rev)...], copies) for rev from the path data
        # gathered by _fetch_revisions. Side effect: updates
        # self.removed with the files deleted in rev.
        (paths, parents) = self.paths[rev]
        copies = {}
        if parents:
            files, self.removed, copies = self.expandpaths(rev, paths, parents)
        if full or not parents:
            # Perform a full checkout on roots
            uuid, module, revnum = revsplit(rev)
            entries = svn.client.ls(self.baseurl + quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n, e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            self.removed = set()

        files.sort()
        files = zip(files, [rev] * len(files))
        return (files, copies)
489 489
490 490 def getchanges(self, rev, full):
491 491 # reuse cache from getchangedfiles
492 492 if self._changescache[0] == rev and not full:
493 493 (files, copies) = self._changescache[1]
494 494 else:
495 495 (files, copies) = self._getchanges(rev, full)
496 496 # caller caches the result, so free it here to release memory
497 497 del self.paths[rev]
498 498 return (files, copies, set())
499 499
500 500 def getchangedfiles(self, rev, i):
501 501 # called from filemap - cache computed values for reuse in getchanges
502 502 (files, copies) = self._getchanges(rev, False)
503 503 self._changescache = (rev, (files, copies))
504 504 return [f[0] for f in files]
505 505
    def getcommit(self, rev):
        """Return (and forget) the commit object for rev, fetching a
        range of revisions from the server lazily when needed."""
        if rev not in self.commits:
            uuid, module, revnum = revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            # revision graph backward traversal. Cache all of them
            # down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            # isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
            if rev not in self.commits:
                raise error.Abort(_('svn: revision %s not found') % revnum)
        revcommit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return revcommit
527 527
528 528 def checkrevformat(self, revstr, mapname='splicemap'):
529 529 """ fails if revision format does not match the correct format"""
530 530 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
531 531 r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
532 532 r'{12,12}(.*)\@[0-9]+$',revstr):
533 533 raise error.Abort(_('%s entry %s is not a valid revision'
534 534 ' identifier') % (mapname, revstr))
535 535
536 536 def numcommits(self):
537 537 return int(self.head.rsplit('@', 1)[1]) - self.startrev
538 538
    def gettags(self):
        """Map tag names to revision ids by walking the tags directory
        history backward once, following tag and tags-directory renames."""
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                if not origpaths:
                    origpaths = []
                # (source, source revision, destination) of each copy
                # performed in this revision.
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                renamings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        renamings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = renamings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags
634 634
635 635 def converted(self, rev, destrev):
636 636 if not self.wc:
637 637 return
638 638 if self.convertfp is None:
639 639 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
640 640 'a')
641 641 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
642 642 self.convertfp.flush()
643 643
644 644 def revid(self, revnum, module=None):
645 645 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
646 646
647 647 def revnum(self, rev):
648 648 return int(rev.split('@')[-1])
649 649
    def latest(self, path, stop=None):
        """Find the latest revid affecting path, up to stop revision
        number. If stop is None, default to repository latest
        revision. It may return a revision in a different module,
        since a branch may be moved without a change being
        reported. Return None if computed module does not belong to
        rootmodule subtree.
        """
        def findchanges(path, start, stop=None):
            # Walk the log backward from start, following renames, and
            # return (revnum, path) of the latest change, or (None, path)
            # when no changed revision was found in the range.
            stream = self._getlog([path], start, stop or 1)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if stop is None and paths:
                        # We do not know the latest changed revision,
                        # keep the first one with changed paths.
                        break
                    if revnum <= stop:
                        break

                    for p in paths:
                        if (not path.startswith(p) or
                            not paths[p].copyfrom_path):
                            continue
                        # Follow the rename backward to its source path.
                        newpath = paths[p].copyfrom_path + path[len(p):]
                        self.ui.debug("branch renamed from %s to %s at %d\n" %
                                      (path, newpath, revnum))
                        path = newpath
                        break
                if not paths:
                    revnum = None
                return revnum, path
            finally:
                stream.close()

        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if stop is None:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except svn.core.SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        revnum, realpath = findchanges(path, stop, dirent.created_rev)
        if revnum is None:
            # Tools like svnsync can create empty revision, when
            # synchronizing only a subtree for instance. These empty
            # revisions created_rev still have their original values
            # despite all changes having disappeared and can be
            # returned by ra.stat(), at least when stating the root
            # module. In that case, do not trust created_rev and scan
            # the whole history.
            revnum, realpath = findchanges(path, stop)
            if revnum is None:
                self.ui.debug('ignoring empty branch %r\n' % realpath)
                return None

        if not realpath.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % realpath)
            return None
        return self.revid(revnum, realpath)
723 723
724 724 def reparent(self, module):
725 725 """Reparent the svn transport and return the previous parent."""
726 726 if self.prevmodule == module:
727 727 return module
728 728 svnurl = self.baseurl + quote(module)
729 729 prevmodule = self.prevmodule
730 730 if prevmodule is None:
731 731 prevmodule = ''
732 732 self.ui.debug("reparent to %s\n" % svnurl)
733 733 svn.ra.reparent(self.ra, svnurl)
734 734 self.prevmodule = module
735 735 return prevmodule
736 736
    def expandpaths(self, rev, paths, parents):
        """Expand the changed-path entries of rev into concrete file
        sets, returning (changed files, removed files, copies dict).
        Directory events are expanded into their children."""
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths), unit=_('paths'))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A whole directory was removed: remove each of the
                    # files it contained in the parent revision.
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                # An added/replaced directory changes all of its files.
                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        changed.update(removed)
        return (list(changed), removed, copies)
837 837
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Scan the svn log of self.module between the two revision
        numbers and populate self.commits and self.paths.

        Entries are fetched from the higher revision number down to the
        lower one; scanning stops early once a branch root is reached
        or the revision number drops below self.startrev.
        """
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        # chains consecutive csets so the child's parents can be fixed up
        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
            if self.ui.configbool('convert', 'localtimezone'):
                date = makedatetimestamp(date[0])

            if message:
                log = self.recode(message)
            else:
                log = ''

            if author:
                author = self.recode(author)
            else:
                author = ''

            try:
                branch = self.module.split("/")[-1]
                if branch == self.trunkname:
                    branch = None
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except svn.core.SubversionException as xxx_todo_changeme:
            (inst, num) = xxx_todo_changeme.args
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise error.Abort(_('svn: branch has no revision %s')
                                  % to_revnum)
            raise
974 974
    def getfile(self, file, rev):
        """Return (data, mode) for file at rev, or (None, None) when the
        file does not exist there.

        mode is '' for a plain file, 'x' when the svn:executable
        property is set and 'l' when svn:special is set; for symlinks
        the leading "link " payload prefix is stripped from data.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            return None, None
        mode = ''
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = stringio()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_file() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitly.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except svn.core.SubversionException as e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                return None, None
            raise
        if mode == 'l':
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
1006 1006
1007 1007 def _iterfiles(self, path, revnum):
1008 1008 """Enumerate all files in path at revnum, recursively."""
1009 1009 path = path.strip('/')
1010 1010 pool = svn.core.Pool()
1011 1011 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
1012 1012 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1013 1013 if path:
1014 1014 path += '/'
1015 1015 return ((path + p) for p, e in entries.iteritems()
1016 1016 if e.kind == svn.core.svn_node_file)
1017 1017
1018 1018 def getrelpath(self, path, module=None):
1019 1019 if module is None:
1020 1020 module = self.module
1021 1021 # Given the repository url of this wc, say
1022 1022 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1023 1023 # extract the "entry" portion (a relative path) from what
1024 1024 # svn log --xml says, i.e.
1025 1025 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1026 1026 # that is to say "tests/PloneTestCase.py"
1027 1027 if path.startswith(module):
1028 1028 relative = path.rstrip('/')[len(module):]
1029 1029 if relative.startswith('/'):
1030 1030 return relative[1:]
1031 1031 elif relative == '':
1032 1032 return relative
1033 1033
1034 1034 # The path is outside our tracked tree...
1035 1035 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1036 1036 return None
1037 1037
1038 1038 def _checkpath(self, path, revnum, module=None):
1039 1039 if module is not None:
1040 1040 prevmodule = self.reparent('')
1041 1041 path = module + '/' + path
1042 1042 try:
1043 1043 # ra.check_path does not like leading slashes very much, it leads
1044 1044 # to PROPFIND subversion errors
1045 1045 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1046 1046 finally:
1047 1047 if module is not None:
1048 1048 self.reparent(prevmodule)
1049 1049
    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
                strict_node_history=False):
        """Return a stream of svn log entries for paths between start
        and end; each entry unpacks as
        (paths, revnum, author, date, message).

        With the convert.svn.debugsvnlog developer config set, the log
        is read from a child 'hg debugsvnlog' process instead of the
        in-process directlogstream.
        """
        # Normalize path names, svn >= 1.5 only wants paths relative to
        # supplied URL
        relpaths = []
        for p in paths:
            if not p.startswith('/'):
                p = self.module + '/' + p
            relpaths.append(p.strip('/'))
        args = [self.baseurl, relpaths, start, end, limit,
                discover_changed_paths, strict_node_history]
        # developer config: convert.svn.debugsvnlog
        if not self.ui.configbool('convert', 'svn.debugsvnlog'):
            return directlogstream(*args)
        arg = encodeargs(args)
        hgexe = util.hgexecutable()
        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
        # arguments are pickled onto the child's stdin, entries come
        # back on its stdout (see logstream)
        stdin, stdout = util.popen2(util.quotecommand(cmd))
        stdin.write(arg)
        try:
            stdin.close()
        except IOError:
            raise error.Abort(_('Mercurial failed to run itself, check'
                                ' hg executable is in PATH'))
        return logstream(stdout)
1075 1075
# Shell hook installed as hooks/pre-revprop-change in svn repositories
# freshly created by svn_sink.__init__: it allows changing svn:log and
# adding the hg:convert-branch / hg:convert-rev revision properties
# (which putcommit sets) while rejecting every other revprop change.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
1091 1091
class svn_sink(converter_sink, commandline):
    """Converter sink writing into a Subversion repository by driving
    the 'svn' command line client against a local working copy."""
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
    uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)

    def prerun(self):
        # commandline hook: svn commands must run inside the working copy
        if self.wc:
            os.chdir(self.wc)

    def postrun(self):
        # commandline hook: restore the original working directory
        if self.wc:
            os.chdir(self.cwd)

    def join(self, name):
        # path of an administrative file inside the working copy
        return os.path.join(self.wc, '.svn', name)

    def revmapfile(self):
        return self.join('hg-shamap')

    def authorfile(self):
        return self.join('hg-authormap')

    def __init__(self, ui, path):
        """Open (or create) the target repository/working copy at path.

        path may be an existing working copy, an svn URL, or a local
        directory in which a brand new repository is created."""
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        self.cwd = pycompat.getcwd()

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path already is a working copy: convert into it directly
            self.wc = os.path.realpath(path)
            self.run0('update')
        else:
            if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
                # local path without URL scheme: create the repository
                # on demand and address it through a file:// URL
                path = os.path.realpath(path)
                if os.path.isdir(os.path.dirname(path)):
                    if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                        ui.status(_('initializing svn repository %r\n') %
                                  os.path.basename(path))
                        commandline(ui, 'svnadmin').run0('create', path)
                        created = path
                    path = util.normpath(path)
                    if not path.startswith('/'):
                        path = '/' + path
                    path = 'file://' + path

            wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) +
                                '-wc')
            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = vfsmod.vfs(self.wc)
        self.wopener = vfsmod.vfs(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # is_exec stays None on filesystems without executable bit support
        if util.checkexec(self.wc):
            self.is_exec = util.isexec
        else:
            self.is_exec = None

        if created:
            # allow putcommit to set revision properties on the fresh repo
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()

    def wjoin(self, *names):
        return os.path.join(self.wc, *names)

    @propertycache
    def manifest(self):
        # As of svn 1.7, the "add" command fails when receiving
        # already tracked entries, so we have to track and filter them
        # ourselves.
        m = set()
        output = self.run0('ls', recursive=True, xml=True)
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            for n in e.childNodes:
                if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
                    continue
                name = ''.join(c.data for c in n.childNodes
                               if c.nodeType == c.TEXT_NODE)
                # Entries are compared with names coming from
                # mercurial, so bytes with undefined encoding. Our
                # best bet is to assume they are in local
                # encoding. They will be passed to command line calls
                # later anyway, so they better be.
                m.add(encoding.unitolocal(name))
                break
        return m

    def putfile(self, filename, flags, data):
        """Write data into the working copy file, updating symlink and
        executable state ('l'/'x' in flags) as needed."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # replace a stale symlink with a regular file
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener.write(filename, data)

            if self.is_exec:
                if self.is_exec(self.wjoin(filename)):
                    if 'x' not in flags:
                        self.delexec.append(filename)
                else:
                    if 'x' in flags:
                        self.setexec.append(filename)
                util.setflags(self.wjoin(filename), False, 'x' in flags)

    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.lexists(wdest)
        if exists:
            # park the current destination content in a temporary name
            # so 'svn copy' succeeds, then restore it afterwards
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            self.manifest.add(dest)
            if exists:
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)

    def dirs_of(self, files):
        """Return the set of directories containing any of files,
        including all their ancestors."""
        dirs = set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            i = len(f)
            for i in iter(lambda: f.rfind('/', 0, i), -1):
                dirs.add(f[:i])
        return dirs

    def add_dirs(self, files):
        # schedule 'svn add' for directories not yet tracked
        add_dirs = [d for d in sorted(self.dirs_of(files))
                    if d not in self.manifest]
        if add_dirs:
            self.manifest.update(add_dirs)
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs

    def add_files(self, files):
        # schedule 'svn add' for files not yet tracked
        files = [f for f in files if f not in self.manifest]
        if files:
            self.manifest.update(files)
            self.xargs(files, 'add', quiet=True)
        return files

    def addchild(self, parent, child):
        self.childmap[parent] = child

    def revid(self, rev):
        return u"svn:%s@%s" % (self.uuid, rev)

    def putcommit(self, files, copies, parents, commit, source, revmap, full,
                  cleanp2):
        """Apply the given changeset to the working copy and commit it,
        returning the resulting svn revision id."""
        for parent in parents:
            try:
                # already committed through another parent
                return self.revid(self.childmap[parent])
            except KeyError:
                pass

        # Apply changes to working copy
        for f, v in files:
            data, mode = source.getfile(f, v)
            if data is None:
                self.delete.append(f)
            else:
                self.putfile(f, mode, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        if full:
            self.delete.extend(sorted(self.manifest.difference(files)))
        files = [f[0] for f in files]

        entries = set(self.delete)
        files = frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            for f in self.delete:
                self.manifest.remove(f)
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # the commit message is passed through a temporary file
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, pycompat.sysstr('w'))
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                if parents and not files:
                    return parents[0]
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise error.Abort(_('unable to cope with svn output'))
            if commit.rev:
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)

    def puttags(self, tags):
        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
        return None, None

    def hascommitfrommap(self, rev):
        # We trust that revisions referenced in a map still is present
        # TODO: implement something better if necessary and feasible
        return True

    def hascommitforsplicemap(self, rev):
        # This is not correct as one can convert to an existing subversion
        # repository and childmap would not list all revisions. Too bad.
        if rev in self.childmap:
            return True
        raise error.Abort(_('splice map revision %s not found in subversion '
                            'child map (revision lookups are not implemented)')
                          % rev)
@@ -1,673 +1,673
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''largefiles utility code: must not import other modules in this package.'''
10 10 from __future__ import absolute_import
11 11
12 12 import copy
13 13 import hashlib
14 14 import os
15 15 import stat
16 16
17 17 from mercurial.i18n import _
18 18
19 19 from mercurial import (
20 20 dirstate,
21 21 encoding,
22 22 error,
23 23 httpconnection,
24 24 match as matchmod,
25 25 node,
26 26 pycompat,
27 27 scmutil,
28 28 sparse,
29 29 util,
30 30 vfs as vfsmod,
31 31 )
32 32
33 33 shortname = '.hglf'
34 34 shortnameslash = shortname + '/'
35 35 longname = 'largefiles'
36 36
37 37 # -- Private worker functions ------------------------------------------
38 38
def getminsize(ui, assumelfiles, opt, default=10):
    """Return the minimum largefile size as a float.

    The explicit value (opt) wins; otherwise, when assumelfiles is
    set, the largefiles.minsize config value is consulted. Aborts when
    the value is not numeric or no value can be determined at all.
    """
    size = opt
    if assumelfiles and not size:
        size = ui.config(longname, 'minsize', default=default)
    if size:
        try:
            size = float(size)
        except ValueError:
            raise error.Abort(_('largefiles: size must be number (not %s)\n')
                              % size)
    if size is None:
        raise error.Abort(_('minimum size for largefiles must be specified'))
    return size
52 52
def link(src, dest):
    """Try to create hardlink - if that fails, efficiently make a copy."""
    util.makedirs(os.path.dirname(dest))
    try:
        util.oslink(src, dest)
    except OSError:
        # hardlinking failed (e.g. cross-device or unsupported fs):
        # fall back to an atomic copy carrying over the source mode
        with open(src, 'rb') as infile, util.atomictempfile(dest) as outfile:
            for block in util.filechunkiter(infile):
                outfile.write(block)
        os.chmod(dest, os.stat(src).st_mode)
64 64
def usercachepath(ui, hash):
    '''Return the path of the file with the given hash inside the
    "global" largefiles user cache.

    This cache is shared across repositories, preserving both download
    bandwidth and storage space.'''
    cachedir = _usercachedir(ui)
    return os.path.join(cachedir, hash)
71 71
def _usercachedir(ui):
    '''Return the location of the "global" largefiles cache.'''
    # an explicitly configured path always wins
    configured = ui.configpath(longname, 'usercache', None)
    if configured:
        return configured
    if pycompat.iswindows:
        # per-user application data, preferring the local (non-roaming) one
        appdata = encoding.environ.get('LOCALAPPDATA',
                                       encoding.environ.get('APPDATA'))
        if appdata:
            return os.path.join(appdata, longname)
    elif pycompat.sysplatform == 'darwin':
        home = encoding.environ.get('HOME')
        if home:
            return os.path.join(home, 'Library', 'Caches', longname)
    elif pycompat.osname == 'posix':
        # honour the XDG base directory spec, falling back to ~/.cache
        cachehome = encoding.environ.get('XDG_CACHE_HOME')
        if cachehome:
            return os.path.join(cachehome, longname)
        home = encoding.environ.get('HOME')
        if home:
            return os.path.join(home, '.cache', longname)
    else:
        raise error.Abort(_('unknown operating system: %s\n')
                          % pycompat.osname)
    raise error.Abort(_('unknown %s usercache location') % longname)
97 97
def inusercache(ui, hash):
    """Report whether the given hash is present in the user cache."""
    return os.path.exists(usercachepath(ui, hash))
101 101
def findfile(repo, hash):
    '''Return store path of the largefile with the specified hash.
    As a side effect, the file might be linked from user cache.
    Return None if the file can't be found locally.'''
    path, exists = findstorepath(repo, hash)
    if exists:
        repo.ui.note(_('found %s in store\n') % hash)
        return path
    if inusercache(repo.ui, hash):
        repo.ui.note(_('found %s in system cache\n') % hash)
        stored = storepath(repo, hash)
        # materialize the cached copy into the store as a side effect
        link(usercachepath(repo.ui, hash), stored)
        return stored
    return None
116 116
class largefilesdirstate(dirstate.dirstate):
    """dirstate subclass used for the largefiles dirstate (see
    openlfdirstate).

    Every path is passed through unixpath() before reaching the base
    class (presumably converting it to slash-separated form -- TODO
    confirm against unixpath's definition), ignore handling is
    disabled, and writes never use PENDING mode.
    """
    def __getitem__(self, key):
        return super(largefilesdirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefilesdirstate, self).normal(unixpath(f))
    def remove(self, f):
        return super(largefilesdirstate, self).remove(unixpath(f))
    def add(self, f):
        return super(largefilesdirstate, self).add(unixpath(f))
    def drop(self, f):
        return super(largefilesdirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefilesdirstate, self).forget(unixpath(f))
    def normallookup(self, f):
        return super(largefilesdirstate, self).normallookup(unixpath(f))
    def _ignore(self, f):
        # largefiles are never ignored
        return False
    def write(self, tr=False):
        # (1) disable PENDING mode always
        # (lfdirstate isn't yet managed as a part of the transaction)
        # (2) avoid develwarn 'use dirstate.write with ....'
        super(largefilesdirstate, self).write(None)
139 139
def openlfdirstate(ui, repo, create=True):
    '''
    Return a dirstate object that tracks largefiles: i.e. its root is
    the repo root, but it is saved in .hg/largefiles/dirstate.
    '''
    vfs = repo.vfs
    storedir = longname
    lfopener = vfsmod.vfs(vfs.join(storedir))
    lfdirstate = largefilesdirstate(lfopener, ui, repo.root,
                                    repo.dirstate._validate,
                                    lambda: sparse.matcher(repo))

    # If the largefiles dirstate does not exist, populate and create
    # it. This ensures that we create it on the first meaningful
    # largefiles operation in a new clone.
    if create and not vfs.exists(vfs.join(storedir, 'dirstate')):
        matcher = getstandinmatcher(repo)
        standins = repo.dirstate.walk(matcher, subrepos=[], unknown=False,
                                      ignored=False)

        if standins:
            vfs.makedirs(storedir)

        for standin in standins:
            lfdirstate.normallookup(splitstandin(standin))
    return lfdirstate
167 167
def lfdirstatestatus(lfdirstate, repo):
    """Resolve 'unsure' largefiles against the working parent and
    return the updated status object."""
    pctx = repo['.']
    match = matchmod.always(repo.root, repo.getcwd())
    unsure, s = lfdirstate.status(match, subrepos=[], ignored=False,
                                  clean=False, unknown=False)
    modified, clean = s.modified, s.clean
    for lfile in unsure:
        standinctx = None
        try:
            standinctx = pctx[standin(lfile)]
        except LookupError:
            pass
        # clean only when the standin's recorded hash matches the
        # current working copy content
        if standinctx and readasstandin(standinctx) == hashfile(
                repo.wjoin(lfile)):
            clean.append(lfile)
            lfdirstate.normal(lfile)
        else:
            modified.append(lfile)
    return s
185 185
def listlfiles(repo, rev=None, matcher=None):
    '''return a list of largefiles in the working copy or the
    specified changeset'''
    if matcher is None:
        matcher = getstandinmatcher(repo)

    # ignore unknown files in working directory
    lfiles = []
    for f in repo[rev].walk(matcher):
        if rev is not None or repo.dirstate[f] != '?':
            lfiles.append(splitstandin(f))
    return lfiles
197 197
def instore(repo, hash, forcelocal=False):
    '''Return true if a largefile with the given hash exists in the store'''
    path = storepath(repo, hash, forcelocal)
    return os.path.exists(path)
201 201
def storepath(repo, hash, forcelocal=False):
    '''Return the correct location in the repository largefiles store for a
    file with the given hash.'''
    if forcelocal or not repo.shared():
        return repo.vfs.join(longname, hash)
    # shared repositories keep their primary store in the share source
    return repo.vfs.reljoin(repo.sharedpath, longname, hash)
208 208
def findstorepath(repo, hash):
    '''Search through the local store path(s) to find the file for the given
    hash.  If the file is not found, its path in the primary store is
    returned.  The return value is a tuple of (path, exists(path)).
    '''
    # For shared repos, the primary store is in the share source. But for
    # backward compatibility, force a lookup in the local store if it wasn't
    # found in the share source.
    primary = storepath(repo, hash, False)
    if instore(repo, hash):
        return (primary, True)
    if repo.shared() and instore(repo, hash, True):
        return (storepath(repo, hash, True), True)
    return (primary, False)
225 225
def copyfromcache(repo, hash, filename):
    '''Copy the specified largefile from the repo or system cache to
    filename in the repository. Return true on success or false if the
    file was not found in either cache (which should not happened:
    this is meant to be called only after ensuring that the needed
    largefile exists in the cache).'''
    wvfs = repo.wvfs
    path = findfile(repo, hash)
    if path is None:
        return False
    wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
    # The write may fail before the file is fully written, but we
    # don't use atomic writes in the working copy.
    with open(path, 'rb') as src, wvfs(filename, 'wb') as dst:
        copied = copyandhash(util.filechunkiter(src), dst)
    if copied == hash:
        return True
    # hash mismatch: the cached copy was corrupt, drop the bad file
    repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
                 % (filename, path, copied))
    wvfs.unlink(filename)
    return False
248 248
def copytostore(repo, ctx, file, fstandin):
    """Copy the largefile referenced by the standin fstandin of ctx
    into the store, unless it is already present there."""
    wvfs = repo.wvfs
    hash = readasstandin(ctx[fstandin])
    if instore(repo, hash):
        return
    if not wvfs.exists(file):
        repo.ui.warn(_("%s: largefile %s not available from local store\n") %
                     (file, hash))
        return
    copytostoreabsolute(repo, wvfs.join(file), hash)
259 259
def copyalltostore(repo, node):
    '''Copy all largefiles in a given revision to the store'''
    ctx = repo[node]
    for fname in ctx.files():
        lfile = splitstandin(fname)
        # only standins that are still part of the revision's manifest
        if lfile is None or fname not in ctx.manifest():
            continue
        copytostore(repo, ctx, lfile, fname)
268 268
def copytostoreabsolute(repo, file, hash):
    """Store the content of the absolute path file under the given hash,
    hardlinking from the user cache when the hash is already there."""
    if inusercache(repo.ui, hash):
        link(usercachepath(repo.ui, hash), storepath(repo, hash))
        return
    dst = storepath(repo, hash)
    util.makedirs(os.path.dirname(dst))
    # write atomically so a partially copied largefile never appears
    # in the store
    with open(file, 'rb') as srcf:
        with util.atomictempfile(dst,
                                 createmode=repo.store.createmode) as dstf:
            for chunk in util.filechunkiter(srcf):
                dstf.write(chunk)
    linktousercache(repo, hash)
280 280
def linktousercache(repo, hash):
    '''Link / copy the largefile with the specified hash from the store
    to the cache.'''
    target = usercachepath(repo.ui, hash)
    link(storepath(repo, hash), target)
286 286
def getstandinmatcher(repo, rmatcher=None):
    '''Return a match object that applies rmatcher to the standin directory'''
    wvfs = repo.wvfs
    standindir = shortname

    # no warnings about missing files or directories
    def badfn(f, msg):
        return None

    if rmatcher and not rmatcher.always():
        # restrict the standin patterns to the files rmatcher selects
        pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
        if not pats:
            pats = [wvfs.join(standindir)]
    else:
        # no patterns: relative to repo root
        pats = [wvfs.join(standindir)]
    return scmutil.match(repo[None], pats, badfn=badfn)
304 304
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher that accepts standins corresponding to the
    files accepted by rmatcher. Pass the list of files in the matcher
    as the paths specified by the user.'''
    smatcher = getstandinmatcher(repo, rmatcher)
    isstandin = smatcher.matchfn

    def composed(f):
        # accept only standins whose real file rmatcher also accepts
        return isstandin(f) and rmatcher.matchfn(splitstandin(f))

    smatcher.matchfn = composed
    return smatcher
316 316
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Some callers want an absolute path, but for instance addlargefiles
    # needs it repo-relative so it can be passed to repo[None].add(). So
    # leave it up to the caller to use repo.wjoin() to get an absolute path.
    # We join with '/' because that is what dirstate always uses, even on
    # Windows; any existing separator is converted to '/' first in case
    # the filename comes from an external source (like the command line).
    return shortnameslash + util.pconvert(filename)
328 328
def isstandin(filename):
    '''Return true if filename is a big file standin. filename must be
    in Mercurial's internal form (slash-separated).'''
    # a standin always lives under the '.hglf/' prefix
    return filename.startswith(shortnameslash)
333 333
def splitstandin(filename):
    """Return the largefile path for the given standin path, or None
    when filename is not a standin."""
    # Split on / because that's what dirstate always uses, even on Windows.
    # Change local separator to / first just in case we are passed filenames
    # from an external source (like the command line).
    parts = util.pconvert(filename).split('/', 1)
    if len(parts) == 2 and parts[0] == shortname:
        return parts[1]
    return None
343 343
def updatestandin(repo, lfile, standin):
    """Re-calculate hash value of lfile and write it into standin

    This assumes that "lfutil.standin(lfile) == standin", for efficiency.
    """
    absfile = repo.wjoin(lfile)
    if not repo.wvfs.exists(lfile):
        raise error.Abort(_('%s: file not found!') % lfile)
    writestandin(repo, standin, hashfile(absfile), getexecutable(absfile))
356 356
357 357 def readasstandin(fctx):
358 358 '''read hex hash from given filectx of standin file
359 359
360 360 This encapsulates how "standin" data is stored into storage layer.'''
361 361 return fctx.data().strip()
362 362
363 363 def writestandin(repo, standin, hash, executable):
364 364 '''write hash to <repo.root>/<standin>'''
365 365 repo.wwrite(standin, hash + '\n', executable and 'x' or '')
366 366
367 367 def copyandhash(instream, outfile):
368 368 '''Read bytes from instream (iterable) and write them to outfile,
369 369 computing the SHA-1 hash of the data along the way. Return the hash.'''
370 370 hasher = hashlib.sha1('')
371 371 for data in instream:
372 372 hasher.update(data)
373 373 outfile.write(data)
374 374 return hasher.hexdigest()
375 375
376 376 def hashfile(file):
377 377 if not os.path.exists(file):
378 378 return ''
379 379 with open(file, 'rb') as fd:
380 380 return hexsha1(fd)
381 381
382 382 def getexecutable(filename):
383 383 mode = os.stat(filename).st_mode
384 384 return ((mode & stat.S_IXUSR) and
385 385 (mode & stat.S_IXGRP) and
386 386 (mode & stat.S_IXOTH))
387 387
388 388 def urljoin(first, second, *arg):
389 389 def join(left, right):
390 390 if not left.endswith('/'):
391 391 left += '/'
392 392 if right.startswith('/'):
393 393 right = right[1:]
394 394 return left + right
395 395
396 396 url = join(first, second)
397 397 for a in arg:
398 398 url = join(url, a)
399 399 return url
400 400
401 401 def hexsha1(fileobj):
402 402 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
403 403 object data"""
404 404 h = hashlib.sha1()
405 405 for chunk in util.filechunkiter(fileobj):
406 406 h.update(chunk)
407 407 return h.hexdigest()
408 408
409 409 def httpsendfile(ui, filename):
410 410 return httpconnection.httpsendfile(ui, filename, 'rb')
411 411
412 412 def unixpath(path):
413 413 '''Return a version of path normalized for use with the lfdirstate.'''
414 414 return util.pconvert(os.path.normpath(path))
415 415
416 416 def islfilesrepo(repo):
417 417 '''Return true if the repo is a largefile repo.'''
418 418 if ('largefiles' in repo.requirements and
419 419 any(shortnameslash in f[0] for f in repo.store.datafiles())):
420 420 return True
421 421
422 422 return any(openlfdirstate(repo.ui, repo, False))
423 423
424 424 class storeprotonotcapable(Exception):
425 425 def __init__(self, storetypes):
426 426 self.storetypes = storetypes
427 427
428 428 def getstandinsstate(repo):
429 429 standins = []
430 430 matcher = getstandinmatcher(repo)
431 431 wctx = repo[None]
432 432 for standin in repo.dirstate.walk(matcher, subrepos=[], unknown=False,
433 433 ignored=False):
434 434 lfile = splitstandin(standin)
435 435 try:
436 436 hash = readasstandin(wctx[standin])
437 437 except IOError:
438 438 hash = None
439 439 standins.append((lfile, hash))
440 440 return standins
441 441
442 442 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
443 443 lfstandin = standin(lfile)
444 444 if lfstandin in repo.dirstate:
445 445 stat = repo.dirstate._map[lfstandin]
446 446 state, mtime = stat[0], stat[3]
447 447 else:
448 448 state, mtime = '?', -1
449 449 if state == 'n':
450 450 if (normallookup or mtime < 0 or
451 451 not repo.wvfs.exists(lfile)):
452 452 # state 'n' doesn't ensure 'clean' in this case
453 453 lfdirstate.normallookup(lfile)
454 454 else:
455 455 lfdirstate.normal(lfile)
456 456 elif state == 'm':
457 457 lfdirstate.normallookup(lfile)
458 458 elif state == 'r':
459 459 lfdirstate.remove(lfile)
460 460 elif state == 'a':
461 461 lfdirstate.add(lfile)
462 462 elif state == '?':
463 463 lfdirstate.drop(lfile)
464 464
465 465 def markcommitted(orig, ctx, node):
466 466 repo = ctx.repo()
467 467
468 468 orig(node)
469 469
470 470 # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
471 471 # because files coming from the 2nd parent are omitted in the latter.
472 472 #
473 473 # The former should be used to get targets of "synclfdirstate",
474 474 # because such files:
475 475 # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
476 476 # - have to be marked as "n" after commit, but
477 477 # - aren't listed in "repo[node].files()"
478 478
479 479 lfdirstate = openlfdirstate(repo.ui, repo)
480 480 for f in ctx.files():
481 481 lfile = splitstandin(f)
482 482 if lfile is not None:
483 483 synclfdirstate(repo, lfdirstate, lfile, False)
484 484 lfdirstate.write()
485 485
486 486 # As part of committing, copy all of the largefiles into the cache.
487 487 #
488 488 # Using "node" instead of "ctx" implies additional "repo[node]"
489 489 # lookup while copyalltostore(), but can omit redundant check for
490 490 # files comming from the 2nd parent, which should exist in store
491 491 # at merging.
492 492 copyalltostore(repo, node)
493 493
494 494 def getlfilestoupdate(oldstandins, newstandins):
495 495 changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
496 496 filelist = []
497 497 for f in changedstandins:
498 498 if f[0] not in filelist:
499 499 filelist.append(f[0])
500 500 return filelist
501 501
502 502 def getlfilestoupload(repo, missing, addfunc):
503 503 for i, n in enumerate(missing):
504 504 repo.ui.progress(_('finding outgoing largefiles'), i,
505 505 unit=_('revisions'), total=len(missing))
506 506 parents = [p for p in repo[n].parents() if p != node.nullid]
507 507
508 508 oldlfstatus = repo.lfstatus
509 509 repo.lfstatus = False
510 510 try:
511 511 ctx = repo[n]
512 512 finally:
513 513 repo.lfstatus = oldlfstatus
514 514
515 515 files = set(ctx.files())
516 516 if len(parents) == 2:
517 517 mc = ctx.manifest()
518 518 mp1 = ctx.parents()[0].manifest()
519 519 mp2 = ctx.parents()[1].manifest()
520 520 for f in mp1:
521 521 if f not in mc:
522 522 files.add(f)
523 523 for f in mp2:
524 524 if f not in mc:
525 525 files.add(f)
526 526 for f in mc:
527 527 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
528 528 files.add(f)
529 529 for fn in files:
530 530 if isstandin(fn) and fn in ctx:
531 531 addfunc(fn, readasstandin(ctx[fn]))
532 532 repo.ui.progress(_('finding outgoing largefiles'), None)
533 533
534 534 def updatestandinsbymatch(repo, match):
535 535 '''Update standins in the working directory according to specified match
536 536
537 537 This returns (possibly modified) ``match`` object to be used for
538 538 subsequent commit process.
539 539 '''
540 540
541 541 ui = repo.ui
542 542
543 543 # Case 1: user calls commit with no specific files or
544 544 # include/exclude patterns: refresh and commit all files that
545 545 # are "dirty".
546 546 if match is None or match.always():
547 547 # Spend a bit of time here to get a list of files we know
548 548 # are modified so we can compare only against those.
549 549 # It can cost a lot of time (several seconds)
550 550 # otherwise to update all standins if the largefiles are
551 551 # large.
552 552 lfdirstate = openlfdirstate(ui, repo)
553 553 dirtymatch = matchmod.always(repo.root, repo.getcwd())
554 554 unsure, s = lfdirstate.status(dirtymatch, subrepos=[], ignored=False,
555 555 clean=False, unknown=False)
556 556 modifiedfiles = unsure + s.modified + s.added + s.removed
557 557 lfiles = listlfiles(repo)
558 558 # this only loops through largefiles that exist (not
559 559 # removed/renamed)
560 560 for lfile in lfiles:
561 561 if lfile in modifiedfiles:
562 562 fstandin = standin(lfile)
563 563 if repo.wvfs.exists(fstandin):
564 564 # this handles the case where a rebase is being
565 565 # performed and the working copy is not updated
566 566 # yet.
567 567 if repo.wvfs.exists(lfile):
568 568 updatestandin(repo, lfile, fstandin)
569 569
570 570 return match
571 571
572 572 lfiles = listlfiles(repo)
573 573 match._files = repo._subdirlfs(match.files(), lfiles)
574 574
575 575 # Case 2: user calls commit with specified patterns: refresh
576 576 # any matching big files.
577 577 smatcher = composestandinmatcher(repo, match)
578 578 standins = repo.dirstate.walk(smatcher, subrepos=[], unknown=False,
579 579 ignored=False)
580 580
581 581 # No matching big files: get out of the way and pass control to
582 582 # the usual commit() method.
583 583 if not standins:
584 584 return match
585 585
586 586 # Refresh all matching big files. It's possible that the
587 587 # commit will end up failing, in which case the big files will
588 588 # stay refreshed. No harm done: the user modified them and
589 589 # asked to commit them, so sooner or later we're going to
590 590 # refresh the standins. Might as well leave them refreshed.
591 591 lfdirstate = openlfdirstate(ui, repo)
592 592 for fstandin in standins:
593 593 lfile = splitstandin(fstandin)
594 594 if lfdirstate[lfile] != 'r':
595 595 updatestandin(repo, lfile, fstandin)
596 596
597 597 # Cook up a new matcher that only matches regular files or
598 598 # standins corresponding to the big files requested by the
599 599 # user. Have to modify _files to prevent commit() from
600 600 # complaining "not tracked" for big files.
601 601 match = copy.copy(match)
602 602 origmatchfn = match.matchfn
603 603
604 604 # Check both the list of largefiles and the list of
605 605 # standins because if a largefile was removed, it
606 606 # won't be in the list of largefiles at this point
607 607 match._files += sorted(standins)
608 608
609 609 actualfiles = []
610 610 for f in match._files:
611 611 fstandin = standin(f)
612 612
613 613 # For largefiles, only one of the normal and standin should be
614 614 # committed (except if one of them is a remove). In the case of a
615 615 # standin removal, drop the normal file if it is unknown to dirstate.
616 616 # Thus, skip plain largefile names but keep the standin.
617 617 if f in lfiles or fstandin in standins:
618 618 if repo.dirstate[fstandin] != 'r':
619 619 if repo.dirstate[f] != 'r':
620 620 continue
621 621 elif repo.dirstate[f] == '?':
622 622 continue
623 623
624 624 actualfiles.append(f)
625 625 match._files = actualfiles
626 626
627 627 def matchfn(f):
628 628 if origmatchfn(f):
629 629 return f not in lfiles
630 630 else:
631 631 return f in standins
632 632
633 633 match.matchfn = matchfn
634 634
635 635 return match
636 636
637 637 class automatedcommithook(object):
638 638 '''Stateful hook to update standins at the 1st commit of resuming
639 639
640 640 For efficiency, updating standins in the working directory should
641 641 be avoided while automated committing (like rebase, transplant and
642 642 so on), because they should be updated before committing.
643 643
644 644 But the 1st commit of resuming automated committing (e.g. ``rebase
645 645 --continue``) should update them, because largefiles may be
646 646 modified manually.
647 647 '''
648 648 def __init__(self, resuming):
649 649 self.resuming = resuming
650 650
651 651 def __call__(self, repo, match):
652 652 if self.resuming:
653 653 self.resuming = False # avoids updating at subsequent commits
654 654 return updatestandinsbymatch(repo, match)
655 655 else:
656 656 return match
657 657
658 658 def getstatuswriter(ui, repo, forcibly=None):
659 659 '''Return the function to write largefiles specific status out
660 660
661 661 If ``forcibly`` is ``None``, this returns the last element of
662 662 ``repo._lfstatuswriters`` as "default" writer function.
663 663
664 664 Otherwise, this returns the function to always write out (or
665 665 ignore if ``not forcibly``) status.
666 666 '''
667 667 if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
668 668 return repo._lfstatuswriters[-1]
669 669 else:
670 670 if forcibly:
671 671 return ui.status # forcibly WRITE OUT
672 672 else:
673 673 return lambda *msg, **opts: None # forcibly IGNORE
@@ -1,133 +1,133
1 1 # logtoprocess.py - send ui.log() data to a subprocess
2 2 #
3 3 # Copyright 2016 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """send ui.log() data to a subprocess (EXPERIMENTAL)
8 8
9 9 This extension lets you specify a shell command per ui.log() event,
10 10 sending all remaining arguments to as environment variables to that command.
11 11
12 12 Each positional argument to the method results in a `MSG[N]` key in the
13 13 environment, starting at 1 (so `MSG1`, `MSG2`, etc.). Each keyword argument
14 14 is set as a `OPT_UPPERCASE_KEY` variable (so the key is uppercased, and
15 15 prefixed with `OPT_`). The original event name is passed in the `EVENT`
16 16 environment variable, and the process ID of mercurial is given in `HGPID`.
17 17
18 18 So given a call `ui.log('foo', 'bar', 'baz', spam='eggs'), a script configured
19 19 for the `foo` event can expect an environment with `MSG1=bar`, `MSG2=baz`, and
20 20 `OPT_SPAM=eggs`.
21 21
22 22 Scripts are configured in the `[logtoprocess]` section, each key an event name.
23 23 For example::
24 24
25 25 [logtoprocess]
26 26 commandexception = echo "$MSG2$MSG3" > /var/log/mercurial_exceptions.log
27 27
28 28 would log the warning message and traceback of any failed command dispatch.
29 29
30 30 Scripts are run asynchronously as detached daemon processes; mercurial will
31 31 not ensure that they exit cleanly.
32 32
33 33 """
34 34
35 35 from __future__ import absolute_import
36 36
37 37 import itertools
38 38 import os
39 39 import subprocess
40 40 import sys
41 41
42 42 from mercurial import (
43 43 encoding,
44 44 pycompat,
45 45 )
46 46
47 47 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
48 48 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
49 49 # be specifying the version(s) of Mercurial they are tested with, or
50 50 # leave the attribute unspecified.
51 51 testedwith = 'ships-with-hg-core'
52 52
53 53 def uisetup(ui):
54 if pycompat.osname == 'nt':
54 if pycompat.iswindows:
55 55 # no fork on Windows, but we can create a detached process
56 56 # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863.aspx
57 57 # No stdlib constant exists for this value
58 58 DETACHED_PROCESS = 0x00000008
59 59 _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP
60 60
61 61 def runshellcommand(script, env):
62 62 # we can't use close_fds *and* redirect stdin. I'm not sure that we
63 63 # need to because the detached process has no console connection.
64 64 subprocess.Popen(
65 65 script, shell=True, env=env, close_fds=True,
66 66 creationflags=_creationflags)
67 67 else:
68 68 def runshellcommand(script, env):
69 69 # double-fork to completely detach from the parent process
70 70 # based on http://code.activestate.com/recipes/278731
71 71 pid = os.fork()
72 72 if pid:
73 73 # parent
74 74 return
75 75 # subprocess.Popen() forks again, all we need to add is
76 76 # flag the new process as a new session.
77 77 if sys.version_info < (3, 2):
78 78 newsession = {'preexec_fn': os.setsid}
79 79 else:
80 80 newsession = {'start_new_session': True}
81 81 try:
82 82 # connect stdin to devnull to make sure the subprocess can't
83 83 # muck up that stream for mercurial.
84 84 subprocess.Popen(
85 85 script, shell=True, stdin=open(os.devnull, 'r'), env=env,
86 86 close_fds=True, **newsession)
87 87 finally:
88 88 # mission accomplished, this child needs to exit and not
89 89 # continue the hg process here.
90 90 os._exit(0)
91 91
92 92 class logtoprocessui(ui.__class__):
93 93 def log(self, event, *msg, **opts):
94 94 """Map log events to external commands
95 95
96 96 Arguments are passed on as environment variables.
97 97
98 98 """
99 99 script = self.config('logtoprocess', event)
100 100 if script:
101 101 if msg:
102 102 # try to format the log message given the remaining
103 103 # arguments
104 104 try:
105 105 # Python string formatting with % either uses a
106 106 # dictionary *or* tuple, but not both. If we have
107 107 # keyword options, assume we need a mapping.
108 108 formatted = msg[0] % (opts or msg[1:])
109 109 except (TypeError, KeyError):
110 110 # Failed to apply the arguments, ignore
111 111 formatted = msg[0]
112 112 messages = (formatted,) + msg[1:]
113 113 else:
114 114 messages = msg
115 115 # positional arguments are listed as MSG[N] keys in the
116 116 # environment
117 117 msgpairs = (
118 118 ('MSG{0:d}'.format(i), str(m))
119 119 for i, m in enumerate(messages, 1))
120 120 # keyword arguments get prefixed with OPT_ and uppercased
121 121 optpairs = (
122 122 ('OPT_{0}'.format(key.upper()), str(value))
123 123 for key, value in opts.iteritems())
124 124 env = dict(itertools.chain(encoding.environ.items(),
125 125 msgpairs, optpairs),
126 126 EVENT=event, HGPID=str(os.getpid()))
127 127 # Connect stdin to /dev/null to prevent child processes messing
128 128 # with mercurial's stdin.
129 129 runshellcommand(script, env)
130 130 return super(logtoprocessui, self).log(event, *msg, **opts)
131 131
132 132 # Replace the class for this instance and all clones created from it:
133 133 ui.__class__ = logtoprocessui
@@ -1,134 +1,134
1 1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
2 2 #
3 3 # This software may be used and distributed according to the terms of the
4 4 # GNU General Public License version 2 or any later version.
5 5
6 6 """extend schemes with shortcuts to repository swarms
7 7
8 8 This extension allows you to specify shortcuts for parent URLs with a
9 9 lot of repositories to act like a scheme, for example::
10 10
11 11 [schemes]
12 12 py = http://code.python.org/hg/
13 13
14 14 After that you can use it like::
15 15
16 16 hg clone py://trunk/
17 17
18 18 Additionally there is support for some more complex schemas, for
19 19 example used by Google Code::
20 20
21 21 [schemes]
22 22 gcode = http://{1}.googlecode.com/hg/
23 23
24 24 The syntax is taken from Mercurial templates, and you have unlimited
25 25 number of variables, starting with ``{1}`` and continuing with
26 26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
27 27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
28 28 just appended to an URL.
29 29
30 30 For convenience, the extension adds these schemes by default::
31 31
32 32 [schemes]
33 33 py = http://hg.python.org/
34 34 bb = https://bitbucket.org/
35 35 bb+ssh = ssh://hg@bitbucket.org/
36 36 gcode = https://{1}.googlecode.com/hg/
37 37 kiln = https://{1}.kilnhg.com/Repo/
38 38
39 39 You can override a predefined scheme by defining a new scheme with the
40 40 same name.
41 41 """
42 42 from __future__ import absolute_import
43 43
44 44 import os
45 45 import re
46 46
47 47 from mercurial.i18n import _
48 48 from mercurial import (
49 49 error,
50 50 extensions,
51 51 hg,
52 52 pycompat,
53 53 registrar,
54 54 templater,
55 55 util,
56 56 )
57 57
58 58 cmdtable = {}
59 59 command = registrar.command(cmdtable)
60 60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 62 # be specifying the version(s) of Mercurial they are tested with, or
63 63 # leave the attribute unspecified.
64 64 testedwith = 'ships-with-hg-core'
65 65
66 66 _partre = re.compile(br'\{(\d+)\}')
67 67
68 68 class ShortRepository(object):
69 69 def __init__(self, url, scheme, templater):
70 70 self.scheme = scheme
71 71 self.templater = templater
72 72 self.url = url
73 73 try:
74 74 self.parts = max(map(int, _partre.findall(self.url)))
75 75 except ValueError:
76 76 self.parts = 0
77 77
78 78 def __repr__(self):
79 79 return '<ShortRepository: %s>' % self.scheme
80 80
81 81 def instance(self, ui, url, create):
82 82 url = self.resolve(url)
83 83 return hg._peerlookup(url).instance(ui, url, create)
84 84
85 85 def resolve(self, url):
86 86 # Should this use the util.url class, or is manual parsing better?
87 87 try:
88 88 url = url.split('://', 1)[1]
89 89 except IndexError:
90 90 raise error.Abort(_("no '://' in scheme url '%s'") % url)
91 91 parts = url.split('/', self.parts)
92 92 if len(parts) > self.parts:
93 93 tail = parts[-1]
94 94 parts = parts[:-1]
95 95 else:
96 96 tail = ''
97 97 context = dict((str(i + 1), v) for i, v in enumerate(parts))
98 98 return ''.join(self.templater.process(self.url, context)) + tail
99 99
100 100 def hasdriveletter(orig, path):
101 101 if path:
102 102 for scheme in schemes:
103 103 if path.startswith(scheme + ':'):
104 104 return False
105 105 return orig(path)
106 106
107 107 schemes = {
108 108 'py': 'http://hg.python.org/',
109 109 'bb': 'https://bitbucket.org/',
110 110 'bb+ssh': 'ssh://hg@bitbucket.org/',
111 111 'gcode': 'https://{1}.googlecode.com/hg/',
112 112 'kiln': 'https://{1}.kilnhg.com/Repo/'
113 113 }
114 114
115 115 def extsetup(ui):
116 116 schemes.update(dict(ui.configitems('schemes')))
117 117 t = templater.engine(lambda x: x)
118 118 for scheme, url in schemes.items():
119 if (pycompat.osname == 'nt' and len(scheme) == 1 and scheme.isalpha()
119 if (pycompat.iswindows and len(scheme) == 1 and scheme.isalpha()
120 120 and os.path.exists('%s:\\' % scheme)):
121 121 raise error.Abort(_('custom scheme %s:// conflicts with drive '
122 122 'letter %s:\\\n') % (scheme, scheme.upper()))
123 123 hg.schemes[scheme] = ShortRepository(url, scheme, t)
124 124
125 125 extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter)
126 126
127 127 @command('debugexpandscheme', norepo=True)
128 128 def expandscheme(ui, url, **opts):
129 129 """given a repo path, provide the scheme-expanded path
130 130 """
131 131 repo = hg._peerlookup(url)
132 132 if isinstance(repo, ShortRepository):
133 133 url = repo.resolve(url)
134 134 ui.write(url + '\n')
@@ -1,206 +1,206
1 1 # win32mbcs.py -- MBCS filename support for Mercurial
2 2 #
3 3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
4 4 #
5 5 # Version: 0.3
6 6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
7 7 #
8 8 # This software may be used and distributed according to the terms of the
9 9 # GNU General Public License version 2 or any later version.
10 10 #
11 11
12 12 '''allow the use of MBCS paths with problematic encodings
13 13
14 14 Some MBCS encodings are not good for some path operations (i.e.
15 15 splitting path, case conversion, etc.) with its encoded bytes. We call
16 16 such a encoding (i.e. shift_jis and big5) as "problematic encoding".
17 17 This extension can be used to fix the issue with those encodings by
18 18 wrapping some functions to convert to Unicode string before path
19 19 operation.
20 20
21 21 This extension is useful for:
22 22
23 23 - Japanese Windows users using shift_jis encoding.
24 24 - Chinese Windows users using big5 encoding.
25 25 - All users who use a repository with one of problematic encodings on
26 26 case-insensitive file system.
27 27
28 28 This extension is not needed for:
29 29
30 30 - Any user who use only ASCII chars in path.
31 31 - Any user who do not use any of problematic encodings.
32 32
33 33 Note that there are some limitations on using this extension:
34 34
35 35 - You should use single encoding in one repository.
36 36 - If the repository path ends with 0x5c, .hg/hgrc cannot be read.
37 37 - win32mbcs is not compatible with fixutf8 extension.
38 38
39 39 By default, win32mbcs uses encoding.encoding decided by Mercurial.
40 40 You can specify the encoding by config option::
41 41
42 42 [win32mbcs]
43 43 encoding = sjis
44 44
45 45 It is useful for the users who want to commit with UTF-8 log message.
46 46 '''
47 47 from __future__ import absolute_import
48 48
49 49 import os
50 50 import sys
51 51
52 52 from mercurial.i18n import _
53 53 from mercurial import (
54 54 encoding,
55 55 error,
56 56 pycompat,
57 57 registrar,
58 58 )
59 59
60 60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 62 # be specifying the version(s) of Mercurial they are tested with, or
63 63 # leave the attribute unspecified.
64 64 testedwith = 'ships-with-hg-core'
65 65
66 66 configtable = {}
67 67 configitem = registrar.configitem(configtable)
68 68
69 69 # Encoding.encoding may be updated by --encoding option.
70 70 # Use a lambda do delay the resolution.
71 71 configitem('win32mbcs', 'encoding',
72 72 default=lambda: encoding.encoding,
73 73 )
74 74
75 75 _encoding = None # see extsetup
76 76
77 77 def decode(arg):
78 78 if isinstance(arg, str):
79 79 uarg = arg.decode(_encoding)
80 80 if arg == uarg.encode(_encoding):
81 81 return uarg
82 82 raise UnicodeError("Not local encoding")
83 83 elif isinstance(arg, tuple):
84 84 return tuple(map(decode, arg))
85 85 elif isinstance(arg, list):
86 86 return map(decode, arg)
87 87 elif isinstance(arg, dict):
88 88 for k, v in arg.items():
89 89 arg[k] = decode(v)
90 90 return arg
91 91
92 92 def encode(arg):
93 93 if isinstance(arg, unicode):
94 94 return arg.encode(_encoding)
95 95 elif isinstance(arg, tuple):
96 96 return tuple(map(encode, arg))
97 97 elif isinstance(arg, list):
98 98 return map(encode, arg)
99 99 elif isinstance(arg, dict):
100 100 for k, v in arg.items():
101 101 arg[k] = encode(v)
102 102 return arg
103 103
104 104 def appendsep(s):
105 105 # ensure the path ends with os.sep, appending it if necessary.
106 106 try:
107 107 us = decode(s)
108 108 except UnicodeError:
109 109 us = s
110 110 if us and us[-1] not in ':/\\':
111 111 s += pycompat.ossep
112 112 return s
113 113
114 114
115 115 def basewrapper(func, argtype, enc, dec, args, kwds):
116 116 # check check already converted, then call original
117 117 for arg in args:
118 118 if isinstance(arg, argtype):
119 119 return func(*args, **kwds)
120 120
121 121 try:
122 122 # convert string arguments, call func, then convert back the
123 123 # return value.
124 124 return enc(func(*dec(args), **dec(kwds)))
125 125 except UnicodeError:
126 126 raise error.Abort(_("[win32mbcs] filename conversion failed with"
127 127 " %s encoding\n") % (_encoding))
128 128
129 129 def wrapper(func, args, kwds):
130 130 return basewrapper(func, unicode, encode, decode, args, kwds)
131 131
132 132
133 133 def reversewrapper(func, args, kwds):
134 134 return basewrapper(func, str, decode, encode, args, kwds)
135 135
136 136 def wrapperforlistdir(func, args, kwds):
137 137 # Ensure 'path' argument ends with os.sep to avoids
138 138 # misinterpreting last 0x5c of MBCS 2nd byte as path separator.
139 139 if args:
140 140 args = list(args)
141 141 args[0] = appendsep(args[0])
142 142 if 'path' in kwds:
143 143 kwds['path'] = appendsep(kwds['path'])
144 144 return func(*args, **kwds)
145 145
146 146 def wrapname(name, wrapper):
147 147 module, name = name.rsplit('.', 1)
148 148 module = sys.modules[module]
149 149 func = getattr(module, name)
150 150 def f(*args, **kwds):
151 151 return wrapper(func, args, kwds)
152 152 f.__name__ = func.__name__
153 153 setattr(module, name, f)
154 154
155 155 # List of functions to be wrapped.
156 156 # NOTE: os.path.dirname() and os.path.basename() are safe because
157 157 # they use result of os.path.split()
158 158 funcs = '''os.path.join os.path.split os.path.splitext
159 159 os.path.normpath os.makedirs mercurial.util.endswithsep
160 160 mercurial.util.splitpath mercurial.util.fscasesensitive
161 161 mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath
162 162 mercurial.util.checkwinfilename mercurial.util.checkosfilename
163 163 mercurial.util.split'''
164 164
165 165 # These functions are required to be called with local encoded string
166 166 # because they expects argument is local encoded string and cause
167 167 # problem with unicode string.
168 168 rfuncs = '''mercurial.encoding.upper mercurial.encoding.lower
169 169 mercurial.util._filenamebytestr'''
170 170
171 171 # List of Windows specific functions to be wrapped.
172 172 winfuncs = '''os.path.splitunc'''
173 173
174 174 # codec and alias names of sjis and big5 to be faked.
175 175 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
176 176 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
177 177 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
178 178 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
179 179
180 180 def extsetup(ui):
181 181 # TODO: decide use of config section for this extension
182 182 if ((not os.path.supports_unicode_filenames) and
183 183 (pycompat.sysplatform != 'cygwin')):
184 184 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
185 185 return
186 186 # determine encoding for filename
187 187 global _encoding
188 188 _encoding = ui.config('win32mbcs', 'encoding')
189 189 # fake is only for relevant environment.
190 190 if _encoding.lower() in problematic_encodings.split():
191 191 for f in funcs.split():
192 192 wrapname(f, wrapper)
193 if pycompat.osname == 'nt':
193 if pycompat.iswindows:
194 194 for f in winfuncs.split():
195 195 wrapname(f, wrapper)
196 196 wrapname("mercurial.util.listdir", wrapperforlistdir)
197 197 wrapname("mercurial.windows.listdir", wrapperforlistdir)
198 198 # wrap functions to be called with local byte string arguments
199 199 for f in rfuncs.split():
200 200 wrapname(f, reversewrapper)
201 201 # Check sys.args manually instead of using ui.debug() because
202 202 # command line options is not yet applied when
203 203 # extensions.loadall() is called.
204 204 if '--debug' in sys.argv:
205 205 ui.write(("[win32mbcs] activated with encoding: %s\n")
206 206 % _encoding)
@@ -1,518 +1,518
1 1 # utility for color output for Mercurial commands
2 2 #
3 3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> and other
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import re
11 11
12 12 from .i18n import _
13 13
14 14 from . import (
15 15 encoding,
16 16 pycompat,
17 17 util
18 18 )
19 19
20 20 try:
21 21 import curses
22 22 # Mapping from effect name to terminfo attribute name (or raw code) or
23 23 # color number. This will also force-load the curses module.
24 24 _baseterminfoparams = {
25 25 'none': (True, 'sgr0', ''),
26 26 'standout': (True, 'smso', ''),
27 27 'underline': (True, 'smul', ''),
28 28 'reverse': (True, 'rev', ''),
29 29 'inverse': (True, 'rev', ''),
30 30 'blink': (True, 'blink', ''),
31 31 'dim': (True, 'dim', ''),
32 32 'bold': (True, 'bold', ''),
33 33 'invisible': (True, 'invis', ''),
34 34 'italic': (True, 'sitm', ''),
35 35 'black': (False, curses.COLOR_BLACK, ''),
36 36 'red': (False, curses.COLOR_RED, ''),
37 37 'green': (False, curses.COLOR_GREEN, ''),
38 38 'yellow': (False, curses.COLOR_YELLOW, ''),
39 39 'blue': (False, curses.COLOR_BLUE, ''),
40 40 'magenta': (False, curses.COLOR_MAGENTA, ''),
41 41 'cyan': (False, curses.COLOR_CYAN, ''),
42 42 'white': (False, curses.COLOR_WHITE, ''),
43 43 }
44 44 except ImportError:
45 45 curses = None
46 46 _baseterminfoparams = {}
47 47
48 48 # start and stop parameters for effects
49 49 _effects = {
50 50 'none': 0,
51 51 'black': 30,
52 52 'red': 31,
53 53 'green': 32,
54 54 'yellow': 33,
55 55 'blue': 34,
56 56 'magenta': 35,
57 57 'cyan': 36,
58 58 'white': 37,
59 59 'bold': 1,
60 60 'italic': 3,
61 61 'underline': 4,
62 62 'inverse': 7,
63 63 'dim': 2,
64 64 'black_background': 40,
65 65 'red_background': 41,
66 66 'green_background': 42,
67 67 'yellow_background': 43,
68 68 'blue_background': 44,
69 69 'purple_background': 45,
70 70 'cyan_background': 46,
71 71 'white_background': 47,
72 72 }
73 73
74 74 _defaultstyles = {
75 75 'grep.match': 'red bold',
76 76 'grep.linenumber': 'green',
77 77 'grep.rev': 'green',
78 78 'grep.change': 'green',
79 79 'grep.sep': 'cyan',
80 80 'grep.filename': 'magenta',
81 81 'grep.user': 'magenta',
82 82 'grep.date': 'magenta',
83 83 'bookmarks.active': 'green',
84 84 'branches.active': 'none',
85 85 'branches.closed': 'black bold',
86 86 'branches.current': 'green',
87 87 'branches.inactive': 'none',
88 88 'diff.changed': 'white',
89 89 'diff.deleted': 'red',
90 90 'diff.diffline': 'bold',
91 91 'diff.extended': 'cyan bold',
92 92 'diff.file_a': 'red bold',
93 93 'diff.file_b': 'green bold',
94 94 'diff.hunk': 'magenta',
95 95 'diff.inserted': 'green',
96 96 'diff.tab': '',
97 97 'diff.trailingwhitespace': 'bold red_background',
98 98 'changeset.public': '',
99 99 'changeset.draft': '',
100 100 'changeset.secret': '',
101 101 'diffstat.deleted': 'red',
102 102 'diffstat.inserted': 'green',
103 103 'histedit.remaining': 'red bold',
104 104 'ui.prompt': 'yellow',
105 105 'log.changeset': 'yellow',
106 106 'patchbomb.finalsummary': '',
107 107 'patchbomb.from': 'magenta',
108 108 'patchbomb.to': 'cyan',
109 109 'patchbomb.subject': 'green',
110 110 'patchbomb.diffstats': '',
111 111 'rebase.rebased': 'blue',
112 112 'rebase.remaining': 'red bold',
113 113 'resolve.resolved': 'green bold',
114 114 'resolve.unresolved': 'red bold',
115 115 'shelve.age': 'cyan',
116 116 'shelve.newest': 'green bold',
117 117 'shelve.name': 'blue bold',
118 118 'status.added': 'green bold',
119 119 'status.clean': 'none',
120 120 'status.copied': 'none',
121 121 'status.deleted': 'cyan bold underline',
122 122 'status.ignored': 'black bold',
123 123 'status.modified': 'blue bold',
124 124 'status.removed': 'red bold',
125 125 'status.unknown': 'magenta bold underline',
126 126 'tags.normal': 'green',
127 127 'tags.local': 'black bold',
128 128 }
129 129
130 130 def loadcolortable(ui, extname, colortable):
131 131 _defaultstyles.update(colortable)
132 132
133 133 def _terminfosetup(ui, mode, formatted):
134 134 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
135 135
136 136 # If we failed to load curses, we go ahead and return.
137 137 if curses is None:
138 138 return
139 139 # Otherwise, see what the config file says.
140 140 if mode not in ('auto', 'terminfo'):
141 141 return
142 142 ui._terminfoparams.update(_baseterminfoparams)
143 143
144 144 for key, val in ui.configitems('color'):
145 145 if key.startswith('color.'):
146 146 newval = (False, int(val), '')
147 147 ui._terminfoparams[key[6:]] = newval
148 148 elif key.startswith('terminfo.'):
149 149 newval = (True, '', val.replace('\\E', '\x1b'))
150 150 ui._terminfoparams[key[9:]] = newval
151 151 try:
152 152 curses.setupterm()
153 153 except curses.error as e:
154 154 ui._terminfoparams.clear()
155 155 return
156 156
157 157 for key, (b, e, c) in ui._terminfoparams.items():
158 158 if not b:
159 159 continue
160 160 if not c and not curses.tigetstr(e):
161 161 # Most terminals don't support dim, invis, etc, so don't be
162 162 # noisy and use ui.debug().
163 163 ui.debug("no terminfo entry for %s\n" % e)
164 164 del ui._terminfoparams[key]
165 165 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
166 166 # Only warn about missing terminfo entries if we explicitly asked for
167 167 # terminfo mode and we're in a formatted terminal.
168 168 if mode == "terminfo" and formatted:
169 169 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
170 170 "ECMA-48 color\n"))
171 171 ui._terminfoparams.clear()
172 172
173 173 def setup(ui):
174 174 """configure color on a ui
175 175
176 176 That function both set the colormode for the ui object and read
177 177 the configuration looking for custom colors and effect definitions."""
178 178 mode = _modesetup(ui)
179 179 ui._colormode = mode
180 180 if mode and mode != 'debug':
181 181 configstyles(ui)
182 182
183 183 def _modesetup(ui):
184 184 if ui.plain():
185 185 return None
186 186 config = ui.config('ui', 'color')
187 187 if config == 'debug':
188 188 return 'debug'
189 189
190 190 auto = (config == 'auto')
191 191 always = False
192 192 if not auto and util.parsebool(config):
193 193 # We want the config to behave like a boolean, "on" is actually auto,
194 194 # but "always" value is treated as a special case to reduce confusion.
195 195 if ui.configsource('ui', 'color') == '--color' or config == 'always':
196 196 always = True
197 197 else:
198 198 auto = True
199 199
200 200 if not always and not auto:
201 201 return None
202 202
203 203 formatted = (always or (encoding.environ.get('TERM') != 'dumb'
204 204 and ui.formatted()))
205 205
206 206 mode = ui.config('color', 'mode')
207 207
208 208 # If pager is active, color.pagermode overrides color.mode.
209 209 if getattr(ui, 'pageractive', False):
210 210 mode = ui.config('color', 'pagermode', mode)
211 211
212 212 realmode = mode
213 if pycompat.osname == 'nt':
213 if pycompat.iswindows:
214 214 from . import win32
215 215
216 216 term = encoding.environ.get('TERM')
217 217 # TERM won't be defined in a vanilla cmd.exe environment.
218 218
219 219 # UNIX-like environments on Windows such as Cygwin and MSYS will
220 220 # set TERM. They appear to make a best effort attempt at setting it
221 221 # to something appropriate. However, not all environments with TERM
222 222 # defined support ANSI.
223 223 ansienviron = term and 'xterm' in term
224 224
225 225 if mode == 'auto':
226 226 # Since "ansi" could result in terminal gibberish, we error on the
227 227 # side of selecting "win32". However, if w32effects is not defined,
228 228 # we almost certainly don't support "win32", so don't even try.
229 229 # w32effects is not populated when stdout is redirected, so checking
230 230 # it first avoids win32 calls in a state known to error out.
231 231 if ansienviron or not w32effects or win32.enablevtmode():
232 232 realmode = 'ansi'
233 233 else:
234 234 realmode = 'win32'
235 235 # An empty w32effects is a clue that stdout is redirected, and thus
236 236 # cannot enable VT mode.
237 237 elif mode == 'ansi' and w32effects and not ansienviron:
238 238 win32.enablevtmode()
239 239 elif mode == 'auto':
240 240 realmode = 'ansi'
241 241
242 242 def modewarn():
243 243 # only warn if color.mode was explicitly set and we're in
244 244 # a formatted terminal
245 245 if mode == realmode and formatted:
246 246 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
247 247
248 248 if realmode == 'win32':
249 249 ui._terminfoparams.clear()
250 250 if not w32effects:
251 251 modewarn()
252 252 return None
253 253 elif realmode == 'ansi':
254 254 ui._terminfoparams.clear()
255 255 elif realmode == 'terminfo':
256 256 _terminfosetup(ui, mode, formatted)
257 257 if not ui._terminfoparams:
258 258 ## FIXME Shouldn't we return None in this case too?
259 259 modewarn()
260 260 realmode = 'ansi'
261 261 else:
262 262 return None
263 263
264 264 if always or (auto and formatted):
265 265 return realmode
266 266 return None
267 267
268 268 def configstyles(ui):
269 269 ui._styles.update(_defaultstyles)
270 270 for status, cfgeffects in ui.configitems('color'):
271 271 if '.' not in status or status.startswith(('color.', 'terminfo.')):
272 272 continue
273 273 cfgeffects = ui.configlist('color', status)
274 274 if cfgeffects:
275 275 good = []
276 276 for e in cfgeffects:
277 277 if valideffect(ui, e):
278 278 good.append(e)
279 279 else:
280 280 ui.warn(_("ignoring unknown color/effect %r "
281 281 "(configured in color.%s)\n")
282 282 % (e, status))
283 283 ui._styles[status] = ' '.join(good)
284 284
285 285 def _activeeffects(ui):
286 286 '''Return the effects map for the color mode set on the ui.'''
287 287 if ui._colormode == 'win32':
288 288 return w32effects
289 289 elif ui._colormode is not None:
290 290 return _effects
291 291 return {}
292 292
293 293 def valideffect(ui, effect):
294 294 'Determine if the effect is valid or not.'
295 295 return ((not ui._terminfoparams and effect in _activeeffects(ui))
296 296 or (effect in ui._terminfoparams
297 297 or effect[:-11] in ui._terminfoparams))
298 298
299 299 def _effect_str(ui, effect):
300 300 '''Helper function for render_effects().'''
301 301
302 302 bg = False
303 303 if effect.endswith('_background'):
304 304 bg = True
305 305 effect = effect[:-11]
306 306 try:
307 307 attr, val, termcode = ui._terminfoparams[effect]
308 308 except KeyError:
309 309 return ''
310 310 if attr:
311 311 if termcode:
312 312 return termcode
313 313 else:
314 314 return curses.tigetstr(val)
315 315 elif bg:
316 316 return curses.tparm(curses.tigetstr('setab'), val)
317 317 else:
318 318 return curses.tparm(curses.tigetstr('setaf'), val)
319 319
320 320 def _mergeeffects(text, start, stop):
321 321 """Insert start sequence at every occurrence of stop sequence
322 322
323 323 >>> s = _mergeeffects(b'cyan', b'[C]', b'|')
324 324 >>> s = _mergeeffects(s + b'yellow', b'[Y]', b'|')
325 325 >>> s = _mergeeffects(b'ma' + s + b'genta', b'[M]', b'|')
326 326 >>> s = _mergeeffects(b'red' + s, b'[R]', b'|')
327 327 >>> s
328 328 '[R]red[M]ma[Y][C]cyan|[R][M][Y]yellow|[R][M]genta|'
329 329 """
330 330 parts = []
331 331 for t in text.split(stop):
332 332 if not t:
333 333 continue
334 334 parts.extend([start, t, stop])
335 335 return ''.join(parts)
336 336
337 337 def _render_effects(ui, text, effects):
338 338 'Wrap text in commands to turn on each effect.'
339 339 if not text:
340 340 return text
341 341 if ui._terminfoparams:
342 342 start = ''.join(_effect_str(ui, effect)
343 343 for effect in ['none'] + effects.split())
344 344 stop = _effect_str(ui, 'none')
345 345 else:
346 346 activeeffects = _activeeffects(ui)
347 347 start = [pycompat.bytestr(activeeffects[e])
348 348 for e in ['none'] + effects.split()]
349 349 start = '\033[' + ';'.join(start) + 'm'
350 350 stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
351 351 return _mergeeffects(text, start, stop)
352 352
353 353 _ansieffectre = re.compile(br'\x1b\[[0-9;]*m')
354 354
355 355 def stripeffects(text):
356 356 """Strip ANSI control codes which could be inserted by colorlabel()"""
357 357 return _ansieffectre.sub('', text)
358 358
359 359 def colorlabel(ui, msg, label):
360 360 """add color control code according to the mode"""
361 361 if ui._colormode == 'debug':
362 362 if label and msg:
363 363 if msg[-1] == '\n':
364 364 msg = "[%s|%s]\n" % (label, msg[:-1])
365 365 else:
366 366 msg = "[%s|%s]" % (label, msg)
367 367 elif ui._colormode is not None:
368 368 effects = []
369 369 for l in label.split():
370 370 s = ui._styles.get(l, '')
371 371 if s:
372 372 effects.append(s)
373 373 elif valideffect(ui, l):
374 374 effects.append(l)
375 375 effects = ' '.join(effects)
376 376 if effects:
377 377 msg = '\n'.join([_render_effects(ui, line, effects)
378 378 for line in msg.split('\n')])
379 379 return msg
380 380
381 381 w32effects = None
382 if pycompat.osname == 'nt':
382 if pycompat.iswindows:
383 383 import ctypes
384 384
385 385 _kernel32 = ctypes.windll.kernel32
386 386
387 387 _WORD = ctypes.c_ushort
388 388
389 389 _INVALID_HANDLE_VALUE = -1
390 390
391 391 class _COORD(ctypes.Structure):
392 392 _fields_ = [('X', ctypes.c_short),
393 393 ('Y', ctypes.c_short)]
394 394
395 395 class _SMALL_RECT(ctypes.Structure):
396 396 _fields_ = [('Left', ctypes.c_short),
397 397 ('Top', ctypes.c_short),
398 398 ('Right', ctypes.c_short),
399 399 ('Bottom', ctypes.c_short)]
400 400
401 401 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
402 402 _fields_ = [('dwSize', _COORD),
403 403 ('dwCursorPosition', _COORD),
404 404 ('wAttributes', _WORD),
405 405 ('srWindow', _SMALL_RECT),
406 406 ('dwMaximumWindowSize', _COORD)]
407 407
408 408 _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
409 409 _STD_ERROR_HANDLE = 0xfffffff4 # (DWORD)-12
410 410
411 411 _FOREGROUND_BLUE = 0x0001
412 412 _FOREGROUND_GREEN = 0x0002
413 413 _FOREGROUND_RED = 0x0004
414 414 _FOREGROUND_INTENSITY = 0x0008
415 415
416 416 _BACKGROUND_BLUE = 0x0010
417 417 _BACKGROUND_GREEN = 0x0020
418 418 _BACKGROUND_RED = 0x0040
419 419 _BACKGROUND_INTENSITY = 0x0080
420 420
421 421 _COMMON_LVB_REVERSE_VIDEO = 0x4000
422 422 _COMMON_LVB_UNDERSCORE = 0x8000
423 423
424 424 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
425 425 w32effects = {
426 426 'none': -1,
427 427 'black': 0,
428 428 'red': _FOREGROUND_RED,
429 429 'green': _FOREGROUND_GREEN,
430 430 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
431 431 'blue': _FOREGROUND_BLUE,
432 432 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
433 433 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
434 434 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
435 435 'bold': _FOREGROUND_INTENSITY,
436 436 'black_background': 0x100, # unused value > 0x0f
437 437 'red_background': _BACKGROUND_RED,
438 438 'green_background': _BACKGROUND_GREEN,
439 439 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
440 440 'blue_background': _BACKGROUND_BLUE,
441 441 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
442 442 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
443 443 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
444 444 _BACKGROUND_BLUE),
445 445 'bold_background': _BACKGROUND_INTENSITY,
446 446 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
447 447 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
448 448 }
449 449
450 450 passthrough = {_FOREGROUND_INTENSITY,
451 451 _BACKGROUND_INTENSITY,
452 452 _COMMON_LVB_UNDERSCORE,
453 453 _COMMON_LVB_REVERSE_VIDEO}
454 454
455 455 stdout = _kernel32.GetStdHandle(
456 456 _STD_OUTPUT_HANDLE) # don't close the handle returned
457 457 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
458 458 w32effects = None
459 459 else:
460 460 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
461 461 if not _kernel32.GetConsoleScreenBufferInfo(
462 462 stdout, ctypes.byref(csbi)):
463 463 # stdout may not support GetConsoleScreenBufferInfo()
464 464 # when called from subprocess or redirected
465 465 w32effects = None
466 466 else:
467 467 origattr = csbi.wAttributes
468 468 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
469 469 re.MULTILINE | re.DOTALL)
470 470
471 471 def win32print(ui, writefunc, *msgs, **opts):
472 472 for text in msgs:
473 473 _win32print(ui, text, writefunc, **opts)
474 474
475 475 def _win32print(ui, text, writefunc, **opts):
476 476 label = opts.get('label', '')
477 477 attr = origattr
478 478
479 479 def mapcolor(val, attr):
480 480 if val == -1:
481 481 return origattr
482 482 elif val in passthrough:
483 483 return attr | val
484 484 elif val > 0x0f:
485 485 return (val & 0x70) | (attr & 0x8f)
486 486 else:
487 487 return (val & 0x07) | (attr & 0xf8)
488 488
489 489 # determine console attributes based on labels
490 490 for l in label.split():
491 491 style = ui._styles.get(l, '')
492 492 for effect in style.split():
493 493 try:
494 494 attr = mapcolor(w32effects[effect], attr)
495 495 except KeyError:
496 496 # w32effects could not have certain attributes so we skip
497 497 # them if not found
498 498 pass
499 499 # hack to ensure regexp finds data
500 500 if not text.startswith('\033['):
501 501 text = '\033[m' + text
502 502
503 503 # Look for ANSI-like codes embedded in text
504 504 m = re.match(ansire, text)
505 505
506 506 try:
507 507 while m:
508 508 for sattr in m.group(1).split(';'):
509 509 if sattr:
510 510 attr = mapcolor(int(sattr), attr)
511 511 ui.flush()
512 512 _kernel32.SetConsoleTextAttribute(stdout, attr)
513 513 writefunc(m.group(2), **opts)
514 514 m = re.match(ansire, m.group(3))
515 515 finally:
516 516 # Explicitly reset original attributes
517 517 ui.flush()
518 518 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,2310 +1,2310
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
80 80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 81 def debugancestor(ui, repo, *args):
82 82 """find the ancestor revision of two revisions in a given index"""
83 83 if len(args) == 3:
84 84 index, rev1, rev2 = args
85 85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 86 lookup = r.lookup
87 87 elif len(args) == 2:
88 88 if not repo:
89 89 raise error.Abort(_('there is no Mercurial repository here '
90 90 '(.hg not found)'))
91 91 rev1, rev2 = args
92 92 r = repo.changelog
93 93 lookup = repo.lookup
94 94 else:
95 95 raise error.Abort(_('either two or three arguments required'))
96 96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98 98
99 99 @command('debugapplystreamclonebundle', [], 'FILE')
100 100 def debugapplystreamclonebundle(ui, repo, fname):
101 101 """apply a stream clone bundle file"""
102 102 f = hg.openpath(ui, fname)
103 103 gen = exchange.readbundle(ui, f, fname)
104 104 gen.apply(repo)
105 105
106 106 @command('debugbuilddag',
107 107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 109 ('n', 'new-file', None, _('add new file at each rev'))],
110 110 _('[OPTION]... [TEXT]'))
111 111 def debugbuilddag(ui, repo, text=None,
112 112 mergeable_file=False,
113 113 overwritten_file=False,
114 114 new_file=False):
115 115 """builds a repo with a given DAG from scratch in the current empty repo
116 116
117 117 The description of the DAG is read from stdin if not given on the
118 118 command line.
119 119
120 120 Elements:
121 121
122 122 - "+n" is a linear run of n nodes based on the current default parent
123 123 - "." is a single node based on the current default parent
124 124 - "$" resets the default parent to null (implied at the start);
125 125 otherwise the default parent is always the last node created
126 126 - "<p" sets the default parent to the backref p
127 127 - "*p" is a fork at parent p, which is a backref
128 128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 129 - "/p2" is a merge of the preceding node and p2
130 130 - ":tag" defines a local tag for the preceding node
131 131 - "@branch" sets the named branch for subsequent nodes
132 132 - "#...\\n" is a comment up to the end of the line
133 133
134 134 Whitespace between the above elements is ignored.
135 135
136 136 A backref is either
137 137
138 138 - a number n, which references the node curr-n, where curr is the current
139 139 node, or
140 140 - the name of a local tag you placed earlier using ":tag", or
141 141 - empty to denote the default parent.
142 142
143 143 All string valued-elements are either strictly alphanumeric, or must
144 144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 145 """
146 146
147 147 if text is None:
148 148 ui.status(_("reading DAG from stdin\n"))
149 149 text = ui.fin.read()
150 150
151 151 cl = repo.changelog
152 152 if len(cl) > 0:
153 153 raise error.Abort(_('repository is not empty'))
154 154
155 155 # determine number of revs in DAG
156 156 total = 0
157 157 for type, data in dagparser.parsedag(text):
158 158 if type == 'n':
159 159 total += 1
160 160
161 161 if mergeable_file:
162 162 linesperrev = 2
163 163 # make a file with k lines per rev
164 164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 165 initialmergedlines.append("")
166 166
167 167 tags = []
168 168
169 169 wlock = lock = tr = None
170 170 try:
171 171 wlock = repo.wlock()
172 172 lock = repo.lock()
173 173 tr = repo.transaction("builddag")
174 174
175 175 at = -1
176 176 atbranch = 'default'
177 177 nodeids = []
178 178 id = 0
179 179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 180 for type, data in dagparser.parsedag(text):
181 181 if type == 'n':
182 182 ui.note(('node %s\n' % str(data)))
183 183 id, ps = data
184 184
185 185 files = []
186 186 fctxs = {}
187 187
188 188 p2 = None
189 189 if mergeable_file:
190 190 fn = "mf"
191 191 p1 = repo[ps[0]]
192 192 if len(ps) > 1:
193 193 p2 = repo[ps[1]]
194 194 pa = p1.ancestor(p2)
195 195 base, local, other = [x[fn].data() for x in (pa, p1,
196 196 p2)]
197 197 m3 = simplemerge.Merge3Text(base, local, other)
198 198 ml = [l.strip() for l in m3.merge_lines()]
199 199 ml.append("")
200 200 elif at > 0:
201 201 ml = p1[fn].data().split("\n")
202 202 else:
203 203 ml = initialmergedlines
204 204 ml[id * linesperrev] += " r%i" % id
205 205 mergedtext = "\n".join(ml)
206 206 files.append(fn)
207 207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208 208
209 209 if overwritten_file:
210 210 fn = "of"
211 211 files.append(fn)
212 212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 213
214 214 if new_file:
215 215 fn = "nf%i" % id
216 216 files.append(fn)
217 217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 218 if len(ps) > 1:
219 219 if not p2:
220 220 p2 = repo[ps[1]]
221 221 for fn in p2:
222 222 if fn.startswith("nf"):
223 223 files.append(fn)
224 224 fctxs[fn] = p2[fn]
225 225
226 226 def fctxfn(repo, cx, path):
227 227 return fctxs.get(path)
228 228
229 229 if len(ps) == 0 or ps[0] < 0:
230 230 pars = [None, None]
231 231 elif len(ps) == 1:
232 232 pars = [nodeids[ps[0]], None]
233 233 else:
234 234 pars = [nodeids[p] for p in ps]
235 235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 236 date=(id, 0),
237 237 user="debugbuilddag",
238 238 extra={'branch': atbranch})
239 239 nodeid = repo.commitctx(cx)
240 240 nodeids.append(nodeid)
241 241 at = id
242 242 elif type == 'l':
243 243 id, name = data
244 244 ui.note(('tag %s\n' % name))
245 245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 246 elif type == 'a':
247 247 ui.note(('branch %s\n' % data))
248 248 atbranch = data
249 249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 250 tr.close()
251 251
252 252 if tags:
253 253 repo.vfs.write("localtags", "".join(tags))
254 254 finally:
255 255 ui.progress(_('building'), None)
256 256 release(tr, lock, wlock)
257 257
258 258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 259 indent_string = ' ' * indent
260 260 if all:
261 261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 262 % indent_string)
263 263
264 264 def showchunks(named):
265 265 ui.write("\n%s%s\n" % (indent_string, named))
266 266 for deltadata in gen.deltaiter():
267 267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 268 ui.write("%s%s %s %s %s %s %s\n" %
269 269 (indent_string, hex(node), hex(p1), hex(p2),
270 270 hex(cs), hex(deltabase), len(delta)))
271 271
272 272 chunkdata = gen.changelogheader()
273 273 showchunks("changelog")
274 274 chunkdata = gen.manifestheader()
275 275 showchunks("manifest")
276 276 for chunkdata in iter(gen.filelogheader, {}):
277 277 fname = chunkdata['filename']
278 278 showchunks(fname)
279 279 else:
280 280 if isinstance(gen, bundle2.unbundle20):
281 281 raise error.Abort(_('use debugbundle2 for this file'))
282 282 chunkdata = gen.changelogheader()
283 283 for deltadata in gen.deltaiter():
284 284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 285 ui.write("%s%s\n" % (indent_string, hex(node)))
286 286
287 287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 288 """display version and markers contained in 'data'"""
289 289 opts = pycompat.byteskwargs(opts)
290 290 data = part.read()
291 291 indent_string = ' ' * indent
292 292 try:
293 293 version, markers = obsolete._readmarkers(data)
294 294 except error.UnknownVersion as exc:
295 295 msg = "%sunsupported version: %s (%d bytes)\n"
296 296 msg %= indent_string, exc.version, len(data)
297 297 ui.write(msg)
298 298 else:
299 299 msg = "%sversion: %s (%d bytes)\n"
300 300 msg %= indent_string, version, len(data)
301 301 ui.write(msg)
302 302 fm = ui.formatter('debugobsolete', opts)
303 303 for rawmarker in sorted(markers):
304 304 m = obsutil.marker(None, rawmarker)
305 305 fm.startitem()
306 306 fm.plain(indent_string)
307 307 cmdutil.showmarker(fm, m)
308 308 fm.end()
309 309
310 310 def _debugphaseheads(ui, data, indent=0):
311 311 """display version and markers contained in 'data'"""
312 312 indent_string = ' ' * indent
313 313 headsbyphase = phases.binarydecode(data)
314 314 for phase in phases.allphases:
315 315 for head in headsbyphase[phase]:
316 316 ui.write(indent_string)
317 317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318 318
319 319 def _quasirepr(thing):
320 320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
321 321 return '{%s}' % (
322 322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
323 323 return pycompat.bytestr(repr(thing))
324 324
325 325 def _debugbundle2(ui, gen, all=None, **opts):
326 326 """lists the contents of a bundle2"""
327 327 if not isinstance(gen, bundle2.unbundle20):
328 328 raise error.Abort(_('not a bundle2 file'))
329 329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
330 330 parttypes = opts.get(r'part_type', [])
331 331 for part in gen.iterparts():
332 332 if parttypes and part.type not in parttypes:
333 333 continue
334 334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
335 335 if part.type == 'changegroup':
336 336 version = part.params.get('version', '01')
337 337 cg = changegroup.getunbundler(version, part, 'UN')
338 338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 339 if part.type == 'obsmarkers':
340 340 _debugobsmarkers(ui, part, indent=4, **opts)
341 341 if part.type == 'phase-heads':
342 342 _debugphaseheads(ui, part, indent=4)
343 343
344 344 @command('debugbundle',
345 345 [('a', 'all', None, _('show all details')),
346 346 ('', 'part-type', [], _('show only the named part type')),
347 347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 348 _('FILE'),
349 349 norepo=True)
350 350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 351 """lists the contents of a bundle"""
352 352 with hg.openpath(ui, bundlepath) as f:
353 353 if spec:
354 354 spec = exchange.getbundlespec(ui, f)
355 355 ui.write('%s\n' % spec)
356 356 return
357 357
358 358 gen = exchange.readbundle(ui, f, bundlepath)
359 359 if isinstance(gen, bundle2.unbundle20):
360 360 return _debugbundle2(ui, gen, all=all, **opts)
361 361 _debugchangegroup(ui, gen, all=all, **opts)
362 362
363 363 @command('debugcheckstate', [], '')
364 364 def debugcheckstate(ui, repo):
365 365 """validate the correctness of the current dirstate"""
366 366 parent1, parent2 = repo.dirstate.parents()
367 367 m1 = repo[parent1].manifest()
368 368 m2 = repo[parent2].manifest()
369 369 errors = 0
370 370 for f in repo.dirstate:
371 371 state = repo.dirstate[f]
372 372 if state in "nr" and f not in m1:
373 373 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
374 374 errors += 1
375 375 if state in "a" and f in m1:
376 376 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
377 377 errors += 1
378 378 if state in "m" and f not in m1 and f not in m2:
379 379 ui.warn(_("%s in state %s, but not in either manifest\n") %
380 380 (f, state))
381 381 errors += 1
382 382 for f in m1:
383 383 state = repo.dirstate[f]
384 384 if state not in "nrm":
385 385 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
386 386 errors += 1
387 387 if errors:
388 388 error = _(".hg/dirstate inconsistent with current parent's manifest")
389 389 raise error.Abort(error)
390 390
391 391 @command('debugcolor',
392 392 [('', 'style', None, _('show all configured styles'))],
393 393 'hg debugcolor')
394 394 def debugcolor(ui, repo, **opts):
395 395 """show available color, effects or style"""
396 396 ui.write(('color mode: %s\n') % ui._colormode)
397 397 if opts.get(r'style'):
398 398 return _debugdisplaystyle(ui)
399 399 else:
400 400 return _debugdisplaycolor(ui)
401 401
402 402 def _debugdisplaycolor(ui):
403 403 ui = ui.copy()
404 404 ui._styles.clear()
405 405 for effect in color._activeeffects(ui).keys():
406 406 ui._styles[effect] = effect
407 407 if ui._terminfoparams:
408 408 for k, v in ui.configitems('color'):
409 409 if k.startswith('color.'):
410 410 ui._styles[k] = k[6:]
411 411 elif k.startswith('terminfo.'):
412 412 ui._styles[k] = k[9:]
413 413 ui.write(_('available colors:\n'))
414 414 # sort label with a '_' after the other to group '_background' entry.
415 415 items = sorted(ui._styles.items(),
416 416 key=lambda i: ('_' in i[0], i[0], i[1]))
417 417 for colorname, label in items:
418 418 ui.write(('%s\n') % colorname, label=label)
419 419
def _debugdisplaystyle(ui):
    """Print each configured style label, aligned, followed by the
    effects it maps to (each effect rendered in its own style)."""
    ui.write(_('available style:\n'))
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if not effects:
            ui.write('\n')
            continue
        # 50
        ui.write(': ')
        ui.write(' ' * (max(0, longest - len(label))))
        rendered = [ui.label(e, e) for e in effects.split()]
        ui.write(', '.join(rendered))
        ui.write('\n')
431 431
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
449 449
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Operate on an arbitrary revlog index file rather than the repo's
        # changelog; auditing is disabled since the path is user-supplied.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield a 'n' (node) event per revision, plus an 'l' (label)
            # event for revisions explicitly listed on the command line.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Map revision number -> list of tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # Emit 'a' (annotation) events on branch changes, 'n' events
            # for every revision, and 'l' events for tag labels.
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
512 512
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision,
    # not a file path, so shuffle the arguments accordingly.
    usesrevlogflag = any(opts.get(k) for k in ('changelog', 'manifest', 'dir'))
    if usesrevlogflag:
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
528 528
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e, also try the extended set of date formats.
    if not opts[r"extended"]:
        d = util.parsedate(date)
    else:
        d = util.parsedate(date, util.extendeddateformats)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(d[0]))
544 544
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # Classify how this revision's delta was stored and measure its
        # chain: returns (compsize, uncompsize, deltatype, chain, chainsize).
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the delta base (e[3]) may be any revision;
            # compare against the parents (e[5]/e[6]) to name its role.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta a revision is either a full text (base)
            # or a delta against the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    # Chains are numbered in order of first appearance of their base.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # A chain of length 1 has no previous revision.
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
646 646
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent[3] is the saved mtime (fed to time.localtime below); -1 marks
        # an unset mtime.
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        # 0o20000 in the mode bits (ent[1]) marks a symlink.
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
677 677
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery exchange against ``remote`` and report which
        # heads were found to be common with the local repo.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # Prune the common set down to the heads of its ancestor set.
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
            common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # With --serverlog, replay discovery requests recorded in a server log
    # (semicolon-separated fields; 'cgss'/'unb' carry head lists) — format
    # assumed from the parsing below; confirm against the log producer.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
744 744
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # Default verbosity: annotate the name with testing status.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
790 790
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    # In verbose mode, also dump the parsed expression tree.
    if ui.verbose:
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
803 803
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % ('yes' if util.checkexec(path) else 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % ('yes' if util.checklink(path) else 'no'))
    ui.write(('hardlink: %s\n') % ('yes' if util.checknlink(path) else 'no'))
    # Probe case sensitivity with a throwaway file; a failure to create
    # one (e.g. unwritable path) leaves the answer unknown.
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = 'yes' if util.fscasesensitive(f.name) else 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
818 818
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # Build the getbundle() keyword arguments from the node id lists.
    kwargs = {}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs[r'bundlecaps'] = None
    cg = peer.getbundle('debug', **kwargs)

    # Map the user-facing compression name onto an on-disk bundle type.
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, cg, bundlepath, bundletype)
853 853
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                # Try the file itself first, then walk up through its parent
                # directories looking for a matching ignore rule.
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    # Ignored indirectly via an ignored ancestor directory.
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
895 895
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # The base column's header depends on how deltas are stored.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    # --debug shows full hex node ids; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
952 952
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rl:
        p1, p2 = rl.parents(rl.node(rev))
        # First parent always gets an edge; second only when it exists.
        ui.write("\t%d -> %d\n" % (rl.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rl.rev(p2), rev))
    ui.write("}\n")
967 967
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # Write ``contents`` to a fresh temporary file and return its path.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # Count of detected problems; also the command's return value.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # Import the C extension modules to verify they load.
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1135 1135
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # One "1"/"0" digit per queried node, in input order.
    nodes = [bin(s) for s in ids]
    bits = ["1" if known else "0" for known in peer.known(nodes)]
    ui.write("%s\n" % "".join(bits))
1149 1149
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Retained as an alias only; all the work happens in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1154 1154
@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
    ('W', 'force-wlock', None,
     _('free the working state lock (DANGEROUS)'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    # Bug fix: the second operand used to re-test 'force_lock', so invoking
    # with only --force-wlock fell through to the reporting code below
    # instead of returning immediately after freeing the lock.
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Report the state of one lock file; returns 1 if held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else held it: release and report free.
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    # Only show the host when the lock is held remotely.
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # Lock file vanished between the probe and lstat: it is free.

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1226 1226
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the all-zero hash as the string 'null' for readability
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # pretty-print the records in the requested on-disk format (1 or 2);
        # v1records/v2records are read by the enclosing function below
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                # 'L': node of the local ("ours") changeset
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                # 'O': node of the other ("theirs") changeset
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # 'm': merge driver name and its state, NUL-separated
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # 'F'/'D'/'C': per-file merge records with NUL-separated
                # fields; v2 stores two extra fields (other node, flags)
                # that the v1 format lacks
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # 'f': extra key=value metadata attached to a single file,
                # stored as alternating key/value entries
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # 'l': conflict-marker labels (local, other, optional base)
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                # unknown record type: dump it raw with NULs made visible
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'LOml' order; anything else
        # sorts after them by record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # the two formats disagree: prefer v1, show v2 only with --verbose
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1325 1325
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect candidate names from every namespace except branches;
    # branches are merged in separately so that only open ones are
    # offered, matching this command's historical behavior.
    candidates = set()
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(branch for (branch, heads, tip, closed)
                      in repo.branchmap().iterbranches() if not closed)

    # No arguments means "match everything" (empty prefix).
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1345 1345
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full hex node id; aborts on anything shorter/invalid
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # deletion mode: --delete INDEX... removes markers by store index
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parent data can only be recorded for changesets we have
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # --index needs positions within the full marker list, so
            # iterate everything but display only the filtered subset
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1461 1461
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completion sets for 'path', restricted to
        # dirstate entries whose state character is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # path lies outside the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; on platforms with another
        # separator translate the spec here and matches back below
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop the completion at the next separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # map -n/-a/-r options to dirstate state characters; no option at
    # all means "accept everything" (see 'nmar' fallback below)
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1526 1526
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)

    # An explicit --tool is applied as a temporary ui.forcemerge override
    # for the duration of the examination below.
    cfgoverrides = {}
    forced = opts['tool']
    if forced:
        cfgoverrides[('ui', 'forcemerge')] = forced
        ui.note(('with --tool %r\n') % (forced,))

    with ui.configoverride(cfgoverrides, 'debugmergepatterns'):
        # With --verbose, echo the tool-selection inputs that apply
        # before pattern matching (HGMERGE, ui.merge).
        envmerge = encoding.environ.get("HGMERGE")
        if envmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (envmerge,))
        cfgmerge = ui.config("ui", "merge")
        if cfgmerge:
            ui.note(('with ui.merge=%r\n') % (cfgmerge,))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Unless --debug is set, capture tool-selection chatter so the
            # FILE = TOOL lines stay clean.
            quiet = not ui.debugflag
            try:
                if quiet:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if quiet:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1605 1605
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for k, v in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))
    else:
        # update mode: conditional set, reporting the protocol's verdict
        key, old, new = keyinfo
        r = peer.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
1626 1626
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ctxa = scmutil.revsingle(repo, a)
    ctxb = scmutil.revsingle(repo, b)
    veca = pvec.ctxpvec(ctxa)
    vecb = pvec.ctxpvec(ctxb)
    # Classify how the two pvecs relate: equal, strictly newer/older
    # (> / <), or divergent (|).
    if veca == vecb:
        rel = "="
    elif veca > vecb:
        rel = ">"
    elif veca < vecb:
        rel = "<"
    elif veca | vecb:
        rel = "|"
    ui.write(_("a: %s\n") % veca)
    ui.write(_("b: %s\n") % vecb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (veca._depth, vecb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(veca._depth - vecb._depth),
              pvec._hamming(veca._vec, vecb._vec),
              veca.distance(vecb), rel))
1647 1647
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this to the
        # set of inconsistent files computed below (see command doc).
        changedfiles = None
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            # tracked in the manifest but missing from the dirstate
            manifestonly = inmanifest - indirstate
            # in the dirstate but not the manifest, excluding scheduled adds
            dsonly = indirstate - inmanifest
            dsnotadded = {f for f in dsonly if ds[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1685 1685
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # delegated entirely to the repair module
    repair.rebuildfncache(ui, repo)
1690 1690
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    # For every matched file, report where its filelog says it was
    # copied/renamed from, if anywhere.
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(abspath)
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
1708 1708
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # --dump: raw one-line-per-revision table, then exit
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": treat the rev as its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # running head set: drop this rev's parents, add the rev itself
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # decode the revlog version/flags word
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # statistics accumulators for the main scan below
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # [min, max, total] triples (totals become averages later)
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold 'size' into the [min, max, total] triple 'l'
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot revision: starts a new delta chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # delta revision: extends its delta parent's chain; classify
            # the delta base (previous rev / p1 / p2 / other)
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    # derive summary figures; the [2] slots become per-revision averages
    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format padded to the width of 'max'
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # decimal format followed by a percentage, padded as above
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the formats above
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # row label for one chunk type; printable bytes get hex + char
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
1939 1939
@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('', 'show-revs', True, _('print list of result revisions (default)')),
          ('s', 'show-set', None, _('print internal representation of result set')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
         ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # successive transformations applied to the parsed tree, in order;
    # each entry is (stage name, tree -> tree function)
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stages to print: always, or only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping every intermediate tree for --verify-optimized
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized tree and emit a
        # unified-diff-style comparison of the resulting revision lists
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        # non-zero exit signals that optimization changed the result
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%s\n" % c)
2042 2042
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions before taking the lock; an omitted second
    # revision defaults to null (i.e. a non-merge working directory).
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
2060 2060
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    # Chain building relies on the Windows-only win32 module below, so bail
    # out early on every other platform.  (Resolves the leftover diff hunk
    # here to the committed form: pycompat.iswindows, per the
    # "codemod: use pycompat.iswindows" changeset.)
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Only schemes with a well-known TLS/SSH port are supported.
    if url.scheme == 'https':
        addr = (url.host, url.port or 443)
    elif url.scheme == 'ssh':
        addr = (url.host, url.port or 22)
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled: we only need the
    # peer's raw certificate so win32 can inspect and build its chain.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass only checks; build=False avoids touching Windows Update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second pass (default build=True) may fetch missing
            # intermediates/roots via Windows Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2120 2120
2121 2121 @command('debugsub',
2122 2122 [('r', 'rev', '',
2123 2123 _('revision to check'), _('REV'))],
2124 2124 _('[-r REV] [REV]'))
2125 2125 def debugsub(ui, repo, rev=None):
2126 2126 ctx = scmutil.revsingle(repo, rev, None)
2127 2127 for k, v in sorted(ctx.substate.items()):
2128 2128 ui.write(('path %s\n') % k)
2129 2129 ui.write((' source %s\n') % v[0])
2130 2130 ui.write((' revision %s\n') % v[1])
2131 2131
2132 2132 @command('debugsuccessorssets',
2133 2133 [('', 'closest', False, _('return closest successors sets only'))],
2134 2134 _('[REV]'))
2135 2135 def debugsuccessorssets(ui, repo, *revs, **opts):
2136 2136 """show set of successors for revision
2137 2137
2138 2138 A successors set of changeset A is a consistent group of revisions that
2139 2139 succeed A. It contains non-obsolete changesets only unless closests
2140 2140 successors set is set.
2141 2141
2142 2142 In most cases a changeset A has a single successors set containing a single
2143 2143 successor (changeset A replaced by A').
2144 2144
2145 2145 A changeset that is made obsolete with no successors are called "pruned".
2146 2146 Such changesets have no successors sets at all.
2147 2147
2148 2148 A changeset that has been "split" will have a successors set containing
2149 2149 more than one successor.
2150 2150
2151 2151 A changeset that has been rewritten in multiple different ways is called
2152 2152 "divergent". Such changesets have multiple successor sets (each of which
2153 2153 may also be split, i.e. have multiple successors).
2154 2154
2155 2155 Results are displayed as follows::
2156 2156
2157 2157 <rev1>
2158 2158 <successors-1A>
2159 2159 <rev2>
2160 2160 <successors-2A>
2161 2161 <successors-2B1> <successors-2B2> <successors-2B3>
2162 2162
2163 2163 Here rev2 has two possible (i.e. divergent) successors sets. The first
2164 2164 holds one element, whereas the second holds three (i.e. the changeset has
2165 2165 been split).
2166 2166 """
2167 2167 # passed to successorssets caching computation from one call to another
2168 2168 cache = {}
2169 2169 ctx2str = str
2170 2170 node2str = short
2171 2171 if ui.debug():
2172 2172 def ctx2str(ctx):
2173 2173 return ctx.hex()
2174 2174 node2str = hex
2175 2175 for rev in scmutil.revrange(repo, revs):
2176 2176 ctx = repo[rev]
2177 2177 ui.write('%s\n'% ctx2str(ctx))
2178 2178 for succsset in obsutil.successorssets(repo, ctx.node(),
2179 2179 closest=opts['closest'],
2180 2180 cache=cache):
2181 2181 if succsset:
2182 2182 ui.write(' ')
2183 2183 ui.write(node2str(succsset[0]))
2184 2184 for node in succsset[1:]:
2185 2185 ui.write(' ')
2186 2186 ui.write(node2str(node))
2187 2187 ui.write('\n')
2188 2188
2189 2189 @command('debugtemplate',
2190 2190 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2191 2191 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2192 2192 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2193 2193 optionalrepo=True)
2194 2194 def debugtemplate(ui, repo, tmpl, **opts):
2195 2195 """parse and apply a template
2196 2196
2197 2197 If -r/--rev is given, the template is processed as a log template and
2198 2198 applied to the given changesets. Otherwise, it is processed as a generic
2199 2199 template.
2200 2200
2201 2201 Use --verbose to print the parsed tree.
2202 2202 """
2203 2203 revs = None
2204 2204 if opts[r'rev']:
2205 2205 if repo is None:
2206 2206 raise error.RepoError(_('there is no Mercurial repository here '
2207 2207 '(.hg not found)'))
2208 2208 revs = scmutil.revrange(repo, opts[r'rev'])
2209 2209
2210 2210 props = {}
2211 2211 for d in opts[r'define']:
2212 2212 try:
2213 2213 k, v = (e.strip() for e in d.split('=', 1))
2214 2214 if not k or k == 'ui':
2215 2215 raise ValueError
2216 2216 props[k] = v
2217 2217 except ValueError:
2218 2218 raise error.Abort(_('malformed keyword definition: %s') % d)
2219 2219
2220 2220 if ui.verbose:
2221 2221 aliases = ui.configitems('templatealias')
2222 2222 tree = templater.parse(tmpl)
2223 2223 ui.note(templater.prettyformat(tree), '\n')
2224 2224 newtree = templater.expandaliases(tree, aliases)
2225 2225 if newtree != tree:
2226 2226 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2227 2227
2228 2228 if revs is None:
2229 2229 t = formatter.maketemplater(ui, tmpl)
2230 2230 props['ui'] = ui
2231 2231 ui.write(t.render(props))
2232 2232 else:
2233 2233 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2234 2234 for r in revs:
2235 2235 displayer.show(repo[r], **pycompat.strkwargs(props))
2236 2236 displayer.close()
2237 2237
2238 2238 @command('debugupdatecaches', [])
2239 2239 def debugupdatecaches(ui, repo, *pats, **opts):
2240 2240 """warm all known caches in the repository"""
2241 2241 with repo.wlock(), repo.lock():
2242 2242 repo.updatecaches()
2243 2243
2244 2244 @command('debugupgraderepo', [
2245 2245 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2246 2246 ('', 'run', False, _('performs an upgrade')),
2247 2247 ])
2248 2248 def debugupgraderepo(ui, repo, run=False, optimize=None):
2249 2249 """upgrade a repository to use different features
2250 2250
2251 2251 If no arguments are specified, the repository is evaluated for upgrade
2252 2252 and a list of problems and potential optimizations is printed.
2253 2253
2254 2254 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2255 2255 can be influenced via additional arguments. More details will be provided
2256 2256 by the command output when run without ``--run``.
2257 2257
2258 2258 During the upgrade, the repository will be locked and no writes will be
2259 2259 allowed.
2260 2260
2261 2261 At the end of the upgrade, the repository may not be readable while new
2262 2262 repository data is swapped in. This window will be as long as it takes to
2263 2263 rename some directories inside the ``.hg`` directory. On most machines, this
2264 2264 should complete almost instantaneously and the chances of a consumer being
2265 2265 unable to access the repository should be low.
2266 2266 """
2267 2267 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2268 2268
2269 2269 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2270 2270 inferrepo=True)
2271 2271 def debugwalk(ui, repo, *pats, **opts):
2272 2272 """show how files match on given patterns"""
2273 2273 opts = pycompat.byteskwargs(opts)
2274 2274 m = scmutil.match(repo[None], pats, opts)
2275 2275 ui.write(('matcher: %r\n' % m))
2276 2276 items = list(repo[None].walk(m))
2277 2277 if not items:
2278 2278 return
2279 2279 f = lambda fn: fn
2280 2280 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2281 2281 f = lambda fn: util.normpath(fn)
2282 2282 fmt = 'f %%-%ds %%-%ds %%s' % (
2283 2283 max([len(abs) for abs in items]),
2284 2284 max([len(m.rel(abs)) for abs in items]))
2285 2285 for abs in items:
2286 2286 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2287 2287 ui.write("%s\n" % line.rstrip())
2288 2288
2289 2289 @command('debugwireargs',
2290 2290 [('', 'three', '', 'three'),
2291 2291 ('', 'four', '', 'four'),
2292 2292 ('', 'five', '', 'five'),
2293 2293 ] + cmdutil.remoteopts,
2294 2294 _('REPO [OPTIONS]... [ONE [TWO]]'),
2295 2295 norepo=True)
2296 2296 def debugwireargs(ui, repopath, *vals, **opts):
2297 2297 opts = pycompat.byteskwargs(opts)
2298 2298 repo = hg.peer(ui, opts, repopath)
2299 2299 for opt in cmdutil.remoteopts:
2300 2300 del opts[opt[1]]
2301 2301 args = {}
2302 2302 for k, v in opts.iteritems():
2303 2303 if v:
2304 2304 args[k] = v
2305 2305 # run twice to check that we don't mess up the stream for the next command
2306 2306 res1 = repo.debugwireargs(*vals, **args)
2307 2307 res2 = repo.debugwireargs(*vals, **args)
2308 2308 ui.write("%s\n" % res1)
2309 2309 if res1 != res2:
2310 2310 ui.warn("%s\n" % res2)
@@ -1,334 +1,334
1 1 # hgweb/server.py - The standalone hg web server.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 import errno
12 12 import os
13 13 import socket
14 14 import sys
15 15 import traceback
16 16
17 17 from ..i18n import _
18 18
19 19 from .. import (
20 20 error,
21 21 pycompat,
22 22 util,
23 23 )
24 24
25 25 httpservermod = util.httpserver
26 26 socketserver = util.socketserver
27 27 urlerr = util.urlerr
28 28 urlreq = util.urlreq
29 29
30 30 from . import (
31 31 common,
32 32 )
33 33
34 34 def _splitURI(uri):
35 35 """Return path and query that has been split from uri
36 36
37 37 Just like CGI environment, the path is unquoted, the query is
38 38 not.
39 39 """
40 40 if '?' in uri:
41 41 path, query = uri.split('?', 1)
42 42 else:
43 43 path, query = uri, ''
44 44 return urlreq.unquote(path), query
45 45
46 46 class _error_logger(object):
47 47 def __init__(self, handler):
48 48 self.handler = handler
49 49 def flush(self):
50 50 pass
51 51 def write(self, str):
52 52 self.writelines(str.split('\n'))
53 53 def writelines(self, seq):
54 54 for msg in seq:
55 55 self.handler.log_error("HG error: %s", msg)
56 56
57 57 class _httprequesthandler(httpservermod.basehttprequesthandler):
58 58
59 59 url_scheme = 'http'
60 60
61 61 @staticmethod
62 62 def preparehttpserver(httpserver, ui):
63 63 """Prepare .socket of new HTTPServer instance"""
64 64
65 65 def __init__(self, *args, **kargs):
66 66 self.protocol_version = r'HTTP/1.1'
67 67 httpservermod.basehttprequesthandler.__init__(self, *args, **kargs)
68 68
69 69 def _log_any(self, fp, format, *args):
70 70 fp.write("%s - - [%s] %s\n" % (self.client_address[0],
71 71 self.log_date_time_string(),
72 72 format % args))
73 73 fp.flush()
74 74
75 75 def log_error(self, format, *args):
76 76 self._log_any(self.server.errorlog, format, *args)
77 77
78 78 def log_message(self, format, *args):
79 79 self._log_any(self.server.accesslog, format, *args)
80 80
81 81 def log_request(self, code='-', size='-'):
82 82 xheaders = []
83 83 if util.safehasattr(self, 'headers'):
84 84 xheaders = [h for h in self.headers.items()
85 85 if h[0].startswith('x-')]
86 86 self.log_message('"%s" %s %s%s',
87 87 self.requestline, str(code), str(size),
88 88 ''.join([' %s:%s' % h for h in sorted(xheaders)]))
89 89
90 90 def do_write(self):
91 91 try:
92 92 self.do_hgweb()
93 93 except socket.error as inst:
94 94 if inst[0] != errno.EPIPE:
95 95 raise
96 96
97 97 def do_POST(self):
98 98 try:
99 99 self.do_write()
100 100 except Exception:
101 101 self._start_response("500 Internal Server Error", [])
102 102 self._write("Internal Server Error")
103 103 self._done()
104 104 tb = "".join(traceback.format_exception(*sys.exc_info()))
105 105 self.log_error("Exception happened during processing "
106 106 "request '%s':\n%s", self.path, tb)
107 107
108 108 def do_GET(self):
109 109 self.do_POST()
110 110
111 111 def do_hgweb(self):
112 112 path, query = _splitURI(self.path)
113 113
114 114 env = {}
115 115 env[r'GATEWAY_INTERFACE'] = r'CGI/1.1'
116 116 env[r'REQUEST_METHOD'] = self.command
117 117 env[r'SERVER_NAME'] = self.server.server_name
118 118 env[r'SERVER_PORT'] = str(self.server.server_port)
119 119 env[r'REQUEST_URI'] = self.path
120 120 env[r'SCRIPT_NAME'] = self.server.prefix
121 121 env[r'PATH_INFO'] = path[len(self.server.prefix):]
122 122 env[r'REMOTE_HOST'] = self.client_address[0]
123 123 env[r'REMOTE_ADDR'] = self.client_address[0]
124 124 if query:
125 125 env[r'QUERY_STRING'] = query
126 126
127 127 if self.headers.typeheader is None:
128 128 env[r'CONTENT_TYPE'] = self.headers.type
129 129 else:
130 130 env[r'CONTENT_TYPE'] = self.headers.typeheader
131 131 length = self.headers.getheader('content-length')
132 132 if length:
133 133 env[r'CONTENT_LENGTH'] = length
134 134 for header in [h for h in self.headers.keys()
135 135 if h not in ('content-type', 'content-length')]:
136 136 hkey = r'HTTP_' + header.replace(r'-', r'_').upper()
137 137 hval = self.headers.get(header)
138 138 hval = hval.replace(r'\n', r'').strip()
139 139 if hval:
140 140 env[hkey] = hval
141 141 env[r'SERVER_PROTOCOL'] = self.request_version
142 142 env[r'wsgi.version'] = (1, 0)
143 143 env[r'wsgi.url_scheme'] = self.url_scheme
144 144 if env.get(r'HTTP_EXPECT', '').lower() == '100-continue':
145 145 self.rfile = common.continuereader(self.rfile, self.wfile.write)
146 146
147 147 env[r'wsgi.input'] = self.rfile
148 148 env[r'wsgi.errors'] = _error_logger(self)
149 149 env[r'wsgi.multithread'] = isinstance(self.server,
150 150 socketserver.ThreadingMixIn)
151 151 env[r'wsgi.multiprocess'] = isinstance(self.server,
152 152 socketserver.ForkingMixIn)
153 153 env[r'wsgi.run_once'] = 0
154 154
155 155 self.saved_status = None
156 156 self.saved_headers = []
157 157 self.sent_headers = False
158 158 self.length = None
159 159 self._chunked = None
160 160 for chunk in self.server.application(env, self._start_response):
161 161 self._write(chunk)
162 162 if not self.sent_headers:
163 163 self.send_headers()
164 164 self._done()
165 165
166 166 def send_headers(self):
167 167 if not self.saved_status:
168 168 raise AssertionError("Sending headers before "
169 169 "start_response() called")
170 170 saved_status = self.saved_status.split(None, 1)
171 171 saved_status[0] = int(saved_status[0])
172 172 self.send_response(*saved_status)
173 173 self.length = None
174 174 self._chunked = False
175 175 for h in self.saved_headers:
176 176 self.send_header(*h)
177 177 if h[0].lower() == 'content-length':
178 178 self.length = int(h[1])
179 179 if (self.length is None and
180 180 saved_status[0] != common.HTTP_NOT_MODIFIED):
181 181 self._chunked = (not self.close_connection and
182 182 self.request_version == "HTTP/1.1")
183 183 if self._chunked:
184 184 self.send_header('Transfer-Encoding', 'chunked')
185 185 else:
186 186 self.send_header('Connection', 'close')
187 187 self.end_headers()
188 188 self.sent_headers = True
189 189
190 190 def _start_response(self, http_status, headers, exc_info=None):
191 191 code, msg = http_status.split(None, 1)
192 192 code = int(code)
193 193 self.saved_status = http_status
194 194 bad_headers = ('connection', 'transfer-encoding')
195 195 self.saved_headers = [h for h in headers
196 196 if h[0].lower() not in bad_headers]
197 197 return self._write
198 198
199 199 def _write(self, data):
200 200 if not self.saved_status:
201 201 raise AssertionError("data written before start_response() called")
202 202 elif not self.sent_headers:
203 203 self.send_headers()
204 204 if self.length is not None:
205 205 if len(data) > self.length:
206 206 raise AssertionError("Content-length header sent, but more "
207 207 "bytes than specified are being written.")
208 208 self.length = self.length - len(data)
209 209 elif self._chunked and data:
210 210 data = '%x\r\n%s\r\n' % (len(data), data)
211 211 self.wfile.write(data)
212 212 self.wfile.flush()
213 213
214 214 def _done(self):
215 215 if self._chunked:
216 216 self.wfile.write('0\r\n\r\n')
217 217 self.wfile.flush()
218 218
219 219 class _httprequesthandlerssl(_httprequesthandler):
220 220 """HTTPS handler based on Python's ssl module"""
221 221
222 222 url_scheme = 'https'
223 223
224 224 @staticmethod
225 225 def preparehttpserver(httpserver, ui):
226 226 try:
227 227 from .. import sslutil
228 228 sslutil.modernssl
229 229 except ImportError:
230 230 raise error.Abort(_("SSL support is unavailable"))
231 231
232 232 certfile = ui.config('web', 'certificate')
233 233
234 234 # These config options are currently only meant for testing. Use
235 235 # at your own risk.
236 236 cafile = ui.config('devel', 'servercafile')
237 237 reqcert = ui.configbool('devel', 'serverrequirecert')
238 238
239 239 httpserver.socket = sslutil.wrapserversocket(httpserver.socket,
240 240 ui,
241 241 certfile=certfile,
242 242 cafile=cafile,
243 243 requireclientcert=reqcert)
244 244
245 245 def setup(self):
246 246 self.connection = self.request
247 247 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
248 248 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
249 249
250 250 try:
251 251 import threading
252 252 threading.activeCount() # silence pyflakes and bypass demandimport
253 253 _mixin = socketserver.ThreadingMixIn
254 254 except ImportError:
255 255 if util.safehasattr(os, "fork"):
256 256 _mixin = socketserver.ForkingMixIn
257 257 else:
258 258 class _mixin(object):
259 259 pass
260 260
261 261 def openlog(opt, default):
262 262 if opt and opt != '-':
263 263 return open(opt, 'a')
264 264 return default
265 265
266 266 class MercurialHTTPServer(_mixin, httpservermod.httpserver, object):
267 267
268 268 # SO_REUSEADDR has broken semantics on windows
269 if pycompat.osname == 'nt':
269 if pycompat.iswindows:
270 270 allow_reuse_address = 0
271 271
272 272 def __init__(self, ui, app, addr, handler, **kwargs):
273 273 httpservermod.httpserver.__init__(self, addr, handler, **kwargs)
274 274 self.daemon_threads = True
275 275 self.application = app
276 276
277 277 handler.preparehttpserver(self, ui)
278 278
279 279 prefix = ui.config('web', 'prefix')
280 280 if prefix:
281 281 prefix = '/' + prefix.strip('/')
282 282 self.prefix = prefix
283 283
284 284 alog = openlog(ui.config('web', 'accesslog'), ui.fout)
285 285 elog = openlog(ui.config('web', 'errorlog'), ui.ferr)
286 286 self.accesslog = alog
287 287 self.errorlog = elog
288 288
289 289 self.addr, self.port = self.socket.getsockname()[0:2]
290 290 self.fqaddr = socket.getfqdn(addr[0])
291 291
292 292 class IPv6HTTPServer(MercurialHTTPServer):
293 293 address_family = getattr(socket, 'AF_INET6', None)
294 294 def __init__(self, *args, **kwargs):
295 295 if self.address_family is None:
296 296 raise error.RepoError(_('IPv6 is not available on this system'))
297 297 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
298 298
299 299 def create_server(ui, app):
300 300
301 301 if ui.config('web', 'certificate'):
302 302 handler = _httprequesthandlerssl
303 303 else:
304 304 handler = _httprequesthandler
305 305
306 306 if ui.configbool('web', 'ipv6'):
307 307 cls = IPv6HTTPServer
308 308 else:
309 309 cls = MercurialHTTPServer
310 310
311 311 # ugly hack due to python issue5853 (for threaded use)
312 312 try:
313 313 import mimetypes
314 314 mimetypes.init()
315 315 except UnicodeDecodeError:
316 316 # Python 2.x's mimetypes module attempts to decode strings
317 317 # from Windows' ANSI APIs as ascii (fail), then re-encode them
318 318 # as ascii (clown fail), because the default Python Unicode
319 319 # codec is hardcoded as ascii.
320 320
321 321 sys.argv # unwrap demand-loader so that reload() works
322 322 reload(sys) # resurrect sys.setdefaultencoding()
323 323 oldenc = sys.getdefaultencoding()
324 324 sys.setdefaultencoding("latin1") # or any full 8-bit encoding
325 325 mimetypes.init()
326 326 sys.setdefaultencoding(oldenc)
327 327
328 328 address = ui.config('web', 'address')
329 329 port = util.getport(ui.config('web', 'port'))
330 330 try:
331 331 return cls(ui, app, (address, port), handler)
332 332 except socket.error as inst:
333 333 raise error.Abort(_("cannot start server at '%s:%d': %s")
334 334 % (address, port, inst.args[1]))
@@ -1,109 +1,109
1 1 # i18n.py - internationalization support for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import gettext as gettextmod
11 11 import locale
12 12 import os
13 13 import sys
14 14
15 15 from . import (
16 16 encoding,
17 17 pycompat,
18 18 )
19 19
20 20 # modelled after templater.templatepath:
21 21 if getattr(sys, 'frozen', None) is not None:
22 22 module = pycompat.sysexecutable
23 23 else:
24 24 module = pycompat.fsencode(__file__)
25 25
26 26 try:
27 27 unicode
28 28 except NameError:
29 29 unicode = str
30 30
31 31 _languages = None
32 if (pycompat.osname == 'nt'
32 if (pycompat.iswindows
33 33 and 'LANGUAGE' not in encoding.environ
34 34 and 'LC_ALL' not in encoding.environ
35 35 and 'LC_MESSAGES' not in encoding.environ
36 36 and 'LANG' not in encoding.environ):
37 37 # Try to detect UI language by "User Interface Language Management" API
38 38 # if no locale variables are set. Note that locale.getdefaultlocale()
39 39 # uses GetLocaleInfo(), which may be different from UI language.
40 40 # (See http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx )
41 41 try:
42 42 import ctypes
43 43 langid = ctypes.windll.kernel32.GetUserDefaultUILanguage()
44 44 _languages = [locale.windows_locale[langid]]
45 45 except (ImportError, AttributeError, KeyError):
46 46 # ctypes not found or unknown langid
47 47 pass
48 48
49 49 _ugettext = None
50 50
51 51 def setdatapath(datapath):
52 52 datapath = pycompat.fsdecode(datapath)
53 53 localedir = os.path.join(datapath, pycompat.sysstr('locale'))
54 54 t = gettextmod.translation('hg', localedir, _languages, fallback=True)
55 55 global _ugettext
56 56 try:
57 57 _ugettext = t.ugettext
58 58 except AttributeError:
59 59 _ugettext = t.gettext
60 60
61 61 _msgcache = {}
62 62
63 63 def gettext(message):
64 64 """Translate message.
65 65
66 66 The message is looked up in the catalog to get a Unicode string,
67 67 which is encoded in the local encoding before being returned.
68 68
69 69 Important: message is restricted to characters in the encoding
70 70 given by sys.getdefaultencoding() which is most likely 'ascii'.
71 71 """
72 72 # If message is None, t.ugettext will return u'None' as the
73 73 # translation whereas our callers expect us to return None.
74 74 if message is None or not _ugettext:
75 75 return message
76 76
77 77 if message not in _msgcache:
78 78 if type(message) is unicode:
79 79 # goofy unicode docstrings in test
80 80 paragraphs = message.split(u'\n\n')
81 81 else:
82 82 paragraphs = [p.decode("ascii") for p in message.split('\n\n')]
83 83 # Be careful not to translate the empty string -- it holds the
84 84 # meta data of the .po file.
85 85 u = u'\n\n'.join([p and _ugettext(p) or u'' for p in paragraphs])
86 86 try:
87 87 # encoding.tolocal cannot be used since it will first try to
88 88 # decode the Unicode string. Calling u.decode(enc) really
89 89 # means u.encode(sys.getdefaultencoding()).decode(enc). Since
90 90 # the Python encoding defaults to 'ascii', this fails if the
91 91 # translated string use non-ASCII characters.
92 92 encodingstr = pycompat.sysstr(encoding.encoding)
93 93 _msgcache[message] = u.encode(encodingstr, "replace")
94 94 except LookupError:
95 95 # An unknown encoding results in a LookupError.
96 96 _msgcache[message] = message
97 97 return _msgcache[message]
98 98
99 99 def _plain():
100 100 if ('HGPLAIN' not in encoding.environ
101 101 and 'HGPLAINEXCEPT' not in encoding.environ):
102 102 return False
103 103 exceptions = encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
104 104 return 'i18n' not in exceptions
105 105
106 106 if _plain():
107 107 _ = lambda message: message
108 108 else:
109 109 _ = gettext
@@ -1,271 +1,271
1 1 # osutil.py - pure Python version of osutil.c
2 2 #
3 3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import ctypes
11 11 import ctypes.util
12 12 import os
13 13 import socket
14 14 import stat as statmod
15 15
16 16 from .. import (
17 17 pycompat,
18 18 )
19 19
20 20 def _mode_to_kind(mode):
21 21 if statmod.S_ISREG(mode):
22 22 return statmod.S_IFREG
23 23 if statmod.S_ISDIR(mode):
24 24 return statmod.S_IFDIR
25 25 if statmod.S_ISLNK(mode):
26 26 return statmod.S_IFLNK
27 27 if statmod.S_ISBLK(mode):
28 28 return statmod.S_IFBLK
29 29 if statmod.S_ISCHR(mode):
30 30 return statmod.S_IFCHR
31 31 if statmod.S_ISFIFO(mode):
32 32 return statmod.S_IFIFO
33 33 if statmod.S_ISSOCK(mode):
34 34 return statmod.S_IFSOCK
35 35 return mode
36 36
37 37 def listdir(path, stat=False, skip=None):
38 38 '''listdir(path, stat=False) -> list_of_tuples
39 39
40 40 Return a sorted list containing information about the entries
41 41 in the directory.
42 42
43 43 If stat is True, each element is a 3-tuple:
44 44
45 45 (name, type, stat object)
46 46
47 47 Otherwise, each element is a 2-tuple:
48 48
49 49 (name, type)
50 50 '''
51 51 result = []
52 52 prefix = path
53 53 if not prefix.endswith(pycompat.ossep):
54 54 prefix += pycompat.ossep
55 55 names = os.listdir(path)
56 56 names.sort()
57 57 for fn in names:
58 58 st = os.lstat(prefix + fn)
59 59 if fn == skip and statmod.S_ISDIR(st.st_mode):
60 60 return []
61 61 if stat:
62 62 result.append((fn, _mode_to_kind(st.st_mode), st))
63 63 else:
64 64 result.append((fn, _mode_to_kind(st.st_mode)))
65 65 return result
66 66
67 if pycompat.osname != 'nt':
67 if not pycompat.iswindows:
68 68 posixfile = open
69 69
70 70 _SCM_RIGHTS = 0x01
71 71 _socklen_t = ctypes.c_uint
72 72
73 73 if pycompat.sysplatform.startswith('linux'):
74 74 # socket.h says "the type should be socklen_t but the definition of
75 75 # the kernel is incompatible with this."
76 76 _cmsg_len_t = ctypes.c_size_t
77 77 _msg_controllen_t = ctypes.c_size_t
78 78 _msg_iovlen_t = ctypes.c_size_t
79 79 else:
80 80 _cmsg_len_t = _socklen_t
81 81 _msg_controllen_t = _socklen_t
82 82 _msg_iovlen_t = ctypes.c_int
83 83
84 84 class _iovec(ctypes.Structure):
85 85 _fields_ = [
86 86 (u'iov_base', ctypes.c_void_p),
87 87 (u'iov_len', ctypes.c_size_t),
88 88 ]
89 89
90 90 class _msghdr(ctypes.Structure):
91 91 _fields_ = [
92 92 (u'msg_name', ctypes.c_void_p),
93 93 (u'msg_namelen', _socklen_t),
94 94 (u'msg_iov', ctypes.POINTER(_iovec)),
95 95 (u'msg_iovlen', _msg_iovlen_t),
96 96 (u'msg_control', ctypes.c_void_p),
97 97 (u'msg_controllen', _msg_controllen_t),
98 98 (u'msg_flags', ctypes.c_int),
99 99 ]
100 100
101 101 class _cmsghdr(ctypes.Structure):
102 102 _fields_ = [
103 103 (u'cmsg_len', _cmsg_len_t),
104 104 (u'cmsg_level', ctypes.c_int),
105 105 (u'cmsg_type', ctypes.c_int),
106 106 (u'cmsg_data', ctypes.c_ubyte * 0),
107 107 ]
108 108
109 109 _libc = ctypes.CDLL(ctypes.util.find_library(u'c'), use_errno=True)
110 110 _recvmsg = getattr(_libc, 'recvmsg', None)
111 111 if _recvmsg:
112 112 _recvmsg.restype = getattr(ctypes, 'c_ssize_t', ctypes.c_long)
113 113 _recvmsg.argtypes = (ctypes.c_int, ctypes.POINTER(_msghdr),
114 114 ctypes.c_int)
115 115 else:
116 116 # recvmsg isn't always provided by libc; such systems are unsupported
117 117 def _recvmsg(sockfd, msg, flags):
118 118 raise NotImplementedError('unsupported platform')
119 119
120 120 def _CMSG_FIRSTHDR(msgh):
121 121 if msgh.msg_controllen < ctypes.sizeof(_cmsghdr):
122 122 return
123 123 cmsgptr = ctypes.cast(msgh.msg_control, ctypes.POINTER(_cmsghdr))
124 124 return cmsgptr.contents
125 125
126 126 # The pure version is less portable than the native version because the
127 127 # handling of socket ancillary data heavily depends on C preprocessor.
128 128 # Also, some length fields are wrongly typed in Linux kernel.
129 129 def recvfds(sockfd):
130 130 """receive list of file descriptors via socket"""
131 131 dummy = (ctypes.c_ubyte * 1)()
132 132 iov = _iovec(ctypes.cast(dummy, ctypes.c_void_p), ctypes.sizeof(dummy))
133 133 cbuf = ctypes.create_string_buffer(256)
134 134 msgh = _msghdr(None, 0,
135 135 ctypes.pointer(iov), 1,
136 136 ctypes.cast(cbuf, ctypes.c_void_p), ctypes.sizeof(cbuf),
137 137 0)
138 138 r = _recvmsg(sockfd, ctypes.byref(msgh), 0)
139 139 if r < 0:
140 140 e = ctypes.get_errno()
141 141 raise OSError(e, os.strerror(e))
142 142 # assumes that the first cmsg has fds because it isn't easy to write
143 143 # portable CMSG_NXTHDR() with ctypes.
144 144 cmsg = _CMSG_FIRSTHDR(msgh)
145 145 if not cmsg:
146 146 return []
147 147 if (cmsg.cmsg_level != socket.SOL_SOCKET or
148 148 cmsg.cmsg_type != _SCM_RIGHTS):
149 149 return []
150 150 rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int))
151 151 rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) /
152 152 ctypes.sizeof(ctypes.c_int))
153 153 return [rfds[i] for i in xrange(rfdscount)]
154 154
155 155 else:
156 156 import msvcrt
157 157
158 158 _kernel32 = ctypes.windll.kernel32
159 159
160 160 _DWORD = ctypes.c_ulong
161 161 _LPCSTR = _LPSTR = ctypes.c_char_p
162 162 _HANDLE = ctypes.c_void_p
163 163
164 164 _INVALID_HANDLE_VALUE = _HANDLE(-1).value
165 165
166 166 # CreateFile
167 167 _FILE_SHARE_READ = 0x00000001
168 168 _FILE_SHARE_WRITE = 0x00000002
169 169 _FILE_SHARE_DELETE = 0x00000004
170 170
171 171 _CREATE_ALWAYS = 2
172 172 _OPEN_EXISTING = 3
173 173 _OPEN_ALWAYS = 4
174 174
175 175 _GENERIC_READ = 0x80000000
176 176 _GENERIC_WRITE = 0x40000000
177 177
178 178 _FILE_ATTRIBUTE_NORMAL = 0x80
179 179
180 180 # open_osfhandle flags
181 181 _O_RDONLY = 0x0000
182 182 _O_RDWR = 0x0002
183 183 _O_APPEND = 0x0008
184 184
185 185 _O_TEXT = 0x4000
186 186 _O_BINARY = 0x8000
187 187
188 188 # types of parameters of C functions used (required by pypy)
189 189
190 190 _kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
191 191 _DWORD, _DWORD, _HANDLE]
192 192 _kernel32.CreateFileA.restype = _HANDLE
193 193
194 194 def _raiseioerror(name):
195 195 err = ctypes.WinError()
196 196 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
197 197
198 198 class posixfile(object):
199 199 '''a file object aiming for POSIX-like semantics
200 200
201 201 CPython's open() returns a file that was opened *without* setting the
202 202 _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
203 203 This even happens if any hardlinked copy of the file is in open state.
204 204 We set _FILE_SHARE_DELETE here, so files opened with posixfile can be
205 205 renamed and deleted while they are held open.
206 206 Note that if a file opened with posixfile is unlinked, the file
207 207 remains but cannot be opened again or be recreated under the same name,
208 208 until all reading processes have closed the file.'''
209 209
210 210 def __init__(self, name, mode='r', bufsize=-1):
211 211 if 'b' in mode:
212 212 flags = _O_BINARY
213 213 else:
214 214 flags = _O_TEXT
215 215
216 216 m0 = mode[0]
217 217 if m0 == 'r' and '+' not in mode:
218 218 flags |= _O_RDONLY
219 219 access = _GENERIC_READ
220 220 else:
221 221 # work around http://support.microsoft.com/kb/899149 and
222 222 # set _O_RDWR for 'w' and 'a', even if mode has no '+'
223 223 flags |= _O_RDWR
224 224 access = _GENERIC_READ | _GENERIC_WRITE
225 225
226 226 if m0 == 'r':
227 227 creation = _OPEN_EXISTING
228 228 elif m0 == 'w':
229 229 creation = _CREATE_ALWAYS
230 230 elif m0 == 'a':
231 231 creation = _OPEN_ALWAYS
232 232 flags |= _O_APPEND
233 233 else:
234 234 raise ValueError("invalid mode: %s" % mode)
235 235
236 236 fh = _kernel32.CreateFileA(name, access,
237 237 _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
238 238 None, creation, _FILE_ATTRIBUTE_NORMAL, None)
239 239 if fh == _INVALID_HANDLE_VALUE:
240 240 _raiseioerror(name)
241 241
242 242 fd = msvcrt.open_osfhandle(fh, flags)
243 243 if fd == -1:
244 244 _kernel32.CloseHandle(fh)
245 245 _raiseioerror(name)
246 246
247 247 f = os.fdopen(fd, pycompat.sysstr(mode), bufsize)
248 248 # unfortunately, f.name is '<fdopen>' at this point -- so we store
249 249 # the name on this wrapper. We cannot just assign to f.name,
250 250 # because that attribute is read-only.
251 251 object.__setattr__(self, r'name', name)
252 252 object.__setattr__(self, r'_file', f)
253 253
254 254 def __iter__(self):
255 255 return self._file
256 256
257 257 def __getattr__(self, name):
258 258 return getattr(self._file, name)
259 259
260 260 def __setattr__(self, name, value):
261 261 '''mimics the read-only attributes of Python file objects
262 262 by raising 'TypeError: readonly attribute' if someone tries:
263 263 f = posixfile('foo.txt')
264 264 f.name = 'bla' '''
265 265 return self._file.__setattr__(name, value)
266 266
267 267 def __enter__(self):
268 268 return self._file.__enter__()
269 269
270 270 def __exit__(self, exc_type, exc_value, exc_tb):
271 271 return self._file.__exit__(exc_type, exc_value, exc_tb)
@@ -1,98 +1,98
1 1 # rcutil.py - utilities about config paths, special config sections etc.
2 2 #
3 3 # Copyright Mercurial Contributors
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11
12 12 from . import (
13 13 encoding,
14 14 pycompat,
15 15 util,
16 16 )
17 17
18 if pycompat.osname == 'nt':
18 if pycompat.iswindows:
19 19 from . import scmwindows as scmplatform
20 20 else:
21 21 from . import scmposix as scmplatform
22 22
23 23 fallbackpager = scmplatform.fallbackpager
24 24 systemrcpath = scmplatform.systemrcpath
25 25 userrcpath = scmplatform.userrcpath
26 26
def _expandrcpath(path):
    '''path could be a file or a directory. return a list of file paths'''
    expanded = util.expandpath(path)
    if not os.path.isdir(expanded):
        # a plain file path (possibly nonexistent): return it as-is
        return [expanded]
    # a directory: pick up every *.rc file directly inside it
    return [os.path.join(expanded, name)
            for name, kind in util.listdir(expanded)
            if name.endswith('.rc')]
34 34
def envrcitems(env=None):
    '''Return [(section, name, value, source)] config items.

    The config items are extracted from environment variables specified by env,
    used to override systemrc, but not userrc.

    If env is not provided, encoding.environ will be used.
    '''
    if env is None:
        env = encoding.environ
    # environment variable -> (section, name) it maps onto, in priority order
    checklist = [
        ('EDITOR', 'ui', 'editor'),
        ('VISUAL', 'ui', 'editor'),
        ('PAGER', 'pager', 'pager'),
    ]
    return [(section, configname, env[envname], '$%s' % envname)
            for envname, section, configname in checklist
            if envname in env]
56 56
def defaultrcpath():
    '''return rc paths in default.d'''
    defaultpath = os.path.join(util.datapath, 'default.d')
    if not os.path.isdir(defaultpath):
        # no bundled default.d directory: nothing to load
        return []
    return _expandrcpath(defaultpath)
64 64
def rccomponents():
    '''return an ordered [(type, obj)] about where to load configs.

    respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is
    used. if $HGRCPATH is not set, the platform default will be used.

    if a directory is provided, *.rc files under it will be used.

    type could be either 'path' or 'items', if type is 'path', obj is a string,
    and is the config file path. if type is 'items', obj is a list of (section,
    name, value, source) that should fill the config directly.
    '''
    envrc = ('items', envrcitems())

    if 'HGRCPATH' not in encoding.environ:
        normpaths = lambda paths: [('path', os.path.normpath(p)) for p in paths]
        components = normpaths(defaultrcpath() + systemrcpath())
        components.append(envrc)
        components.extend(normpaths(userrcpath()))
        return components

    # assume HGRCPATH is all about user configs so environments can be
    # overridden.
    components = [envrc]
    for entry in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
        if not entry:
            continue
        components.extend(('path', rcfile) for rcfile in _expandrcpath(entry))
    return components
93 93
def defaultpagerenv():
    '''return a dict of default environment variables and their values,
    intended to be set before starting a pager.
    '''
    return {'LV': '-c', 'LESS': 'FRX'}
@@ -1,1228 +1,1228
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 if pycompat.osname == 'nt':
44 if pycompat.iswindows:
45 45 from . import scmwindows as scmplatform
46 46 else:
47 47 from . import scmposix as scmplatform
48 48
49 49 termsize = scmplatform.termsize
50 50
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
103 103
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs for every subrepo path mentioned in
    either context's substate.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # sorted() keeps the yield order deterministic
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
128 128
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        # count the changesets that were held back because they are secret
        # (and not already extinct)
        for node in excluded:
            ctx = repo[node]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(node)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
145 145
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print a traceback (if enabled) before the handlers below
            # turn the exception into a message + exit code
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # the only handler (besides SystemExit) that does not return -1:
        # "intervention required" is exit code 1
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # last word of the message is the module that failed to import
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-style exceptions carry a 'code' attribute
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-style exceptions carry a 'reason'
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe: the reader went away; stay silent
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
256 256
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not usable as a new label (bookmark/branch/tag name).'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        # not an integer: acceptable
        return
    raise error.Abort(_("cannot use an integer as a name"))
270 270
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newline characters would corrupt the dirstate/manifest encoding
    for forbidden in ('\r', '\n'):
        if forbidden in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
275 275
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        # name is portable: nothing to report
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
287 287
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.
    '''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    # on Windows non-portable names are fatal, so always abort there
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
300 300
class casecollisionauditor(object):
    '''Warn or abort when a new filename differs from a tracked file only
    by case (a collision on case-insensitive filesystems).'''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lowercase all tracked names in one encoding.lower() call by
        # joining them with NUL and splitting the result again
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # collision: another file lowers to the same name, and f itself is
        # not already tracked under this exact spelling
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
324 324
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    filtered = repo.changelog.filteredrevs
    if not filtered:
        return None
    revs = sorted(r for r in filtered if r <= maxrev)
    if not revs:
        return None
    digester = hashlib.sha1()
    for rev in revs:
        digester.update('%d;' % rev)
    return digester.digest()
348 348
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the root path itself
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # remember dirname's stat in dirlst; return False if a directory
            # with the same identity was already seen (symlink loop guard)
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot detect symlink cycles, so refuse to
        # follow symlinks at all
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk symlinked dirs out-of-band so os.walk does
                        # not revisit them
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
396 396
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # the working directory has no real node; substitute the wdir pseudo-id
    return wdirid if node is None else node
403 403
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # the working directory has no revision number; use the wdir pseudo-rev
    return wdirrev if rev is None else rev
411 411
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    repo = ctx.repo()
    # delegate so verbosity-dependent formatting lives in one place
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
417 417
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full hash in debug mode, abbreviated hash otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
425 425
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single revspec to one changectx (default when empty).'''
    # note: 0 is a valid revision and must not fall back to the default
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
434 434
def _pairspec(revspec):
    # True if the top-level construct of revspec is a range expression
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
438 438
def revpair(repo, revs):
    '''Resolve a list of revspecs to a (firstnode, secondnode) pair.

    secondnode is None when only a single revision was effectively
    requested (and the spec was not an explicit range).
    '''
    if not revs:
        # no revs given: first parent of the working directory
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # one of several specs resolved to nothing while the endpoints collapsed
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
468 468
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    def _asspec(spec):
        # bare integers are revision numbers; wrap them in a rev() revset
        if isinstance(spec, int):
            return revsetlang.formatspec('rev(%d)', spec)
        return spec

    allspecs = [_asspec(spec) for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
496 496
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a merge: both parents are meaningful
        return parents
    if repo.ui.debugflag:
        # debug output always shows a full parent pair
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # parent is simply the preceding revision: omit it
        return []
    return parents
512 512
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind ('glob:', 're:', ...): leave untouched
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            # invalid pattern for glob: fall back to the literal name
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # glob matched nothing: keep the original pattern
            ret.append(kindpat)
    return ret
531 531
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # default bad-file callback; note: closes over 'm', which is
        # assigned below before the matcher can invoke it
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # matcher matches everything: report no effective patterns
        pats = []
    return m, pats
556 556
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # thin wrapper over matchandpats that discards the pattern list
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
561 561
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())
565 565
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
569 569
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Note: this may create directories and remove conflicting files/dirs
    under the configured backup path as a side effect.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
605 605
606 606 class _containsnode(object):
607 607 """proxy __contains__(node) to container.__contains__ which accepts revs"""
608 608
609 609 def __init__(self, repo, revcontainer):
610 610 self._torev = repo.changelog.rev
611 611 self._revcontains = revcontainer.__contains__
612 612
613 613 def __contains__(self, node):
614 614 return self._revcontains(self._torev(node))
615 615
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # explicit move provided by the caller wins
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
704 704
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal,
    recording renames detected by similarity.

    Returns 1 if any explicitly-named file was rejected or a subrepo
    addremove failed, 0 otherwise.
    '''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # recurse into subrepos first
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # collect files the matcher reports as bad; only explicitly-named
        # ones produce a warning
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
760 760
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # note: the badfn lambda closes over 'rejected', which is assigned on
    # the next line before the matcher can invoke the callback
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
789 789
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and the path passes the audit: candidate for add
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing on disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
818 818
819 819 def _findrenames(repo, matcher, added, removed, similarity):
820 820 '''Find renames from removed files to added ones.'''
821 821 renames = {}
822 822 if similarity > 0:
823 823 for old, new, score in similar.findrenames(repo, added, removed,
824 824 similarity):
825 825 if (repo.ui.verbose or not matcher.exact(old)
826 826 or not matcher.exact(new)):
827 827 repo.ui.status(_('recording removal of %s as rename to %s '
828 828 '(%d%% similar)\n') %
829 829 (matcher.rel(old), matcher.rel(new),
830 830 score * 100))
831 831 renames[new] = old
832 832 return renames
833 833
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.

    renames is a dict mapping new name -> old name.  All dirstate
    mutations happen under the working-copy lock.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
843 843
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.

    Notably: copying back over the copy source only resets the dirstate
    entry, and copies from an uncommitted (added) source record no copy
    data at all (a warning is printed instead).
    """
    # follow an existing copy chain back to its true origin, if any
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'm'erged / 'n'ormal need no fixup; anything else is re-normalized
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # an added source can't carry copy metadata; just add dst
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
862 862
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements on success.  Raises RequirementError
    when the file is corrupt or lists features unknown to this
    Mercurial.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for entry in requirements:
        if entry in supported:
            continue
        # a requirement must be a non-empty token starting with an
        # alphanumeric character; anything else means a corrupt file
        if not entry or not entry[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(entry)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
881 881
def writerequires(opener, requirements):
    """Write the given requirements, one per line and sorted, to
    .hg/requires via the supplied opener."""
    lines = ['%s\n' % req for req in sorted(requirements)]
    with opener('requires', 'w') as fp:
        fp.write(''.join(lines))
886 886
class filecachesubentry(object):
    """Stat-based change tracking for a single file path.

    ``cachestat`` holds the most recent stat data (or None), and
    ``_cacheable`` records whether the filesystem can reliably report
    replacement of this file; None means "not known yet".
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            # a successful stat tells us whether stat data is trustworthy;
            # otherwise leave _cacheable as None (unknown)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-stat the path, but only when stat data is trustworthy."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether stat data can detect replacement; assume yes if unknown."""
        return True if self._cacheable is None else self._cacheable

    def changed(self):
        """True if the file changed since the last stat, or can't be cached."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # still unknown, or known-uncacheable: treat as changed
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return util.cachestat(path), or None if the file is missing."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
941 941
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        # ``stat`` controls whether each sub-entry stats its path now
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
958 958
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    Invariant maintained below: self.name in obj.__dict__ implies
    self.name in obj._filecache.
    '''
    def __init__(self, *paths):
        # relative paths; resolved via self.join(obj, path) at access time
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # obj.__dict__ is the per-instance fast path: a hit means __set__
        # or a previous __get__ already stored the value
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # stat=False: the value was set programmatically, not read
            # from disk, so current on-disk stat data would be misleading
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1037 1037
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.

    Raises Abort if no [extdata] config entry exists for ``source``.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # "<revspec>[ <value>]" -- a missing value becomes ""
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child and close the stream, even on parse errors
        if proc:
            proc.communicate()
            if proc.returncode != 0:
                # not an error so 'cmd | grep' can be empty
                repo.ui.debug("extdata command '%s' %s\n"
                              % (cmd, util.explainexit(proc.returncode)[0]))
        if src:
            src.close()

    return data
1093 1093
1094 1094 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1095 1095 if lock is None:
1096 1096 raise error.LockInheritanceContractViolation(
1097 1097 'lock can only be inherited while held')
1098 1098 if environ is None:
1099 1099 environ = {}
1100 1100 with lock.inherit() as locker:
1101 1101 environ[envvar] = locker
1102 1102 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1103 1103
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1112 1112
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta

    True when either format.generaldelta or format.usegeneraldelta is set.
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1119 1119
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised

    Reflects the format.generaldelta config knob only.
    """
    # experimental config: format.generaldelta
    value = ui.configbool('format', 'generaldelta')
    return value
1125 1125
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file into a dict.

        When ``firstlinenonkeyval`` is true, the first line of the file is
        not parsed as a key=value pair but returned verbatim (sans newline)
        under the ``__firstline`` key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # 'if line.strip()' skips blank lines, which contain only '\n'
            # and would therefore not be skipped by a bare 'if line'
            parsed = dict(line[:-1].split('=', 1) for line in lines
                          if line.strip())
            if self.firstlinekey in parsed:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            result.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = []
        if firstline is not None:
            out.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not key[0].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not key.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in value:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1194 1194
# Transaction-name prefixes after which a summary of obsoleted changesets
# is reported; consumed by registersummarycallback() below.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]
1202 1202
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Callbacks are attached to ``otr`` as post-close hooks; which summaries
    get registered depends on ``txnname``.
    """
    def txmatch(sources):
        # does the transaction name start with any of the given prefixes?
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # NOTE(review): only a weak reference to the repo is kept --
        # presumably so the transaction's callback list does not keep the
        # repository object alive; confirm wrapped() tolerates the ref
        # having been collected (func would then receive None).
        reporef = weakref.ref(repo)
        def wrapped(tr):
            repo = reporef()
            func(repo, tr)
        # category names embed the registration index so callbacks run
        # in registration order
        newcat = '%2i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))
@@ -1,865 +1,865
1 1 # sslutil.py - SSL handling for mercurial
2 2 #
3 3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 from __future__ import absolute_import
11 11
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import ssl
16 16
17 17 from .i18n import _
18 18 from . import (
19 19 error,
20 20 pycompat,
21 21 util,
22 22 )
23 23
24 24 # Python 2.7.9+ overhauled the built-in SSL/TLS features of Python. It added
25 25 # support for TLS 1.1, TLS 1.2, SNI, system CA stores, etc. These features are
26 26 # all exposed via the "ssl" module.
27 27 #
28 28 # Depending on the version of Python being used, SSL/TLS support is either
29 29 # modern/secure or legacy/insecure. Many operations in this module have
30 30 # separate code paths depending on support in Python.
31 31
# Config values accepted by the various minimum-protocol settings
# (hostsecurity.minimumprotocol and friends).
configprotocols = {
    'tls1.0',
    'tls1.1',
    'tls1.2',
}

# Whether the ssl module supports Server Name Indication.
hassni = getattr(ssl, 'HAS_SNI', False)

# TLS 1.1 and 1.2 may not be supported if the OpenSSL Python is compiled
# against doesn't support them.
supportedprotocols = {'tls1.0'}
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
    supportedprotocols.add('tls1.1')
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
    supportedprotocols.add('tls1.2')
47 47
try:
    # ssl.SSLContext was added in 2.7.9 and presence indicates modern
    # SSL/TLS features are available.
    SSLContext = ssl.SSLContext
    modernssl = True
    _canloaddefaultcerts = util.safehasattr(SSLContext, 'load_default_certs')
except AttributeError:
    modernssl = False
    _canloaddefaultcerts = False

    # We implement SSLContext using the interface from the standard library.
    class SSLContext(object):
        """Minimal stand-in for ssl.SSLContext on pre-2.7.9 Pythons.

        Records the settings callers make and replays them through the
        legacy ssl.wrap_socket() API.  Features wrap_socket() cannot
        express (SNI, certificate key passwords) are silently dropped.
        """
        def __init__(self, protocol):
            # From the public interface of SSLContext
            self.protocol = protocol
            self.check_hostname = False
            self.options = 0
            self.verify_mode = ssl.CERT_NONE

            # Used by our implementation.
            self._certfile = None
            self._keyfile = None
            self._certpassword = None
            self._cacerts = None
            self._ciphers = None

        def load_cert_chain(self, certfile, keyfile=None, password=None):
            # NOTE(review): ``password`` is recorded but never consumed --
            # ssl.wrap_socket() has no password parameter, so encrypted
            # keys cannot be used on this legacy path.
            self._certfile = certfile
            self._keyfile = keyfile
            self._certpassword = password

        def load_default_certs(self, purpose=None):
            # legacy ssl has no notion of system CA stores; no-op
            pass

        def load_verify_locations(self, cafile=None, capath=None, cadata=None):
            if capath:
                raise error.Abort(_('capath not supported'))
            if cadata:
                raise error.Abort(_('cadata not supported'))

            self._cacerts = cafile

        def set_ciphers(self, ciphers):
            self._ciphers = ciphers

        def wrap_socket(self, socket, server_hostname=None, server_side=False):
            # server_hostname is unique to SSLContext.wrap_socket and is used
            # for SNI in that context. So there's nothing for us to do with it
            # in this legacy code since we don't support SNI.

            args = {
                'keyfile': self._keyfile,
                'certfile': self._certfile,
                'server_side': server_side,
                'cert_reqs': self.verify_mode,
                'ssl_version': self.protocol,
                'ca_certs': self._cacerts,
                'ciphers': self._ciphers,
            }

            return ssl.wrap_socket(socket, **args)
109 109
def _hostsettings(ui, hostname):
    """Obtain security settings for a hostname.

    Returns a dict of settings relevant to that hostname.

    Resolution draws on the [hostsecurity] and legacy [hostfingerprints]
    config sections, web.cacerts, and --insecure; the asserts at the end
    guarantee 'protocol', 'ctxoptions' and 'verifymode' are always filled
    in for callers.
    """
    s = {
        # Whether we should attempt to load default/available CA certs
        # if an explicit ``cafile`` is not defined.
        'allowloaddefaultcerts': True,
        # List of 2-tuple of (hash algorithm, hash).
        'certfingerprints': [],
        # Path to file containing concatenated CA certs. Used by
        # SSLContext.load_verify_locations().
        'cafile': None,
        # Whether certificate verification should be disabled.
        'disablecertverification': False,
        # Whether the legacy [hostfingerprints] section has data for this host.
        'legacyfingerprint': False,
        # PROTOCOL_* constant to use for SSLContext.__init__.
        'protocol': None,
        # String representation of minimum protocol to be used for UI
        # presentation.
        'protocolui': None,
        # ssl.CERT_* constant used by SSLContext.verify_mode.
        'verifymode': None,
        # Defines extra ssl.OP* bitwise options to set.
        'ctxoptions': None,
        # OpenSSL Cipher List to use (instead of default).
        'ciphers': None,
    }

    # Allow minimum TLS protocol to be specified in the config.
    def validateprotocol(protocol, key):
        if protocol not in configprotocols:
            raise error.Abort(
                _('unsupported protocol from hostsecurity.%s: %s') %
                (key, protocol),
                hint=_('valid protocols: %s') %
                     ' '.join(sorted(configprotocols)))

    # We default to TLS 1.1+ where we can because TLS 1.0 has known
    # vulnerabilities (like BEAST and POODLE). We allow users to downgrade to
    # TLS 1.0+ via config options in case a legacy server is encountered.
    if 'tls1.1' in supportedprotocols:
        defaultprotocol = 'tls1.1'
    else:
        # Let people know they are borderline secure.
        # We don't document this config option because we want people to see
        # the bold warnings on the web site.
        # internal config: hostsecurity.disabletls10warning
        if not ui.configbool('hostsecurity', 'disabletls10warning'):
            ui.warn(_('warning: connecting to %s using legacy security '
                      'technology (TLS 1.0); see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'more info\n') % hostname)
        defaultprotocol = 'tls1.0'

    # global setting first, then the per-host override on top of it
    key = 'minimumprotocol'
    protocol = ui.config('hostsecurity', key, defaultprotocol)
    validateprotocol(protocol, key)

    key = '%s:minimumprotocol' % hostname
    protocol = ui.config('hostsecurity', key, protocol)
    validateprotocol(protocol, key)

    # If --insecure is used, we allow the use of TLS 1.0 despite config options.
    # We always print a "connection security to %s is disabled..." message when
    # --insecure is used. So no need to print anything more here.
    if ui.insecureconnections:
        protocol = 'tls1.0'

    s['protocol'], s['ctxoptions'], s['protocolui'] = protocolsettings(protocol)

    ciphers = ui.config('hostsecurity', 'ciphers')
    ciphers = ui.config('hostsecurity', '%s:ciphers' % hostname, ciphers)
    s['ciphers'] = ciphers

    # Look for fingerprints in [hostsecurity] section. Value is a list
    # of <alg>:<fingerprint> strings.
    fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % hostname,
                                 [])
    for fingerprint in fingerprints:
        if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
            raise error.Abort(_('invalid fingerprint for %s: %s') % (
                                  hostname, fingerprint),
                              hint=_('must begin with "sha1:", "sha256:", '
                                     'or "sha512:"'))

        # normalize "aa:bb:..." colon-separated hex into bare lowercase hex
        alg, fingerprint = fingerprint.split(':', 1)
        fingerprint = fingerprint.replace(':', '').lower()
        s['certfingerprints'].append((alg, fingerprint))

    # Fingerprints from [hostfingerprints] are always SHA-1.
    for fingerprint in ui.configlist('hostfingerprints', hostname, []):
        fingerprint = fingerprint.replace(':', '').lower()
        s['certfingerprints'].append(('sha1', fingerprint))
        s['legacyfingerprint'] = True

    # If a host cert fingerprint is defined, it is the only thing that
    # matters. No need to validate CA certs.
    if s['certfingerprints']:
        s['verifymode'] = ssl.CERT_NONE
        s['allowloaddefaultcerts'] = False

    # If --insecure is used, don't take CAs into consideration.
    elif ui.insecureconnections:
        s['disablecertverification'] = True
        s['verifymode'] = ssl.CERT_NONE
        s['allowloaddefaultcerts'] = False

    if ui.configbool('devel', 'disableloaddefaultcerts'):
        s['allowloaddefaultcerts'] = False

    # If both fingerprints and a per-host ca file are specified, issue a warning
    # because users should not be surprised about what security is or isn't
    # being performed.
    cafile = ui.config('hostsecurity', '%s:verifycertsfile' % hostname)
    if s['certfingerprints'] and cafile:
        ui.warn(_('(hostsecurity.%s:verifycertsfile ignored when host '
                  'fingerprints defined; using host fingerprints for '
                  'verification)\n') % hostname)

    # Try to hook up CA certificate validation unless something above
    # makes it not necessary.
    if s['verifymode'] is None:
        # Look at per-host ca file first.
        if cafile:
            cafile = util.expandpath(cafile)
            if not os.path.exists(cafile):
                raise error.Abort(_('path specified by %s does not exist: %s') %
                                  ('hostsecurity.%s:verifycertsfile' % hostname,
                                   cafile))
            s['cafile'] = cafile
        else:
            # Find global certificates file in config.
            cafile = ui.config('web', 'cacerts')

            if cafile:
                cafile = util.expandpath(cafile)
                if not os.path.exists(cafile):
                    raise error.Abort(_('could not find web.cacerts: %s') %
                                      cafile)
            elif s['allowloaddefaultcerts']:
                # CAs not defined in config. Try to find system bundles.
                cafile = _defaultcacerts(ui)
                if cafile:
                    ui.debug('using %s for CA file\n' % cafile)

            s['cafile'] = cafile

        # Require certificate validation if CA certs are being loaded and
        # verification hasn't been disabled above.
        if cafile or (_canloaddefaultcerts and s['allowloaddefaultcerts']):
            s['verifymode'] = ssl.CERT_REQUIRED
        else:
            # At this point we don't have a fingerprint, aren't being
            # explicitly insecure, and can't load CA certs. Connecting
            # is insecure. We allow the connection and abort during
            # validation (once we have the fingerprint to print to the
            # user).
            s['verifymode'] = ssl.CERT_NONE

    assert s['protocol'] is not None
    assert s['ctxoptions'] is not None
    assert s['verifymode'] is not None

    return s
277 277
def protocolsettings(protocol):
    """Resolve the protocol for a config value.

    Returns a 3-tuple of (protocol, options, ui value) where the first
    2 items are values used by SSLContext and the last is a string value
    of the ``minimumprotocol`` config option equivalent.
    """
    if protocol not in configprotocols:
        raise ValueError('protocol value not supported: %s' % protocol)

    # Despite its name, PROTOCOL_SSLv23 negotiates the highest protocol
    # both ends support; specific TLS versions are then masked off below
    # via SSLContext.options and OP_NO_* constants.  That masking needs a
    # real SSLContext, which legacy Python stacks lack -- there the only
    # expressible setting is exactly TLS 1.0 (PROTOCOL_TLSv1).
    if supportedprotocols == {'tls1.0'}:
        if protocol != 'tls1.0':
            raise error.Abort(_('current Python does not support protocol '
                                'setting %s') % protocol,
                              hint=_('upgrade Python or disable setting since '
                                     'only TLS 1.0 is supported'))
        return ssl.PROTOCOL_TLSv1, 0, 'tls1.0'

    # WARNING: returned options don't work unless the modern ssl module
    # is available. Be careful when adding options here.

    # SSLv2 and SSLv3 are broken. We ban them outright.
    options = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3

    # mask off every TLS version below the requested minimum
    if protocol == 'tls1.2':
        options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
    elif protocol == 'tls1.1':
        options |= ssl.OP_NO_TLSv1
    elif protocol != 'tls1.0':
        # unreachable: validated against configprotocols above
        raise error.Abort(_('this should not happen'))

    # Prevent CRIME.
    # There is no guarantee this attribute is defined on the module.
    options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)

    return ssl.PROTOCOL_SSLv23, options, protocol
329 329
330 330 def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
331 331 """Add SSL/TLS to a socket.
332 332
333 333 This is a glorified wrapper for ``ssl.wrap_socket()``. It makes sane
334 334 choices based on what security options are available.
335 335
336 336 In addition to the arguments supported by ``ssl.wrap_socket``, we allow
337 337 the following additional arguments:
338 338
339 339 * serverhostname - The expected hostname of the remote server. If the
340 340 server (and client) support SNI, this tells the server which certificate
341 341 to use.
342 342 """
343 343 if not serverhostname:
344 344 raise error.Abort(_('serverhostname argument is required'))
345 345
346 346 for f in (keyfile, certfile):
347 347 if f and not os.path.exists(f):
348 348 raise error.Abort(_('certificate file (%s) does not exist; '
349 349 'cannot connect to %s') % (f, serverhostname),
350 350 hint=_('restore missing file or fix references '
351 351 'in Mercurial config'))
352 352
353 353 settings = _hostsettings(ui, serverhostname)
354 354
355 355 # We can't use ssl.create_default_context() because it calls
356 356 # load_default_certs() unless CA arguments are passed to it. We want to
357 357 # have explicit control over CA loading because implicitly loading
358 358 # CAs may undermine the user's intent. For example, a user may define a CA
359 359 # bundle with a specific CA cert removed. If the system/default CA bundle
360 360 # is loaded and contains that removed CA, you've just undone the user's
361 361 # choice.
362 362 sslcontext = SSLContext(settings['protocol'])
363 363
364 364 # This is a no-op unless using modern ssl.
365 365 sslcontext.options |= settings['ctxoptions']
366 366
367 367 # This still works on our fake SSLContext.
368 368 sslcontext.verify_mode = settings['verifymode']
369 369
370 370 if settings['ciphers']:
371 371 try:
372 372 sslcontext.set_ciphers(settings['ciphers'])
373 373 except ssl.SSLError as e:
374 374 raise error.Abort(_('could not set ciphers: %s') % e.args[0],
375 375 hint=_('change cipher string (%s) in config') %
376 376 settings['ciphers'])
377 377
378 378 if certfile is not None:
379 379 def password():
380 380 f = keyfile or certfile
381 381 return ui.getpass(_('passphrase for %s: ') % f, '')
382 382 sslcontext.load_cert_chain(certfile, keyfile, password)
383 383
384 384 if settings['cafile'] is not None:
385 385 try:
386 386 sslcontext.load_verify_locations(cafile=settings['cafile'])
387 387 except ssl.SSLError as e:
388 388 if len(e.args) == 1: # pypy has different SSLError args
389 389 msg = e.args[0]
390 390 else:
391 391 msg = e.args[1]
392 392 raise error.Abort(_('error loading CA file %s: %s') % (
393 393 settings['cafile'], msg),
394 394 hint=_('file is empty or malformed?'))
395 395 caloaded = True
396 396 elif settings['allowloaddefaultcerts']:
397 397 # This is a no-op on old Python.
398 398 sslcontext.load_default_certs()
399 399 caloaded = True
400 400 else:
401 401 caloaded = False
402 402
403 403 try:
404 404 sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
405 405 except ssl.SSLError as e:
406 406 # If we're doing certificate verification and no CA certs are loaded,
407 407 # that is almost certainly the reason why verification failed. Provide
408 408 # a hint to the user.
409 409 # Only modern ssl module exposes SSLContext.get_ca_certs() so we can
410 410 # only show this warning if modern ssl is available.
411 411 # The exception handler is here to handle bugs around cert attributes:
412 412 # https://bugs.python.org/issue20916#msg213479. (See issues5313.)
413 413 # When the main 20916 bug occurs, 'sslcontext.get_ca_certs()' is a
414 414 # non-empty list, but the following conditional is otherwise True.
415 415 try:
416 416 if (caloaded and settings['verifymode'] == ssl.CERT_REQUIRED and
417 417 modernssl and not sslcontext.get_ca_certs()):
418 418 ui.warn(_('(an attempt was made to load CA certificates but '
419 419 'none were loaded; see '
420 420 'https://mercurial-scm.org/wiki/SecureConnections '
421 421 'for how to configure Mercurial to avoid this '
422 422 'error)\n'))
423 423 except ssl.SSLError:
424 424 pass
425 425 # Try to print more helpful error messages for known failures.
426 426 if util.safehasattr(e, 'reason'):
427 427 # This error occurs when the client and server don't share a
428 428 # common/supported SSL/TLS protocol. We've disabled SSLv2 and SSLv3
429 429 # outright. Hopefully the reason for this error is that we require
430 430 # TLS 1.1+ and the server only supports TLS 1.0. Whatever the
431 431 # reason, try to emit an actionable warning.
432 432 if e.reason == 'UNSUPPORTED_PROTOCOL':
433 433 # We attempted TLS 1.0+.
434 434 if settings['protocolui'] == 'tls1.0':
435 435 # We support more than just TLS 1.0+. If this happens,
436 436 # the likely scenario is either the client or the server
437 437 # is really old. (e.g. server doesn't support TLS 1.0+ or
438 438 # client doesn't support modern TLS versions introduced
439 439 # several years from when this comment was written).
440 440 if supportedprotocols != {'tls1.0'}:
441 441 ui.warn(_(
442 442 '(could not communicate with %s using security '
443 443 'protocols %s; if you are using a modern Mercurial '
444 444 'version, consider contacting the operator of this '
445 445 'server; see '
446 446 'https://mercurial-scm.org/wiki/SecureConnections '
447 447 'for more info)\n') % (
448 448 serverhostname,
449 449 ', '.join(sorted(supportedprotocols))))
450 450 else:
451 451 ui.warn(_(
452 452 '(could not communicate with %s using TLS 1.0; the '
453 453 'likely cause of this is the server no longer '
454 454 'supports TLS 1.0 because it has known security '
455 455 'vulnerabilities; see '
456 456 'https://mercurial-scm.org/wiki/SecureConnections '
457 457 'for more info)\n') % serverhostname)
458 458 else:
459 459 # We attempted TLS 1.1+. We can only get here if the client
460 460 # supports the configured protocol. So the likely reason is
461 461 # the client wants better security than the server can
462 462 # offer.
463 463 ui.warn(_(
464 464 '(could not negotiate a common security protocol (%s+) '
465 465 'with %s; the likely cause is Mercurial is configured '
466 466 'to be more secure than the server can support)\n') % (
467 467 settings['protocolui'], serverhostname))
468 468 ui.warn(_('(consider contacting the operator of this '
469 469 'server and ask them to support modern TLS '
470 470 'protocol versions; or, set '
471 471 'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
472 472 'use of legacy, less secure protocols when '
473 473 'communicating with this server)\n') %
474 474 serverhostname)
475 475 ui.warn(_(
476 476 '(see https://mercurial-scm.org/wiki/SecureConnections '
477 477 'for more info)\n'))
478 478
479 479 elif (e.reason == 'CERTIFICATE_VERIFY_FAILED' and
480 pycompat.osname == 'nt'):
480 pycompat.iswindows):
481 481
482 482 ui.warn(_('(the full certificate chain may not be available '
483 483 'locally; see "hg help debugssl")\n'))
484 484 raise
485 485
486 486 # check if wrap_socket failed silently because socket had been
487 487 # closed
488 488 # - see http://bugs.python.org/issue13721
489 489 if not sslsocket.cipher():
490 490 raise error.Abort(_('ssl connection failed'))
491 491
492 492 sslsocket._hgstate = {
493 493 'caloaded': caloaded,
494 494 'hostname': serverhostname,
495 495 'settings': settings,
496 496 'ui': ui,
497 497 }
498 498
499 499 return sslsocket
500 500
def wrapserversocket(sock, ui, certfile=None, keyfile=None, cafile=None,
                     requireclientcert=False):
    """Wrap a socket for use by servers.

    ``certfile`` and ``keyfile`` specify the files containing the certificate's
    public and private keys, respectively. Both keys can be defined in the same
    file via ``certfile`` (the private key must come first in the file).

    ``cafile`` defines the path to certificate authorities.

    ``requireclientcert`` specifies whether to require client certificates.

    Typically ``cafile`` is only defined if ``requireclientcert`` is true.

    Raises error.Abort when a referenced certificate file is missing or when
    ``devel.serverexactprotocol`` names an unsupported protocol.
    """
    # This function is not used much by core Mercurial, so the error messaging
    # doesn't have to be as detailed as for wrapsocket().
    for f in (certfile, keyfile, cafile):
        if f and not os.path.exists(f):
            raise error.Abort(_('referenced certificate file (%s) does not '
                                'exist') % f)

    # Default policy: TLS 1.0+ with the module-level hardening options.
    protocol, options, _protocolui = protocolsettings('tls1.0')

    # This config option is intended for use in tests only. It is a giant
    # footgun to kill security. Don't define it.
    exactprotocol = ui.config('devel', 'serverexactprotocol')
    if exactprotocol == 'tls1.0':
        protocol = ssl.PROTOCOL_TLSv1
    elif exactprotocol == 'tls1.1':
        if 'tls1.1' not in supportedprotocols:
            raise error.Abort(_('TLS 1.1 not supported by this Python'))
        protocol = ssl.PROTOCOL_TLSv1_1
    elif exactprotocol == 'tls1.2':
        if 'tls1.2' not in supportedprotocols:
            raise error.Abort(_('TLS 1.2 not supported by this Python'))
        protocol = ssl.PROTOCOL_TLSv1_2
    elif exactprotocol:
        raise error.Abort(_('invalid value for serverexactprotocol: %s') %
                          exactprotocol)

    if modernssl:
        # We /could/ use create_default_context() here since it doesn't load
        # CAs when configured for client auth. However, it is hard-coded to
        # use ssl.PROTOCOL_SSLv23 which may not be appropriate here.
        sslcontext = SSLContext(protocol)
        sslcontext.options |= options

        # Improve forward secrecy.
        sslcontext.options |= getattr(ssl, 'OP_SINGLE_DH_USE', 0)
        sslcontext.options |= getattr(ssl, 'OP_SINGLE_ECDH_USE', 0)

        # Use the list of more secure ciphers if found in the ssl module.
        if util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'):
            sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0)
            # NOTE: _RESTRICTED_SERVER_CIPHERS is a private CPython attribute;
            # guarded by the safehasattr() check above.
            sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS)
    else:
        # Legacy ssl module: fall back to a plain TLS 1.0 context.
        sslcontext = SSLContext(ssl.PROTOCOL_TLSv1)

    if requireclientcert:
        sslcontext.verify_mode = ssl.CERT_REQUIRED
    else:
        sslcontext.verify_mode = ssl.CERT_NONE

    if certfile or keyfile:
        sslcontext.load_cert_chain(certfile=certfile, keyfile=keyfile)

    if cafile:
        sslcontext.load_verify_locations(cafile=cafile)

    return sslcontext.wrap_socket(sock, server_side=True)
571 571
class wildcarderror(Exception):
    """Raised when wildcard parsing of a certificate DNS name fails."""
574 574
def _dnsnamematch(dn, hostname, maxwildcards=1):
    """Match DNS names according RFC 6125 section 6.4.3.

    This code is effectively copied from CPython's ssl._dnsname_match.

    Returns a bool indicating whether the expected hostname matches
    the value in ``dn``.
    """
    if not dn:
        return False

    fragments = dn.split(r'.')
    first, rest = fragments[0], fragments[1:]

    nwildcards = first.count('*')
    if nwildcards > maxwildcards:
        raise wildcarderror(
            _('too many wildcards in certificate DNS name: %s') % dn)

    # Fast path: a plain case-insensitive comparison when no wildcard
    # is present.
    if not nwildcards:
        return dn.lower() == hostname.lower()

    # RFC 6125, section 6.4.3, subitem 1.
    # The client SHOULD NOT attempt to match a presented identifier in which
    # the wildcard character comprises a label other than the left-most label.
    if first == '*':
        # When '*' is a fragment by itself, it matches a non-empty dotless
        # fragment.
        headpat = '[^.]+'
    elif first.startswith('xn--') or hostname.startswith('xn--'):
        # RFC 6125, section 6.4.3, subitem 3.
        # The client SHOULD NOT attempt to match a presented identifier
        # where the wildcard character is embedded within an A-label or
        # U-label of an internationalized domain name.
        headpat = re.escape(first)
    else:
        # Otherwise, '*' matches any dotless string, e.g. www*
        headpat = re.escape(first).replace(r'\*', '[^.]*')

    # Remaining fragments match literally; wildcards in them are ignored.
    parts = [headpat] + [re.escape(frag) for frag in rest]
    matcher = re.compile(r'\A' + r'\.'.join(parts) + r'\Z', re.IGNORECASE)
    return matcher.match(hostname) is not None
622 622
def _verifycert(cert, hostname):
    '''Verify that cert (in socket.getpeercert() format) matches hostname.
    CRLs is not handled.

    Returns error message if any problems are found and None on success.
    '''
    if not cert:
        return _('no certificate received')

    candidates = []

    # First consult subjectAltName DNS entries.
    for key, value in cert.get('subjectAltName', []):
        if key != 'DNS':
            continue
        try:
            if _dnsnamematch(value, hostname):
                return None
        except wildcarderror as e:
            return e.args[0]
        candidates.append(value)

    if not candidates:
        # The subject is only checked when there is no DNS in subjectAltName.
        for sub in cert.get('subject', []):
            for key, value in sub:
                # According to RFC 2818 the most specific Common Name must
                # be used.
                if key != 'commonName':
                    continue
                # 'subject' entries are unicode.
                try:
                    value = value.encode('ascii')
                except UnicodeEncodeError:
                    return _('IDN in certificate not supported')

                try:
                    if _dnsnamematch(value, hostname):
                        return None
                except wildcarderror as e:
                    return e.args[0]

                candidates.append(value)

    if len(candidates) > 1:
        return _('certificate is for %s') % ', '.join(candidates)
    elif len(candidates) == 1:
        return _('certificate is for %s') % candidates[0]
    else:
        return _('no commonName or subjectAltName found in certificate')
671 671
def _plainapplepython():
    """return true if this seems to be a pure Apple Python that
    * is unfrozen and presumably has the whole mercurial module in the file
    system
    * presumably is an Apple Python that uses Apple OpenSSL which has patches
    for using system certificate store CAs in addition to the provided
    cacerts file
    """
    if pycompat.sysplatform != 'darwin':
        return False
    if util.mainfrozen() or not pycompat.sysexecutable:
        return False
    exe = os.path.realpath(pycompat.sysexecutable).lower()
    # Apple-shipped interpreters live in one of these two locations.
    return exe.startswith(('/usr/bin/python',
                           '/system/library/frameworks/python.framework/'))
686 686
# Well-known CA bundle locations used as a last resort by _defaultcacerts()
# when Python itself cannot load default certificates.
_systemcacertpaths = [
    # RHEL, CentOS, and Fedora
    '/etc/pki/tls/certs/ca-bundle.trust.crt',
    # Debian, Ubuntu, Gentoo
    '/etc/ssl/certs/ca-certificates.crt',
]
693 693
def _defaultcacerts(ui):
    """return path to default CA certificates or None.

    It is assumed this function is called when the returned certificates
    file will actually be used to validate connections. Therefore this
    function may print warnings or debug messages assuming this usage.

    We don't print a message when the Python is able to load default
    CA certs because this scenario is detected at socket connect time.
    """
    # The "certifi" Python package provides certificates. If it is installed
    # and usable, assume the user intends it to be used and use it.
    try:
        import certifi
        certs = certifi.where()
        if os.path.exists(certs):
            ui.debug('using ca certificates from certifi\n')
            return certs
    except (ImportError, AttributeError):
        # AttributeError guards against a broken/partial certifi install;
        # treat it the same as certifi not being installed at all.
        pass

    # On Windows, only the modern ssl module is capable of loading the system
    # CA certificates. If we're not capable of doing that, emit a warning
    # because we'll get a certificate verification error later and the lack
    # of loaded CA certificates will be the reason why.
    # Assertion: this code is only called if certificates are being verified.
    if pycompat.iswindows:
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load Windows CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))

        return None

    # Apple's OpenSSL has patches that allow a specially constructed certificate
    # to load the system CA store. If we're running on Apple Python, use this
    # trick.
    if _plainapplepython():
        dummycert = os.path.join(
            os.path.dirname(pycompat.fsencode(__file__)), 'dummycert.pem')
        if os.path.exists(dummycert):
            return dummycert

    # The Apple OpenSSL trick isn't available to us. If Python isn't able to
    # load system certs, we're out of luck.
    if pycompat.sysplatform == 'darwin':
        # FUTURE Consider looking for Homebrew or MacPorts installed certs
        # files. Also consider exporting the keychain certs to a file during
        # Mercurial install.
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))
        return None

    # / is writable on Windows. Out of an abundance of caution make sure
    # we're not on Windows because paths from _systemcacerts could be installed
    # by non-admin users.
    assert not pycompat.iswindows

    # Try to find CA certificates in well-known locations. We print a warning
    # when using a found file because we don't want too much silent magic
    # for security settings. The expectation is that proper Mercurial
    # installs will have the CA certs path defined at install time and the
    # installer/packager will make an appropriate decision on the user's
    # behalf. We only get here and perform this setting as a feature of
    # last resort.
    if not _canloaddefaultcerts:
        for path in _systemcacertpaths:
            if os.path.isfile(path):
                ui.warn(_('(using CA certificates from %s; if you see this '
                          'message, your Mercurial install is not properly '
                          'configured; see '
                          'https://mercurial-scm.org/wiki/SecureConnections '
                          'for how to configure Mercurial to avoid this '
                          'message)\n') % path)
                return path

    ui.warn(_('(unable to load CA certificates; see '
              'https://mercurial-scm.org/wiki/SecureConnections for '
              'how to configure Mercurial to avoid this message)\n'))

    return None
777 777
def validatesocket(sock):
    """Validate a socket meets security requirements.

    The passed socket must have been created with ``wrapsocket()``.

    Raises error.Abort when the peer certificate cannot be validated
    against pinned fingerprints or loaded CAs.
    """
    # State stashed on the socket by wrapsocket().
    host = sock._hgstate['hostname']
    ui = sock._hgstate['ui']
    settings = sock._hgstate['settings']

    try:
        peercert = sock.getpeercert(True)      # DER-encoded bytes
        peercert2 = sock.getpeercert()         # parsed dict form
    except AttributeError:
        raise error.Abort(_('%s ssl connection error') % host)

    if not peercert:
        raise error.Abort(_('%s certificate error: '
                            'no certificate received') % host)

    if settings['disablecertverification']:
        # We don't print the certificate fingerprint because it shouldn't
        # be necessary: if the user requested certificate verification be
        # disabled, they presumably already saw a message about the inability
        # to verify the certificate and this message would have printed the
        # fingerprint. So printing the fingerprint here adds little to no
        # value.
        ui.warn(_('warning: connection security to %s is disabled per current '
                  'settings; communication is susceptible to eavesdropping '
                  'and tampering\n') % host)
        return

    # If a certificate fingerprint is pinned, use it and only it to
    # validate the remote cert.
    peerfingerprints = {
        'sha1': hashlib.sha1(peercert).hexdigest(),
        'sha256': hashlib.sha256(peercert).hexdigest(),
        'sha512': hashlib.sha512(peercert).hexdigest(),
    }

    def fmtfingerprint(s):
        # 'abcd' -> 'ab:cd' display form
        return ':'.join([s[x:x + 2] for x in range(0, len(s), 2)])

    nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])

    if settings['certfingerprints']:
        for hash, fingerprint in settings['certfingerprints']:
            if peerfingerprints[hash].lower() == fingerprint:
                ui.debug('%s certificate matched fingerprint %s:%s\n' %
                         (host, hash, fmtfingerprint(fingerprint)))
                if settings['legacyfingerprint']:
                    ui.warn(_('(SHA-1 fingerprint for %s found in legacy '
                              '[hostfingerprints] section; '
                              'if you trust this fingerprint, remove the old '
                              'SHA-1 fingerprint from [hostfingerprints] and '
                              'add the following entry to the new '
                              '[hostsecurity] section: %s:fingerprints=%s)\n') %
                            (host, host, nicefingerprint))
                return

        # Pinned fingerprint didn't match. This is a fatal error.
        if settings['legacyfingerprint']:
            section = 'hostfingerprint'
            nice = fmtfingerprint(peerfingerprints['sha1'])
        else:
            section = 'hostsecurity'
            # NOTE: 'hash' deliberately leaks from the for loop above; it
            # refers to the last pinned fingerprint's hash algorithm.
            nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
        raise error.Abort(_('certificate for %s has unexpected '
                            'fingerprint %s') % (host, nice),
                          hint=_('check %s configuration') % section)

    # Security is enabled but no CAs are loaded. We can't establish trust
    # for the cert so abort.
    if not sock._hgstate['caloaded']:
        raise error.Abort(
            _('unable to verify security of %s (no loaded CA certificates); '
              'refusing to connect') % host,
            hint=_('see https://mercurial-scm.org/wiki/SecureConnections for '
                   'how to configure Mercurial to avoid this error or set '
                   'hostsecurity.%s:fingerprints=%s to trust this server') %
                 (host, nicefingerprint))

    # CA-based validation of the parsed certificate against the hostname.
    msg = _verifycert(peercert2, host)
    if msg:
        raise error.Abort(_('%s certificate error: %s') % (host, msg),
                          hint=_('set hostsecurity.%s:certfingerprints=%s '
                                 'config setting or use --insecure to connect '
                                 'insecurely') %
                               (host, nicefingerprint))
@@ -1,2002 +1,2002
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22
23 23 from .i18n import _
24 24 from . import (
25 25 cmdutil,
26 26 config,
27 27 encoding,
28 28 error,
29 29 exchange,
30 30 filemerge,
31 31 match as matchmod,
32 32 node,
33 33 pathutil,
34 34 phases,
35 35 pycompat,
36 36 scmutil,
37 37 util,
38 38 vfs as vfsmod,
39 39 )
40 40
hg = None  # lazily bound to mercurial.hg by subrepo()/nullsubrepo() to break an import cycle
propertycache = util.propertycache

# state tuple for an absent subrepo: (source, revision, kind)
nullstate = ('', '', 'empty')
45 45
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    localized = util.urllocalpath(util.expandpath(path))
    parsed = util.url(localized)
    if parsed.scheme:
        # Real URLs (with a scheme) are returned untouched.
        return path
    return util.normpath(os.path.abspath(parsed.path))
55 55
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    digest = hashlib.sha1(_expandedabspath(remotepath)).hexdigest()
    return digest[:12]
59 59
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        # Strip our extra keywords before delegating to error.Abort.
        self.subrepo = kw.pop('subrepo', None)
        self.cause = kw.pop('cause', None)
        super(SubrepoAbort, self).__init__(*args, **kw)
66 66
def annotatesubrepoerror(func):
    """Decorator annotating error.Abort raised by *func* with the subrepo path.

    A raised error.Abort is converted into SubrepoAbort so outer frames
    wrapped by this decorator do not annotate it a second time.
    """
    def decoratedmethod(self, *args, **kargs):
        try:
            return func(self, *args, **kargs)
        except SubrepoAbort:
            # Already annotated further down the call stack; propagate as-is.
            raise
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
    return decoratedmethod
82 82
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    repo = ctx.repo()
    def read(f, sections=None, remap=None):
        # Parse a spec file from the changectx into ``p``. A file listed in
        # the ctx but unreadable due to ENOENT is treated as removed.
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
                        repo.pathto(f))
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
    if '.hgsub' in ctx:
        read('.hgsub')

    # user-configured [subpaths] rewrite rules take part in remapping below
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # rev: subrepo path -> pinned revision, parsed from .hgsubstate
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        # Apply every [subpaths] pattern, in order, to rewrite ``src``.
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = util.escapestr(repl)
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        if src.startswith('['):
            # '[kind]source' selects a non-hg subrepo type (git, svn, ...)
            if ']' not in src:
                raise error.Abort(_('missing ] in subrepository source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            # Resolve relative sources against the parent repo's source.
            parent = _abssource(repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
174 174
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    lines = []
    for s in sorted(state):
        # skip entries still at the null (absent) revision
        if state[s][1] == nullstate[1]:
            continue
        lines.append('%s %s\n' % (state[s][1], s))
    repo.wwrite('.hgsubstate', ''.join(lines), '')
180 180
def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context

    Returns the merged substate mapping (also written to .hgsubstate).
    """
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate   # local (working) side
    s2 = mctx.substate   # other (merging) side
    sa = actx.substate   # common ancestor
    sm = {}              # merged result, accumulated below

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    promptssrc = filemerge.partextras(labels)
    # Pass 1: walk the local side, comparing against other and ancestor.
    for s, l in sorted(s1.iteritems()):
        prompts = None
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        prompts = promptssrc.copy()
        prompts['s'] = s
        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                prompts['lo'] = l[0]
                prompts['ro'] = r[0]
                if repo.ui.promptchoice(
                    _(' subrepository sources for %(s)s differ\n'
                      'use (l)ocal%(l)s source (%(lo)s)'
                      ' or (r)emote%(o)s source (%(ro)s)?'
                      '$$ &Local $$ &Remote') % prompts, 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # Both sides changed: ask the user to merge, keep local,
                # or keep remote.
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                prompts['sl'] = srepo.shortid(l[1])
                prompts['sr'] = srepo.shortid(r[1])
                option = repo.ui.promptchoice(
                    _(' subrepository %(s)s diverged (local revision: %(sl)s, '
                      'remote revision: %(sr)s)\n'
                      '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % prompts, 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # Local changed a subrepo the remote removed: prompt.
            if repo.ui.promptchoice(
                _(' local%(l)s changed subrepository %(s)s'
                  ' which remote%(o)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # Pass 2: entries only present on the other side.
    for s, r in sorted(s2.items()):
        prompts = None
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # Remote changed a subrepo the local side removed: prompt.
            prompts = promptssrc.copy()
            prompts['s'] = s
            if repo.ui.promptchoice(
                _(' remote%(o)s changed subrepository %(s)s'
                  ' which local%(l)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0) == 0:
                debug(s, "prompt recreate", r)
                mctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
295 295
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask the user to choose the local or remote subrepo source.

    Returns the index selected by ui.promptchoice (0 = local).
    """
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    return ui.promptchoice(template % (subrelpath(sub), local, remote), 0)
309 309
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    outermost = repo
    # Climb the _subparent chain to find the top-level repository.
    while util.safehasattr(outermost, '_subparent'):
        outermost = outermost._subparent
    prefix = pathutil.normasprefix(outermost.root)
    return repo.root[len(prefix):]
316 316
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # _relpath is maintained by the subrepo classes themselves.
    return sub._relpath
320 320
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # This is a subrepo: resolve its recorded source, joining relative
        # sources onto the (recursively resolved) parent source.
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        # Fallback order: explicit push target, default-push, default,
        # the share source, then abort/None.
        if util.safehasattr(repo, '_subtoppath'):
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.shared():
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise error.Abort(_("default path for subrepository not found"))
347 347
def _sanitize(ui, vfs, ignore):
    """Walk the tree under ``vfs``, pruning directories whose lower-cased
    name equals ``ignore`` (the foreign SCM's metadata directory, e.g.
    '.git' or '.svn') and deleting any potentially hostile ``hgrc`` found
    inside a ``.hg`` directory.

    ``ignore`` is expected to be lower-cased by the caller.
    """
    for dirname, dirs, names in vfs.walk():
        # Prune every matching entry in place so walk() does not descend
        # into it. The previous code removed only the first match, which
        # on a case-sensitive filesystem could leave e.g. '.GIT' behind
        # when both '.git' and '.GIT' are present.
        dirs[:] = [d for d in dirs if d.lower() != ignore]
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
361 361
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """Return an instance of the right subrepo class for the subrepo at
    ``path`` in context ``ctx``."""
    # subrepo inherently violates our import layering rules because it
    # wants to make repo objects from deep inside the stack, so we
    # manually delay the circular import to not break scripts that don't
    # use our demand-loading.
    global hg
    from . import hg as h
    hg = h

    # Refuse hostile paths (e.g. escaping the repo root) before use.
    pathutil.pathauditor(ctx.repo().root)(path)
    source, revision, kind = ctx.substate[path]
    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
    if allowwdir:
        # Use the working-directory revision instead of the recorded one.
        revision = ctx.subrev(path)
    return types[kind](ctx, path, (source, revision), allowcreate)
379 379
def nullsubrepo(ctx, path, pctx):
    """Return an empty subrepo in ``pctx`` for the extant subrepo in
    ``ctx``."""
    # Delayed circular import; see subrepo() for the rationale.
    global hg
    from . import hg as h
    hg = h

    pathutil.pathauditor(ctx.repo().root)(path)
    source, revision, kind = ctx.substate[path]
    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
    # Mercurial subrepos represent "empty" with the 40-char null node id;
    # other subrepo kinds use an empty revision string.
    subrev = "0" * 40 if kind == 'hg' else ''
    return types[kind](pctx, path, (source, subrev), True)
398 398
def newcommitphase(ui, ctx):
    """Return the phase for a new commit of ``ctx``, taking the phases of
    its subrepositories into account per phases.checksubrepos ('ignore',
    'follow' or 'abort')."""
    commitphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        # No subrepos involved: the configured phase applies directly.
        return commitphase
    check = ui.config('phases', 'checksubrepos')
    if check not in ('ignore', 'follow', 'abort'):
        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
                          % (check))
    if check == 'ignore':
        return commitphase
    # Find the least-public phase among all recorded subrepo revisions.
    maxphase, maxsub = phases.public, None
    for s in sorted(substate):
        subphase = ctx.sub(s).phase(substate[s][1])
        if maxphase < subphase:
            maxphase, maxsub = subphase, s
    if commitphase >= maxphase:
        return commitphase
    # A subrepo is in a less-public phase than the configured commit phase.
    if check == 'abort':
        raise error.Abort(_("can't commit in %s phase"
                            " conflicting %s from subrepository %s") %
                          (phases.phasenames[commitphase],
                           phases.phasenames[maxphase], maxsub))
    ui.warn(_("warning: changes are committed in"
              " %s phase from subrepository %s\n") %
            (phases.phasenames[maxphase], maxsub))
    return maxphase
429 429
430 430 # subrepo classes need to implement the following abstract class:
431 431
class abstractsubrepo(object):
    """Base class defining the interface every subrepo implementation
    (Mercurial, Subversion, Git) must provide. Methods raising
    NotImplementedError are mandatory; the others have safe defaults."""

    def __init__(self, ctx, path):
        """Initialize abstractsubrepo part

        ``ctx`` is the context referring this subrepository in the
        parent repository.

        ``path`` is the path to this subrepository as seen from
        innermost repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Add the hgwebdir entries for this subrepo, and any of its subrepos.

        ``serverpath`` is the path component of the URL for this repo.

        ``webconf`` is the dictionary of hgwebdir entries.
        """
        pass

    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False

    def dirty(self, ignoreupdate=False, missing=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate. If missing
        is true, check for deleted files.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False, missing=False):
        """return reason string if it is ``dirty()``

        Returned string should have enough information for the message
        of exception.

        This returns None, otherwise.
        """
        if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
            return _('uncommitted changes in subrepository "%s"'
                     ) % subrelpath(self)

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """raise Abort if subrepository is ``dirty()``
        """
        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
                                       missing=True)
        if dirtyreason:
            raise error.Abort(dirtyreason, hint=hint)

    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def phase(self, state):
        """returns phase of specified state in the subrepository.
        """
        return phases.public

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, explicitonly, **opts):
        """add matched files; default implementation adds nothing"""
        return []

    def addremove(self, matcher, prefix, opts, dry_run, similarity):
        """addremove matched files; unsupported by default (returns 1)"""
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1

    def cat(self, match, fm, fntemplate, prefix, **opts):
        """output file contents; unsupported by default (returns 1)"""
        return 1

    def status(self, rev2, **opts):
        """return a (empty by default) status tuple for this subrepo"""
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        """show diffs for this subrepo; no output by default"""
        pass

    def outgoing(self, ui, dest, opts):
        """show outgoing changes; unsupported by default (returns 1)"""
        return 1

    def incoming(self, ui, source, opts):
        """show incoming changes; unsupported by default (returns 1)"""
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name, decode):
        """return file data, optionally passed through repo decoders"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def getfileset(self, expr):
        """Resolve the fileset expression for this repo"""
        return set()

    def printfiles(self, ui, m, fm, fmt, subrepos):
        """handle the files command for this subrepo"""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        """add this subrepo's files to ``archiver``; returns the number of
        files archived"""
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        self.ui.progress(_('archiving (%s)') % relpath, 0,
                         unit=_('files'), total=total)
        for i, name in enumerate(files):
            flags = self.fileflags(name)
            # executable files get 0o755, everything else 0o644
            mode = 'x' in flags and 0o755 or 0o644
            symlink = 'l' in flags
            archiver.addfile(prefix + self._path + '/' + name,
                             mode, symlink, self.filedata(name, decode))
            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                             unit=_('files'), total=total)
        self.ui.progress(_('archiving (%s)') % relpath, None)
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''

    def forget(self, match, prefix):
        """forget matched files; returns (forgotten, rejected) lists"""
        return ([], [])

    def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
        """remove the matched files from the subrepository and the filesystem,
        possibly by force and/or after the file has been removed from the
        filesystem. Return 0 on success, 1 on any warning.
        """
        warnings.append(_("warning: removefiles not implemented (%s)")
                        % self._path)
        return 1

    def revert(self, substate, *pats, **opts):
        """revert this subrepo to ``substate``; unsupported by default"""
        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
                     % (substate[0], substate[2]))
        return []

    def shortid(self, revid):
        """return an abbreviated form of ``revid`` for display"""
        return revid

    def verify(self):
        '''verify the integrity of the repository. Return 0 on success or
        warning, 1 on any error.
        '''
        return 0

    @propertycache
    def wvfs(self):
        """return vfs to access the working directory of this subrepository
        """
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
641 641
class hgsubrepo(abstractsubrepo):
    """Mercurial subrepository implementation.

    Wraps a nested Mercurial repository and implements the
    abstractsubrepo interface on top of it."""

    def __init__(self, ctx, path, state, allowcreate):
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(path)
        # only create on disk when allowed and no .hg exists yet
        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
        self._repo = hg.repository(r.baseui, root, create=create)

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        # copy selected config values from the parent repo's ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, 'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
        self._initrepo(r, state[0], create)

    @annotatesubrepoerror
    def addwebdirpath(self, serverpath, webconf):
        cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)

    def storeclean(self, path):
        """return True if the store is unchanged since the last interaction
        with the repo at ``path`` (see _storeclean)"""
        with self._repo.lock():
            return self._storeclean(path)

    def _storeclean(self, path):
        # compare the cached store hash against a freshly computed one,
        # entry by entry; both iterators must match and have equal length
        clean = True
        itercache = self._calcstorehash(path)
        for filehash in self._readstorehashcache(path):
            if filehash != next(itercache, None):
                clean = False
                break
        if clean:
            # if not empty:
            # the cached and current pull states have a different size
            clean = next(itercache, None) is None
        return clean

    def _calcstorehash(self, remotepath):
        '''calculate a unique "store hash"

        This method is used to to detect when there are changes that may
        require a push to a given remote path.'''
        # sort the files that will be hashed in increasing (likely) file size
        filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
        yield '# %s\n' % _expandedabspath(remotepath)
        vfs = self._repo.vfs
        for relname in filelist:
            filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
            yield '%s = %s\n' % (relname, filehash)

    @propertycache
    def _cachestorehashvfs(self):
        # vfs rooted at .hg/cache/storehash for the per-remote hash files
        return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))

    def _readstorehashcache(self, remotepath):
        '''read the store hash cache for a given remote repository'''
        cachefile = _getstorehashcachename(remotepath)
        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')

    def _cachestorehash(self, remotepath):
        '''cache the current store hash

        Each remote repo requires its own store hash cache, because a subrepo
        store may be "clean" versus a given remote repo, but not versus another
        '''
        cachefile = _getstorehashcachename(remotepath)
        with self._repo.lock():
            storehash = list(self._calcstorehash(remotepath))
            vfs = self._cachestorehashvfs
            vfs.writelines(cachefile, storehash, mode='w', notindexed=True)

    def _getctx(self):
        '''fetch the context for this subrepo revision, possibly a workingctx
        '''
        if self._ctx.rev() is None:
            return self._repo[None] # workingctx if parent is workingctx
        else:
            rev = self._state[1]
            return self._repo[rev]

    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        # link this repo to its parent and record where it came from
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            # write an initial hgrc with default/default-push paths
            lines = ['[paths]\n']

            def addpathconfig(key, value):
                if value:
                    lines.append('%s = %s\n' % (key, value))
                    self.ui.setconfig('paths', key, value, 'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)

            fp = self._repo.vfs("hgrc", "w", text=True)
            try:
                fp.write(''.join(lines))
            finally:
                fp.close()

    @annotatesubrepoerror
    def add(self, ui, match, prefix, explicitonly, **opts):
        return cmdutil.add(ui, self._repo, match,
                           self.wvfs.reljoin(prefix, self._path),
                           explicitonly, **opts)

    @annotatesubrepoerror
    def addremove(self, m, prefix, opts, dry_run, similarity):
        # In the same way as sub directories are processed, once in a subrepo,
        # always entry any of its subrepos. Don't corrupt the options that will
        # be used to process sibling subrepos however.
        opts = copy.copy(opts)
        opts['subrepos'] = True
        return scmutil.addremove(self._repo, m,
                                 self.wvfs.reljoin(prefix, self._path), opts,
                                 dry_run, similarity)

    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        ctx = self._repo[rev]
        return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
                           prefix, **opts)

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError as inst:
            # a missing revision degrades to an empty status, not an error
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))
            return scmutil.status([], [], [], [], [], [], [])

    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            cmdutil.diffordiffstat(ui, self._repo, diffopts,
                                   node1, node2, match,
                                   prefix=posixpath.join(prefix, self._path),
                                   listsubrepos=True, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))

    @annotatesubrepoerror
    def archive(self, archiver, prefix, match=None, decode=True):
        # make sure the recorded revision is present locally, then archive
        # this repo and recurse into its own subrepos
        self._get(self._state + ('hg',))
        total = abstractsubrepo.archive(self, archiver, prefix, match)
        rev = self._state[1]
        ctx = self._repo[rev]
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath, True)
            submatch = matchmod.subdirmatcher(subpath, match)
            total += s.archive(archiver, prefix + self._path + '/', submatch,
                               decode)
        return total

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        """return True if the working copy differs from the recorded state
        (or has uncommitted changes when ignoreupdate is True)"""
        r = self._state[1]
        if r == '' and not ignoreupdate: # no state recorded
            return True
        w = self._repo[None]
        if r != w.p1().hex() and not ignoreupdate:
            # different version checked out
            return True
        return w.dirty(missing=missing) # working directory changed

    def basestate(self):
        return self._repo['.'].hex()

    def checknested(self, path):
        return self._repo._checknested(self._repo.wjoin(path))

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # don't bother committing in the subrepo if it's only been
        # updated
        if not self.dirty(True):
            return self._repo['.'].hex()
        self.ui.debug("committing subrepo %s\n" % subrelpath(self))
        n = self._repo.commit(text, user, date)
        if not n:
            return self._repo['.'].hex() # different version checked out
        return node.hex(n)

    @annotatesubrepoerror
    def phase(self, state):
        return self._repo[state].phase()

    @annotatesubrepoerror
    def remove(self):
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
        hg.clean(self._repo, node.nullid, False)

    def _get(self, state):
        """make sure ``state``'s revision is available locally, cloning or
        pulling as needed; returns True if it was already present"""
        source, revision, kind = state
        if revision in self._repo.unfiltered():
            return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)
        other = hg.peer(self._repo, {}, srcurl)
        if len(self._repo) == 0:
            # empty repo: replace it entirely with a fresh clone
            self.ui.status(_('cloning subrepo %s from %s\n')
                           % (subrelpath(self), srcurl))
            parentrepo = self._repo._subparent
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()
            other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                     other, self._repo.root,
                                     update=False)
            self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            self._cachestorehash(srcurl)
        else:
            self.ui.status(_('pulling subrepo %s from %s\n')
                           % (subrelpath(self), srcurl))
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, other)
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        inrepo = self._get(state)
        source, revision, kind = state
        repo = self._repo
        repo.ui.debug("getting subrepo %s\n" % self._path)
        if inrepo:
            # revision was already local; warn if it is hidden and use the
            # unfiltered repo so the update can still reach it
            urepo = repo.unfiltered()
            ctx = urepo[revision]
            if ctx.hidden():
                urepo.ui.warn(
                    _('revision %s in subrepository "%s" is hidden\n') \
                    % (revision[0:12], self._path))
                repo = urepo
        hg.updaterepo(repo, revision, overwrite)

    @annotatesubrepoerror
    def merge(self, state):
        """merge or update the subrepo toward ``state``, prompting when the
        working copy is dirty and a real merge would be needed"""
        self._get(state)
        cur = self._repo['.']
        dst = self._repo[state[1]]
        anc = dst.ancestor(cur)

        def mergefunc():
            # fast-forward update when dst descends from cur on the same
            # branch; skip when cur already contains dst; merge otherwise
            if anc == cur and dst.branch() == cur.branch():
                self.ui.debug('updating subrepository "%s"\n'
                              % subrelpath(self))
                hg.update(self._repo, state[1])
            elif anc == dst:
                self.ui.debug('skipping subrepository "%s"\n'
                              % subrelpath(self))
            else:
                self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
                hg.merge(self._repo, state[1], remind=False)

        wctx = self._repo[None]
        if self.dirty():
            if anc != dst:
                if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
                    mergefunc()
            else:
                mergefunc()
        else:
            mergefunc()

    @annotatesubrepoerror
    def push(self, opts):
        """push this subrepo (and its own subrepos, depth-first) to its
        default-push/default path; returns False if a nested push failed,
        None if there was nothing to push, or the push's cgresult"""
        force = opts.get('force')
        newbranch = opts.get('new_branch')
        ssh = opts.get('ssh')

        # push subrepos depth-first for coherent ordering
        c = self._repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False

        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                self.ui.status(
                    _('no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), dsturl))
                return None
        self.ui.status(_('pushing subrepo %s to %s\n') %
                       (subrelpath(self), dsturl))
        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
        res = exchange.push(self._repo, other, force, newbranch=newbranch)

        # the repo is now clean
        self._cachestorehash(dsturl)
        return res.cgresult

    @annotatesubrepoerror
    def outgoing(self, ui, dest, opts):
        # revs/branches of the parent repo make no sense in the subrepo
        if 'rev' in opts or 'branch' in opts:
            opts = copy.copy(opts)
            opts.pop('rev', None)
            opts.pop('branch', None)
        return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)

    @annotatesubrepoerror
    def incoming(self, ui, source, opts):
        # revs/branches of the parent repo make no sense in the subrepo
        if 'rev' in opts or 'branch' in opts:
            opts = copy.copy(opts)
            opts.pop('rev', None)
            opts.pop('branch', None)
        return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)

    @annotatesubrepoerror
    def files(self):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.manifest().keys()

    def filedata(self, name, decode):
        rev = self._state[1]
        data = self._repo[rev][name].data()
        if decode:
            data = self._repo.wwritedata(name, data)
        return data

    def fileflags(self, name):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.flags(name)

    @annotatesubrepoerror
    def printfiles(self, ui, m, fm, fmt, subrepos):
        # If the parent context is a workingctx, use the workingctx here for
        # consistency.
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]
        return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)

    @annotatesubrepoerror
    def getfileset(self, expr):
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]

        files = ctx.getfileset(expr)

        # recurse into nested subrepos, prefixing their results
        for subpath in ctx.substate:
            sub = ctx.sub(subpath)

            try:
                files.extend(subpath + '/' + f for f in sub.getfileset(expr))
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % self.wvfs.reljoin(reporelpath(self), subpath))
        return files

    def walk(self, match):
        ctx = self._repo[None]
        return ctx.walk(match)

    @annotatesubrepoerror
    def forget(self, match, prefix):
        return cmdutil.forget(self.ui, self._repo, match,
                              self.wvfs.reljoin(prefix, self._path), True)

    @annotatesubrepoerror
    def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
        return cmdutil.remove(self.ui, self._repo, matcher,
                              self.wvfs.reljoin(prefix, self._path),
                              after, force, subrepos)

    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        # reverting a subrepo is a 2 step process:
        # 1. if the no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get('no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts['date'] = None
            opts['rev'] = substate[1]

            self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get('dry_run'):
            self.get(substate, overwrite=True)

    def filerevert(self, *pats, **opts):
        # revert files against the revision in opts['rev']; with --all,
        # revert every modified file instead of the given patterns
        ctx = self._repo[opts['rev']]
        parents = self._repo.dirstate.parents()
        if opts.get('all'):
            pats = ['set:modified()']
        else:
            pats = []
        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)

    def shortid(self, revid):
        return revid[:12]

    def verify(self):
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth an
                # explicit warning.
                ui = self._repo.ui
                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
                        (self._relpath, node.short(self._ctx.node())))
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it, so
            # don't treat this as an error.
            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
                               (self._relpath, node.short(self._ctx.node())))
            return 0

    @propertycache
    def wvfs(self):
        """return own wvfs for efficiency and consistency
        """
        return self._repo.wvfs

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        # Keep consistent dir separators by avoiding vfs.join(self._path)
        return reporelpath(self._repo)
1105 1105
1106 1106 class svnsubrepo(abstractsubrepo):
1107 1107 def __init__(self, ctx, path, state, allowcreate):
1108 1108 super(svnsubrepo, self).__init__(ctx, path)
1109 1109 self._state = state
1110 1110 self._exe = util.findexe('svn')
1111 1111 if not self._exe:
1112 1112 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1113 1113 % self._path)
1114 1114
1115 1115 def _svncommand(self, commands, filename='', failok=False):
1116 1116 cmd = [self._exe]
1117 1117 extrakw = {}
1118 1118 if not self.ui.interactive():
1119 1119 # Making stdin be a pipe should prevent svn from behaving
1120 1120 # interactively even if we can't pass --non-interactive.
1121 1121 extrakw['stdin'] = subprocess.PIPE
1122 1122 # Starting in svn 1.5 --non-interactive is a global flag
1123 1123 # instead of being per-command, but we need to support 1.4 so
1124 1124 # we have to be intelligent about what commands take
1125 1125 # --non-interactive.
1126 1126 if commands[0] in ('update', 'checkout', 'commit'):
1127 1127 cmd.append('--non-interactive')
1128 1128 cmd.extend(commands)
1129 1129 if filename is not None:
1130 1130 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1131 1131 self._path, filename)
1132 1132 cmd.append(path)
1133 1133 env = dict(encoding.environ)
1134 1134 # Avoid localized output, preserve current locale for everything else.
1135 1135 lc_all = env.get('LC_ALL')
1136 1136 if lc_all:
1137 1137 env['LANG'] = lc_all
1138 1138 del env['LC_ALL']
1139 1139 env['LC_MESSAGES'] = 'C'
1140 1140 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1141 1141 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1142 1142 universal_newlines=True, env=env, **extrakw)
1143 1143 stdout, stderr = p.communicate()
1144 1144 stderr = stderr.strip()
1145 1145 if not failok:
1146 1146 if p.returncode:
1147 1147 raise error.Abort(stderr or 'exited with code %d'
1148 1148 % p.returncode)
1149 1149 if stderr:
1150 1150 self.ui.warn(stderr + '\n')
1151 1151 return stdout, stderr
1152 1152
1153 1153 @propertycache
1154 1154 def _svnversion(self):
1155 1155 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1156 1156 m = re.search(br'^(\d+)\.(\d+)', output)
1157 1157 if not m:
1158 1158 raise error.Abort(_('cannot retrieve svn tool version'))
1159 1159 return (int(m.group(1)), int(m.group(2)))
1160 1160
1161 1161 def _wcrevs(self):
1162 1162 # Get the working directory revision as well as the last
1163 1163 # commit revision so we can compare the subrepo state with
1164 1164 # both. We used to store the working directory one.
1165 1165 output, err = self._svncommand(['info', '--xml'])
1166 1166 doc = xml.dom.minidom.parseString(output)
1167 1167 entries = doc.getElementsByTagName('entry')
1168 1168 lastrev, rev = '0', '0'
1169 1169 if entries:
1170 1170 rev = str(entries[0].getAttribute('revision')) or '0'
1171 1171 commits = entries[0].getElementsByTagName('commit')
1172 1172 if commits:
1173 1173 lastrev = str(commits[0].getAttribute('revision')) or '0'
1174 1174 return (lastrev, rev)
1175 1175
1176 1176 def _wcrev(self):
1177 1177 return self._wcrevs()[0]
1178 1178
1179 1179 def _wcchanged(self):
1180 1180 """Return (changes, extchanges, missing) where changes is True
1181 1181 if the working directory was changed, extchanges is
1182 1182 True if any of these changes concern an external entry and missing
1183 1183 is True if any change is a missing entry.
1184 1184 """
1185 1185 output, err = self._svncommand(['status', '--xml'])
1186 1186 externals, changes, missing = [], [], []
1187 1187 doc = xml.dom.minidom.parseString(output)
1188 1188 for e in doc.getElementsByTagName('entry'):
1189 1189 s = e.getElementsByTagName('wc-status')
1190 1190 if not s:
1191 1191 continue
1192 1192 item = s[0].getAttribute('item')
1193 1193 props = s[0].getAttribute('props')
1194 1194 path = e.getAttribute('path')
1195 1195 if item == 'external':
1196 1196 externals.append(path)
1197 1197 elif item == 'missing':
1198 1198 missing.append(path)
1199 1199 if (item not in ('', 'normal', 'unversioned', 'external')
1200 1200 or props not in ('', 'none', 'normal')):
1201 1201 changes.append(path)
1202 1202 for path in changes:
1203 1203 for ext in externals:
1204 1204 if path == ext or path.startswith(ext + pycompat.ossep):
1205 1205 return True, True, bool(missing)
1206 1206 return bool(changes), False, bool(missing)
1207 1207
1208 1208 def dirty(self, ignoreupdate=False, missing=False):
1209 1209 wcchanged = self._wcchanged()
1210 1210 changed = wcchanged[0] or (missing and wcchanged[2])
1211 1211 if not changed:
1212 1212 if self._state[1] in self._wcrevs() or ignoreupdate:
1213 1213 return False
1214 1214 return True
1215 1215
1216 1216 def basestate(self):
1217 1217 lastrev, rev = self._wcrevs()
1218 1218 if lastrev != rev:
1219 1219 # Last committed rev is not the same than rev. We would
1220 1220 # like to take lastrev but we do not know if the subrepo
1221 1221 # URL exists at lastrev. Test it and fallback to rev it
1222 1222 # is not there.
1223 1223 try:
1224 1224 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1225 1225 return lastrev
1226 1226 except error.Abort:
1227 1227 pass
1228 1228 return rev
1229 1229
1230 1230 @annotatesubrepoerror
1231 1231 def commit(self, text, user, date):
1232 1232 # user and date are out of our hands since svn is centralized
1233 1233 changed, extchanged, missing = self._wcchanged()
1234 1234 if not changed:
1235 1235 return self.basestate()
1236 1236 if extchanged:
1237 1237 # Do not try to commit externals
1238 1238 raise error.Abort(_('cannot commit svn externals'))
1239 1239 if missing:
1240 1240 # svn can commit with missing entries but aborting like hg
1241 1241 # seems a better approach.
1242 1242 raise error.Abort(_('cannot commit missing svn entries'))
1243 1243 commitinfo, err = self._svncommand(['commit', '-m', text])
1244 1244 self.ui.status(commitinfo)
1245 1245 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1246 1246 if not newrev:
1247 1247 if not commitinfo.strip():
1248 1248 # Sometimes, our definition of "changed" differs from
1249 1249 # svn one. For instance, svn ignores missing files
1250 1250 # when committing. If there are only missing files, no
1251 1251 # commit is made, no output and no error code.
1252 1252 raise error.Abort(_('failed to commit svn changes'))
1253 1253 raise error.Abort(commitinfo.splitlines()[-1])
1254 1254 newrev = newrev.groups()[0]
1255 1255 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1256 1256 return newrev
1257 1257
1258 1258 @annotatesubrepoerror
1259 1259 def remove(self):
1260 1260 if self.dirty():
1261 1261 self.ui.warn(_('not removing repo %s because '
1262 1262 'it has changes.\n') % self._path)
1263 1263 return
1264 1264 self.ui.note(_('removing subrepo %s\n') % self._path)
1265 1265
1266 1266 self.wvfs.rmtree(forcibly=True)
1267 1267 try:
1268 1268 pwvfs = self._ctx.repo().wvfs
1269 1269 pwvfs.removedirs(pwvfs.dirname(self._path))
1270 1270 except OSError:
1271 1271 pass
1272 1272
1273 1273 @annotatesubrepoerror
1274 1274 def get(self, state, overwrite=False):
1275 1275 if overwrite:
1276 1276 self._svncommand(['revert', '--recursive'])
1277 1277 args = ['checkout']
1278 1278 if self._svnversion >= (1, 5):
1279 1279 args.append('--force')
1280 1280 # The revision must be specified at the end of the URL to properly
1281 1281 # update to a directory which has since been deleted and recreated.
1282 1282 args.append('%s@%s' % (state[0], state[1]))
1283 1283
1284 1284 # SEC: check that the ssh url is safe
1285 1285 util.checksafessh(state[0])
1286 1286
1287 1287 status, err = self._svncommand(args, failok=True)
1288 1288 _sanitize(self.ui, self.wvfs, '.svn')
1289 1289 if not re.search('Checked out revision [0-9]+.', status):
1290 1290 if ('is already a working copy for a different URL' in err
1291 1291 and (self._wcchanged()[:2] == (False, False))):
1292 1292 # obstructed but clean working copy, so just blow it away.
1293 1293 self.remove()
1294 1294 self.get(state, overwrite=False)
1295 1295 return
1296 1296 raise error.Abort((status or err).splitlines()[-1])
1297 1297 self.ui.status(status)
1298 1298
1299 1299 @annotatesubrepoerror
1300 1300 def merge(self, state):
1301 1301 old = self._state[1]
1302 1302 new = state[1]
1303 1303 wcrev = self._wcrev()
1304 1304 if new != wcrev:
1305 1305 dirty = old == wcrev or self._wcchanged()[0]
1306 1306 if _updateprompt(self.ui, self, dirty, wcrev, new):
1307 1307 self.get(state, False)
1308 1308
1309 1309 def push(self, opts):
1310 1310 # push is a no-op for SVN
1311 1311 return True
1312 1312
1313 1313 @annotatesubrepoerror
1314 1314 def files(self):
1315 1315 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1316 1316 doc = xml.dom.minidom.parseString(output)
1317 1317 paths = []
1318 1318 for e in doc.getElementsByTagName('entry'):
1319 1319 kind = str(e.getAttribute('kind'))
1320 1320 if kind != 'file':
1321 1321 continue
1322 1322 name = ''.join(c.data for c
1323 1323 in e.getElementsByTagName('name')[0].childNodes
1324 1324 if c.nodeType == c.TEXT_NODE)
1325 1325 paths.append(name.encode('utf-8'))
1326 1326 return paths
1327 1327
1328 1328 def filedata(self, name, decode):
1329 1329 return self._svncommand(['cat'], name)[0]
1330 1330
1331 1331
1332 1332 class gitsubrepo(abstractsubrepo):
1333 1333 def __init__(self, ctx, path, state, allowcreate):
1334 1334 super(gitsubrepo, self).__init__(ctx, path)
1335 1335 self._state = state
1336 1336 self._abspath = ctx.repo().wjoin(path)
1337 1337 self._subparent = ctx.repo()
1338 1338 self._ensuregit()
1339 1339
1340 1340 def _ensuregit(self):
1341 1341 try:
1342 1342 self._gitexecutable = 'git'
1343 1343 out, err = self._gitnodir(['--version'])
1344 1344 except OSError as e:
1345 1345 genericerror = _("error executing git for subrepo '%s': %s")
1346 1346 notfoundhint = _("check git is installed and in your PATH")
1347 1347 if e.errno != errno.ENOENT:
1348 1348 raise error.Abort(genericerror % (
1349 1349 self._path, encoding.strtolocal(e.strerror)))
1350 elif pycompat.osname == 'nt':
1350 elif pycompat.iswindows:
1351 1351 try:
1352 1352 self._gitexecutable = 'git.cmd'
1353 1353 out, err = self._gitnodir(['--version'])
1354 1354 except OSError as e2:
1355 1355 if e2.errno == errno.ENOENT:
1356 1356 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1357 1357 " for subrepo '%s'") % self._path,
1358 1358 hint=notfoundhint)
1359 1359 else:
1360 1360 raise error.Abort(genericerror % (self._path,
1361 1361 encoding.strtolocal(e2.strerror)))
1362 1362 else:
1363 1363 raise error.Abort(_("couldn't find git for subrepo '%s'")
1364 1364 % self._path, hint=notfoundhint)
1365 1365 versionstatus = self._checkversion(out)
1366 1366 if versionstatus == 'unknown':
1367 1367 self.ui.warn(_('cannot retrieve git version\n'))
1368 1368 elif versionstatus == 'abort':
1369 1369 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1370 1370 elif versionstatus == 'warning':
1371 1371 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1372 1372
1373 1373 @staticmethod
1374 1374 def _gitversion(out):
1375 1375 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1376 1376 if m:
1377 1377 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1378 1378
1379 1379 m = re.search(br'^git version (\d+)\.(\d+)', out)
1380 1380 if m:
1381 1381 return (int(m.group(1)), int(m.group(2)), 0)
1382 1382
1383 1383 return -1
1384 1384
1385 1385 @staticmethod
1386 1386 def _checkversion(out):
1387 1387 '''ensure git version is new enough
1388 1388
1389 1389 >>> _checkversion = gitsubrepo._checkversion
1390 1390 >>> _checkversion(b'git version 1.6.0')
1391 1391 'ok'
1392 1392 >>> _checkversion(b'git version 1.8.5')
1393 1393 'ok'
1394 1394 >>> _checkversion(b'git version 1.4.0')
1395 1395 'abort'
1396 1396 >>> _checkversion(b'git version 1.5.0')
1397 1397 'warning'
1398 1398 >>> _checkversion(b'git version 1.9-rc0')
1399 1399 'ok'
1400 1400 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1401 1401 'ok'
1402 1402 >>> _checkversion(b'git version 1.9.0.GIT')
1403 1403 'ok'
1404 1404 >>> _checkversion(b'git version 12345')
1405 1405 'unknown'
1406 1406 >>> _checkversion(b'no')
1407 1407 'unknown'
1408 1408 '''
1409 1409 version = gitsubrepo._gitversion(out)
1410 1410 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1411 1411 # despite the docstring comment. For now, error on 1.4.0, warn on
1412 1412 # 1.5.0 but attempt to continue.
1413 1413 if version == -1:
1414 1414 return 'unknown'
1415 1415 if version < (1, 5, 0):
1416 1416 return 'abort'
1417 1417 elif version < (1, 6, 0):
1418 1418 return 'warning'
1419 1419 return 'ok'
1420 1420
1421 1421 def _gitcommand(self, commands, env=None, stream=False):
1422 1422 return self._gitdir(commands, env=env, stream=stream)[0]
1423 1423
1424 1424 def _gitdir(self, commands, env=None, stream=False):
1425 1425 return self._gitnodir(commands, env=env, stream=stream,
1426 1426 cwd=self._abspath)
1427 1427
1428 1428 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1429 1429 """Calls the git command
1430 1430
1431 1431 The methods tries to call the git command. versions prior to 1.6.0
1432 1432 are not supported and very probably fail.
1433 1433 """
1434 1434 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1435 1435 if env is None:
1436 1436 env = encoding.environ.copy()
1437 1437 # disable localization for Git output (issue5176)
1438 1438 env['LC_ALL'] = 'C'
1439 1439 # fix for Git CVE-2015-7545
1440 1440 if 'GIT_ALLOW_PROTOCOL' not in env:
1441 1441 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1442 1442 # unless ui.quiet is set, print git's stderr,
1443 1443 # which is mostly progress and useful info
1444 1444 errpipe = None
1445 1445 if self.ui.quiet:
1446 1446 errpipe = open(os.devnull, 'w')
1447 1447 if self.ui._colormode and len(commands) and commands[0] == "diff":
1448 1448 # insert the argument in the front,
1449 1449 # the end of git diff arguments is used for paths
1450 1450 commands.insert(1, '--color')
1451 1451 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1452 1452 cwd=cwd, env=env, close_fds=util.closefds,
1453 1453 stdout=subprocess.PIPE, stderr=errpipe)
1454 1454 if stream:
1455 1455 return p.stdout, None
1456 1456
1457 1457 retdata = p.stdout.read().strip()
1458 1458 # wait for the child to exit to avoid race condition.
1459 1459 p.wait()
1460 1460
1461 1461 if p.returncode != 0 and p.returncode != 1:
1462 1462 # there are certain error codes that are ok
1463 1463 command = commands[0]
1464 1464 if command in ('cat-file', 'symbolic-ref'):
1465 1465 return retdata, p.returncode
1466 1466 # for all others, abort
1467 1467 raise error.Abort(_('git %s error %d in %s') %
1468 1468 (command, p.returncode, self._relpath))
1469 1469
1470 1470 return retdata, p.returncode
1471 1471
1472 1472 def _gitmissing(self):
1473 1473 return not self.wvfs.exists('.git')
1474 1474
1475 1475 def _gitstate(self):
1476 1476 return self._gitcommand(['rev-parse', 'HEAD'])
1477 1477
1478 1478 def _gitcurrentbranch(self):
1479 1479 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1480 1480 if err:
1481 1481 current = None
1482 1482 return current
1483 1483
1484 1484 def _gitremote(self, remote):
1485 1485 out = self._gitcommand(['remote', 'show', '-n', remote])
1486 1486 line = out.split('\n')[1]
1487 1487 i = line.index('URL: ') + len('URL: ')
1488 1488 return line[i:]
1489 1489
1490 1490 def _githavelocally(self, revision):
1491 1491 out, code = self._gitdir(['cat-file', '-e', revision])
1492 1492 return code == 0
1493 1493
1494 1494 def _gitisancestor(self, r1, r2):
1495 1495 base = self._gitcommand(['merge-base', r1, r2])
1496 1496 return base == r1
1497 1497
1498 1498 def _gitisbare(self):
1499 1499 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1500 1500
1501 1501 def _gitupdatestat(self):
1502 1502 """This must be run before git diff-index.
1503 1503 diff-index only looks at changes to file stat;
1504 1504 this command looks at file contents and updates the stat."""
1505 1505 self._gitcommand(['update-index', '-q', '--refresh'])
1506 1506
1507 1507 def _gitbranchmap(self):
1508 1508 '''returns 2 things:
1509 1509 a map from git branch to revision
1510 1510 a map from revision to branches'''
1511 1511 branch2rev = {}
1512 1512 rev2branch = {}
1513 1513
1514 1514 out = self._gitcommand(['for-each-ref', '--format',
1515 1515 '%(objectname) %(refname)'])
1516 1516 for line in out.split('\n'):
1517 1517 revision, ref = line.split(' ')
1518 1518 if (not ref.startswith('refs/heads/') and
1519 1519 not ref.startswith('refs/remotes/')):
1520 1520 continue
1521 1521 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1522 1522 continue # ignore remote/HEAD redirects
1523 1523 branch2rev[ref] = revision
1524 1524 rev2branch.setdefault(revision, []).append(ref)
1525 1525 return branch2rev, rev2branch
1526 1526
1527 1527 def _gittracking(self, branches):
1528 1528 'return map of remote branch to local tracking branch'
1529 1529 # assumes no more than one local tracking branch for each remote
1530 1530 tracking = {}
1531 1531 for b in branches:
1532 1532 if b.startswith('refs/remotes/'):
1533 1533 continue
1534 1534 bname = b.split('/', 2)[2]
1535 1535 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1536 1536 if remote:
1537 1537 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1538 1538 tracking['refs/remotes/%s/%s' %
1539 1539 (remote, ref.split('/', 2)[2])] = b
1540 1540 return tracking
1541 1541
1542 1542 def _abssource(self, source):
1543 1543 if '://' not in source:
1544 1544 # recognize the scp syntax as an absolute source
1545 1545 colon = source.find(':')
1546 1546 if colon != -1 and '/' not in source[:colon]:
1547 1547 return source
1548 1548 self._subsource = source
1549 1549 return _abssource(self)
1550 1550
1551 1551 def _fetch(self, source, revision):
1552 1552 if self._gitmissing():
1553 1553 # SEC: check for safe ssh url
1554 1554 util.checksafessh(source)
1555 1555
1556 1556 source = self._abssource(source)
1557 1557 self.ui.status(_('cloning subrepo %s from %s\n') %
1558 1558 (self._relpath, source))
1559 1559 self._gitnodir(['clone', source, self._abspath])
1560 1560 if self._githavelocally(revision):
1561 1561 return
1562 1562 self.ui.status(_('pulling subrepo %s from %s\n') %
1563 1563 (self._relpath, self._gitremote('origin')))
1564 1564 # try only origin: the originally cloned repo
1565 1565 self._gitcommand(['fetch'])
1566 1566 if not self._githavelocally(revision):
1567 1567 raise error.Abort(_('revision %s does not exist in subrepository '
1568 1568 '"%s"\n') % (revision, self._relpath))
1569 1569
1570 1570 @annotatesubrepoerror
1571 1571 def dirty(self, ignoreupdate=False, missing=False):
1572 1572 if self._gitmissing():
1573 1573 return self._state[1] != ''
1574 1574 if self._gitisbare():
1575 1575 return True
1576 1576 if not ignoreupdate and self._state[1] != self._gitstate():
1577 1577 # different version checked out
1578 1578 return True
1579 1579 # check for staged changes or modified files; ignore untracked files
1580 1580 self._gitupdatestat()
1581 1581 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1582 1582 return code == 1
1583 1583
1584 1584 def basestate(self):
1585 1585 return self._gitstate()
1586 1586
1587 1587 @annotatesubrepoerror
1588 1588 def get(self, state, overwrite=False):
1589 1589 source, revision, kind = state
1590 1590 if not revision:
1591 1591 self.remove()
1592 1592 return
1593 1593 self._fetch(source, revision)
1594 1594 # if the repo was set to be bare, unbare it
1595 1595 if self._gitisbare():
1596 1596 self._gitcommand(['config', 'core.bare', 'false'])
1597 1597 if self._gitstate() == revision:
1598 1598 self._gitcommand(['reset', '--hard', 'HEAD'])
1599 1599 return
1600 1600 elif self._gitstate() == revision:
1601 1601 if overwrite:
1602 1602 # first reset the index to unmark new files for commit, because
1603 1603 # reset --hard will otherwise throw away files added for commit,
1604 1604 # not just unmark them.
1605 1605 self._gitcommand(['reset', 'HEAD'])
1606 1606 self._gitcommand(['reset', '--hard', 'HEAD'])
1607 1607 return
1608 1608 branch2rev, rev2branch = self._gitbranchmap()
1609 1609
1610 1610 def checkout(args):
1611 1611 cmd = ['checkout']
1612 1612 if overwrite:
1613 1613 # first reset the index to unmark new files for commit, because
1614 1614 # the -f option will otherwise throw away files added for
1615 1615 # commit, not just unmark them.
1616 1616 self._gitcommand(['reset', 'HEAD'])
1617 1617 cmd.append('-f')
1618 1618 self._gitcommand(cmd + args)
1619 1619 _sanitize(self.ui, self.wvfs, '.git')
1620 1620
1621 1621 def rawcheckout():
1622 1622 # no branch to checkout, check it out with no branch
1623 1623 self.ui.warn(_('checking out detached HEAD in '
1624 1624 'subrepository "%s"\n') % self._relpath)
1625 1625 self.ui.warn(_('check out a git branch if you intend '
1626 1626 'to make changes\n'))
1627 1627 checkout(['-q', revision])
1628 1628
1629 1629 if revision not in rev2branch:
1630 1630 rawcheckout()
1631 1631 return
1632 1632 branches = rev2branch[revision]
1633 1633 firstlocalbranch = None
1634 1634 for b in branches:
1635 1635 if b == 'refs/heads/master':
1636 1636 # master trumps all other branches
1637 1637 checkout(['refs/heads/master'])
1638 1638 return
1639 1639 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1640 1640 firstlocalbranch = b
1641 1641 if firstlocalbranch:
1642 1642 checkout([firstlocalbranch])
1643 1643 return
1644 1644
1645 1645 tracking = self._gittracking(branch2rev.keys())
1646 1646 # choose a remote branch already tracked if possible
1647 1647 remote = branches[0]
1648 1648 if remote not in tracking:
1649 1649 for b in branches:
1650 1650 if b in tracking:
1651 1651 remote = b
1652 1652 break
1653 1653
1654 1654 if remote not in tracking:
1655 1655 # create a new local tracking branch
1656 1656 local = remote.split('/', 3)[3]
1657 1657 checkout(['-b', local, remote])
1658 1658 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1659 1659 # When updating to a tracked remote branch,
1660 1660 # if the local tracking branch is downstream of it,
1661 1661 # a normal `git pull` would have performed a "fast-forward merge"
1662 1662 # which is equivalent to updating the local branch to the remote.
1663 1663 # Since we are only looking at branching at update, we need to
1664 1664 # detect this situation and perform this action lazily.
1665 1665 if tracking[remote] != self._gitcurrentbranch():
1666 1666 checkout([tracking[remote]])
1667 1667 self._gitcommand(['merge', '--ff', remote])
1668 1668 _sanitize(self.ui, self.wvfs, '.git')
1669 1669 else:
1670 1670 # a real merge would be required, just checkout the revision
1671 1671 rawcheckout()
1672 1672
1673 1673 @annotatesubrepoerror
1674 1674 def commit(self, text, user, date):
1675 1675 if self._gitmissing():
1676 1676 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1677 1677 cmd = ['commit', '-a', '-m', text]
1678 1678 env = encoding.environ.copy()
1679 1679 if user:
1680 1680 cmd += ['--author', user]
1681 1681 if date:
1682 1682 # git's date parser silently ignores when seconds < 1e9
1683 1683 # convert to ISO8601
1684 1684 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1685 1685 '%Y-%m-%dT%H:%M:%S %1%2')
1686 1686 self._gitcommand(cmd, env=env)
1687 1687 # make sure commit works otherwise HEAD might not exist under certain
1688 1688 # circumstances
1689 1689 return self._gitstate()
1690 1690
1691 1691 @annotatesubrepoerror
1692 1692 def merge(self, state):
1693 1693 source, revision, kind = state
1694 1694 self._fetch(source, revision)
1695 1695 base = self._gitcommand(['merge-base', revision, self._state[1]])
1696 1696 self._gitupdatestat()
1697 1697 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1698 1698
1699 1699 def mergefunc():
1700 1700 if base == revision:
1701 1701 self.get(state) # fast forward merge
1702 1702 elif base != self._state[1]:
1703 1703 self._gitcommand(['merge', '--no-commit', revision])
1704 1704 _sanitize(self.ui, self.wvfs, '.git')
1705 1705
1706 1706 if self.dirty():
1707 1707 if self._gitstate() != revision:
1708 1708 dirty = self._gitstate() == self._state[1] or code != 0
1709 1709 if _updateprompt(self.ui, self, dirty,
1710 1710 self._state[1][:7], revision[:7]):
1711 1711 mergefunc()
1712 1712 else:
1713 1713 mergefunc()
1714 1714
1715 1715 @annotatesubrepoerror
1716 1716 def push(self, opts):
1717 1717 force = opts.get('force')
1718 1718
1719 1719 if not self._state[1]:
1720 1720 return True
1721 1721 if self._gitmissing():
1722 1722 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1723 1723 # if a branch in origin contains the revision, nothing to do
1724 1724 branch2rev, rev2branch = self._gitbranchmap()
1725 1725 if self._state[1] in rev2branch:
1726 1726 for b in rev2branch[self._state[1]]:
1727 1727 if b.startswith('refs/remotes/origin/'):
1728 1728 return True
1729 1729 for b, revision in branch2rev.iteritems():
1730 1730 if b.startswith('refs/remotes/origin/'):
1731 1731 if self._gitisancestor(self._state[1], revision):
1732 1732 return True
1733 1733 # otherwise, try to push the currently checked out branch
1734 1734 cmd = ['push']
1735 1735 if force:
1736 1736 cmd.append('--force')
1737 1737
1738 1738 current = self._gitcurrentbranch()
1739 1739 if current:
1740 1740 # determine if the current branch is even useful
1741 1741 if not self._gitisancestor(self._state[1], current):
1742 1742 self.ui.warn(_('unrelated git branch checked out '
1743 1743 'in subrepository "%s"\n') % self._relpath)
1744 1744 return False
1745 1745 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1746 1746 (current.split('/', 2)[2], self._relpath))
1747 1747 ret = self._gitdir(cmd + ['origin', current])
1748 1748 return ret[1] == 0
1749 1749 else:
1750 1750 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1751 1751 'cannot push revision %s\n') %
1752 1752 (self._relpath, self._state[1]))
1753 1753 return False
1754 1754
1755 1755 @annotatesubrepoerror
1756 1756 def add(self, ui, match, prefix, explicitonly, **opts):
1757 1757 if self._gitmissing():
1758 1758 return []
1759 1759
1760 1760 (modified, added, removed,
1761 1761 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1762 1762 clean=True)
1763 1763
1764 1764 tracked = set()
1765 1765 # dirstates 'amn' warn, 'r' is added again
1766 1766 for l in (modified, added, deleted, clean):
1767 1767 tracked.update(l)
1768 1768
1769 1769 # Unknown files not of interest will be rejected by the matcher
1770 1770 files = unknown
1771 1771 files.extend(match.files())
1772 1772
1773 1773 rejected = []
1774 1774
1775 1775 files = [f for f in sorted(set(files)) if match(f)]
1776 1776 for f in files:
1777 1777 exact = match.exact(f)
1778 1778 command = ["add"]
1779 1779 if exact:
1780 1780 command.append("-f") #should be added, even if ignored
1781 1781 if ui.verbose or not exact:
1782 1782 ui.status(_('adding %s\n') % match.rel(f))
1783 1783
1784 1784 if f in tracked: # hg prints 'adding' even if already tracked
1785 1785 if exact:
1786 1786 rejected.append(f)
1787 1787 continue
1788 1788 if not opts.get(r'dry_run'):
1789 1789 self._gitcommand(command + [f])
1790 1790
1791 1791 for f in rejected:
1792 1792 ui.warn(_("%s already tracked!\n") % match.abs(f))
1793 1793
1794 1794 return rejected
1795 1795
1796 1796 @annotatesubrepoerror
1797 1797 def remove(self):
1798 1798 if self._gitmissing():
1799 1799 return
1800 1800 if self.dirty():
1801 1801 self.ui.warn(_('not removing repo %s because '
1802 1802 'it has changes.\n') % self._relpath)
1803 1803 return
1804 1804 # we can't fully delete the repository as it may contain
1805 1805 # local-only history
1806 1806 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1807 1807 self._gitcommand(['config', 'core.bare', 'true'])
1808 1808 for f, kind in self.wvfs.readdir():
1809 1809 if f == '.git':
1810 1810 continue
1811 1811 if kind == stat.S_IFDIR:
1812 1812 self.wvfs.rmtree(f)
1813 1813 else:
1814 1814 self.wvfs.unlink(f)
1815 1815
1816 1816 def archive(self, archiver, prefix, match=None, decode=True):
1817 1817 total = 0
1818 1818 source, revision = self._state
1819 1819 if not revision:
1820 1820 return total
1821 1821 self._fetch(source, revision)
1822 1822
1823 1823 # Parse git's native archive command.
1824 1824 # This should be much faster than manually traversing the trees
1825 1825 # and objects with many subprocess calls.
1826 1826 tarstream = self._gitcommand(['archive', revision], stream=True)
1827 1827 tar = tarfile.open(fileobj=tarstream, mode='r|')
1828 1828 relpath = subrelpath(self)
1829 1829 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1830 1830 for i, info in enumerate(tar):
1831 1831 if info.isdir():
1832 1832 continue
1833 1833 if match and not match(info.name):
1834 1834 continue
1835 1835 if info.issym():
1836 1836 data = info.linkname
1837 1837 else:
1838 1838 data = tar.extractfile(info).read()
1839 1839 archiver.addfile(prefix + self._path + '/' + info.name,
1840 1840 info.mode, info.issym(), data)
1841 1841 total += 1
1842 1842 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1843 1843 unit=_('files'))
1844 1844 self.ui.progress(_('archiving (%s)') % relpath, None)
1845 1845 return total
1846 1846
1847 1847
1848 1848 @annotatesubrepoerror
1849 1849 def cat(self, match, fm, fntemplate, prefix, **opts):
1850 1850 rev = self._state[1]
1851 1851 if match.anypats():
1852 1852 return 1 #No support for include/exclude yet
1853 1853
1854 1854 if not match.files():
1855 1855 return 1
1856 1856
1857 1857 # TODO: add support for non-plain formatter (see cmdutil.cat())
1858 1858 for f in match.files():
1859 1859 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1860 1860 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1861 1861 self._ctx.node(),
1862 1862 pathname=self.wvfs.reljoin(prefix, f))
1863 1863 fp.write(output)
1864 1864 fp.close()
1865 1865 return 0
1866 1866
1867 1867
1868 1868 @annotatesubrepoerror
1869 1869 def status(self, rev2, **opts):
1870 1870 rev1 = self._state[1]
1871 1871 if self._gitmissing() or not rev1:
1872 1872 # if the repo is missing, return no results
1873 1873 return scmutil.status([], [], [], [], [], [], [])
1874 1874 modified, added, removed = [], [], []
1875 1875 self._gitupdatestat()
1876 1876 if rev2:
1877 1877 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1878 1878 else:
1879 1879 command = ['diff-index', '--no-renames', rev1]
1880 1880 out = self._gitcommand(command)
1881 1881 for line in out.split('\n'):
1882 1882 tab = line.find('\t')
1883 1883 if tab == -1:
1884 1884 continue
1885 1885 status, f = line[tab - 1], line[tab + 1:]
1886 1886 if status == 'M':
1887 1887 modified.append(f)
1888 1888 elif status == 'A':
1889 1889 added.append(f)
1890 1890 elif status == 'D':
1891 1891 removed.append(f)
1892 1892
1893 1893 deleted, unknown, ignored, clean = [], [], [], []
1894 1894
1895 1895 command = ['status', '--porcelain', '-z']
1896 1896 if opts.get(r'unknown'):
1897 1897 command += ['--untracked-files=all']
1898 1898 if opts.get(r'ignored'):
1899 1899 command += ['--ignored']
1900 1900 out = self._gitcommand(command)
1901 1901
1902 1902 changedfiles = set()
1903 1903 changedfiles.update(modified)
1904 1904 changedfiles.update(added)
1905 1905 changedfiles.update(removed)
1906 1906 for line in out.split('\0'):
1907 1907 if not line:
1908 1908 continue
1909 1909 st = line[0:2]
1910 1910 #moves and copies show 2 files on one line
1911 1911 if line.find('\0') >= 0:
1912 1912 filename1, filename2 = line[3:].split('\0')
1913 1913 else:
1914 1914 filename1 = line[3:]
1915 1915 filename2 = None
1916 1916
1917 1917 changedfiles.add(filename1)
1918 1918 if filename2:
1919 1919 changedfiles.add(filename2)
1920 1920
1921 1921 if st == '??':
1922 1922 unknown.append(filename1)
1923 1923 elif st == '!!':
1924 1924 ignored.append(filename1)
1925 1925
1926 1926 if opts.get(r'clean'):
1927 1927 out = self._gitcommand(['ls-files'])
1928 1928 for f in out.split('\n'):
1929 1929 if not f in changedfiles:
1930 1930 clean.append(f)
1931 1931
1932 1932 return scmutil.status(modified, added, removed, deleted,
1933 1933 unknown, ignored, clean)
1934 1934
1935 1935 @annotatesubrepoerror
1936 1936 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1937 1937 node1 = self._state[1]
1938 1938 cmd = ['diff', '--no-renames']
1939 1939 if opts[r'stat']:
1940 1940 cmd.append('--stat')
1941 1941 else:
1942 1942 # for Git, this also implies '-p'
1943 1943 cmd.append('-U%d' % diffopts.context)
1944 1944
1945 1945 gitprefix = self.wvfs.reljoin(prefix, self._path)
1946 1946
1947 1947 if diffopts.noprefix:
1948 1948 cmd.extend(['--src-prefix=%s/' % gitprefix,
1949 1949 '--dst-prefix=%s/' % gitprefix])
1950 1950 else:
1951 1951 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1952 1952 '--dst-prefix=b/%s/' % gitprefix])
1953 1953
1954 1954 if diffopts.ignorews:
1955 1955 cmd.append('--ignore-all-space')
1956 1956 if diffopts.ignorewsamount:
1957 1957 cmd.append('--ignore-space-change')
1958 1958 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1959 1959 and diffopts.ignoreblanklines:
1960 1960 cmd.append('--ignore-blank-lines')
1961 1961
1962 1962 cmd.append(node1)
1963 1963 if node2:
1964 1964 cmd.append(node2)
1965 1965
1966 1966 output = ""
1967 1967 if match.always():
1968 1968 output += self._gitcommand(cmd) + '\n'
1969 1969 else:
1970 1970 st = self.status(node2)[:3]
1971 1971 files = [f for sublist in st for f in sublist]
1972 1972 for f in files:
1973 1973 if match(f):
1974 1974 output += self._gitcommand(cmd + ['--', f]) + '\n'
1975 1975
1976 1976 if output.strip():
1977 1977 ui.write(output)
1978 1978
1979 1979 @annotatesubrepoerror
1980 1980 def revert(self, substate, *pats, **opts):
1981 1981 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1982 1982 if not opts.get(r'no_backup'):
1983 1983 status = self.status(None)
1984 1984 names = status.modified
1985 1985 for name in names:
1986 1986 bakname = scmutil.origpath(self.ui, self._subparent, name)
1987 1987 self.ui.note(_('saving current version of %s as %s\n') %
1988 1988 (name, bakname))
1989 1989 self.wvfs.rename(name, bakname)
1990 1990
1991 1991 if not opts.get(r'dry_run'):
1992 1992 self.get(substate, overwrite=True)
1993 1993 return []
1994 1994
1995 1995 def shortid(self, revid):
1996 1996 return revid[:7]
1997 1997
1998 1998 types = {
1999 1999 'hg': hgsubrepo,
2000 2000 'svn': svnsubrepo,
2001 2001 'git': gitsubrepo,
2002 2002 }
@@ -1,1819 +1,1819
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import errno
13 13 import getpass
14 14 import inspect
15 15 import os
16 16 import re
17 17 import signal
18 18 import socket
19 19 import subprocess
20 20 import sys
21 21 import tempfile
22 22 import traceback
23 23
24 24 from .i18n import _
25 25 from .node import hex
26 26
27 27 from . import (
28 28 color,
29 29 config,
30 30 configitems,
31 31 encoding,
32 32 error,
33 33 formatter,
34 34 progress,
35 35 pycompat,
36 36 rcutil,
37 37 scmutil,
38 38 util,
39 39 )
40 40
41 41 urlreq = util.urlreq
42 42
# for use with str.translate(None, _keepalnum), to keep just alphanumerics
# (a deletechars table holding every non-alphanumeric byte value 0-255)
_keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256))
                     if not c.isalnum())
46 46
# The config knobs that will be altered (if unset) by ui.tweakdefaults.
# Parsed by ui._maybetweakdefaults() and applied only for settings the
# user has not configured explicitly.
tweakrc = """
[ui]
# The rollback command is dangerous. As a rule, don't use it.
rollback = False

[commands]
# Make `hg status` emit cwd-relative paths by default.
status.relative = yes

[diff]
git = 1
"""
60 60
61 61 samplehgrcs = {
62 62 'user':
63 63 b"""# example user config (see 'hg help config' for more info)
64 64 [ui]
65 65 # name and email, e.g.
66 66 # username = Jane Doe <jdoe@example.com>
67 67 username =
68 68
69 69 # We recommend enabling tweakdefaults to get slight improvements to
70 70 # the UI over time. Make sure to set HGPLAIN in the environment when
71 71 # writing scripts!
72 72 # tweakdefaults = True
73 73
74 74 # uncomment to disable color in command output
75 75 # (see 'hg help color' for details)
76 76 # color = never
77 77
78 78 # uncomment to disable command output pagination
79 79 # (see 'hg help pager' for details)
80 80 # paginate = never
81 81
82 82 [extensions]
83 83 # uncomment these lines to enable some popular extensions
84 84 # (see 'hg help extensions' for more info)
85 85 #
86 86 # churn =
87 87 """,
88 88
89 89 'cloned':
90 90 b"""# example repository config (see 'hg help config' for more info)
91 91 [paths]
92 92 default = %s
93 93
94 94 # path aliases to other clones of this repo in URLs or filesystem paths
95 95 # (see 'hg help config.paths' for more info)
96 96 #
97 97 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
98 98 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
99 99 # my-clone = /home/jdoe/jdoes-clone
100 100
101 101 [ui]
102 102 # name and email (local to this repository, optional), e.g.
103 103 # username = Jane Doe <jdoe@example.com>
104 104 """,
105 105
106 106 'local':
107 107 b"""# example repository config (see 'hg help config' for more info)
108 108 [paths]
109 109 # path aliases to other clones of this repo in URLs or filesystem paths
110 110 # (see 'hg help config.paths' for more info)
111 111 #
112 112 # default = http://example.com/hg/example-repo
113 113 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
114 114 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
115 115 # my-clone = /home/jdoe/jdoes-clone
116 116
117 117 [ui]
118 118 # name and email (local to this repository, optional), e.g.
119 119 # username = Jane Doe <jdoe@example.com>
120 120 """,
121 121
122 122 'global':
123 123 b"""# example system-wide hg config (see 'hg help config' for more info)
124 124
125 125 [ui]
126 126 # uncomment to disable color in command output
127 127 # (see 'hg help color' for details)
128 128 # color = never
129 129
130 130 # uncomment to disable command output pagination
131 131 # (see 'hg help pager' for details)
132 132 # paginate = never
133 133
134 134 [extensions]
135 135 # uncomment these lines to enable some popular extensions
136 136 # (see 'hg help extensions' for more info)
137 137 #
138 138 # blackbox =
139 139 # churn =
140 140 """,
141 141 }
142 142
def _maybestrurl(maybebytes):
    """Convert a bytes URL to the native str type, passing None through."""
    if maybebytes is not None:
        return pycompat.strurl(maybebytes)
    return None
147 147
def _maybebytesurl(maybestr):
    """Convert a native str URL back to bytes, passing None through."""
    if maybestr is not None:
        return pycompat.bytesurl(maybestr)
    return None
152 152
class httppasswordmgrdbproxy(object):
    """Delays loading urllib2 until it's needed."""

    def __init__(self):
        # real manager is created lazily by _get_mgr() on first use
        self._mgr = None

    def _get_mgr(self):
        mgr = self._mgr
        if mgr is None:
            mgr = self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
        return mgr

    def add_password(self, realm, uris, user, passwd):
        # urllib wants native strs; convert bytes arguments on the way in
        if isinstance(uris, tuple):
            convuris = tuple(_maybestrurl(u) for u in uris)
        else:
            convuris = _maybestrurl(uris)
        return self._get_mgr().add_password(
            _maybestrurl(realm), convuris,
            _maybestrurl(user), _maybestrurl(passwd))

    def find_user_password(self, realm, uri):
        # convert urllib's native-str answer back to bytes for callers
        creds = self._get_mgr().find_user_password(_maybestrurl(realm),
                                                   _maybestrurl(uri))
        return tuple(_maybebytesurl(v) for v in creds)
176 176
def _catchterm(*args):
    # signal handler: turn a termination signal into a catchable exception
    raise error.SignalInterrupt
179 179
# unique sentinel object used to detect that no default value has been
# provided when retrieving a configuration value (None cannot serve as the
# marker because it is itself a legitimate default).
_unset = object()
183 183
184 184 class ui(object):
    def __init__(self, src=None):
        """Create a fresh new ui object if no src given

        Use uimod.ui.load() to create a ui which knows global and user configs.
        In most cases, you should use ui.copy() to create a copy of an existing
        ui object.
        """
        # _buffers: used for temporary capture of output
        self._buffers = []
        # _exithandlers: callbacks run at the end of a request
        self._exithandlers = []
        # 3-tuple describing how each buffer in the stack behaves.
        # Values are (capture stderr, capture subprocesses, apply labels).
        self._bufferstates = []
        # When a buffer is active, defines whether we are expanding labels.
        # This exists to prevent an extra list lookup.
        self._bufferapplylabels = None
        self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
        self._reportuntrusted = True
        self._knownconfig = configitems.coreitems
        self._ocfg = config.config() # overlay
        self._tcfg = config.config() # trusted
        self._ucfg = config.config() # untrusted
        self._trustusers = set()
        self._trustgroups = set()
        self.callhooks = True
        # Insecure server connections requested.
        self.insecureconnections = False
        # Blocked time
        self.logblockedtimes = False
        # color mode: see mercurial/color.py for possible value
        self._colormode = None
        self._terminfoparams = {}
        self._styles = {}

        if src:
            # copying an existing ui: streams, exit handlers, and the
            # password manager are shared; config objects and sets are
            # copied so the new ui can diverge independently
            self._exithandlers = src._exithandlers
            self.fout = src.fout
            self.ferr = src.ferr
            self.fin = src.fin
            self.pageractive = src.pageractive
            self._disablepager = src._disablepager
            self._tweaked = src._tweaked

            self._tcfg = src._tcfg.copy()
            self._ucfg = src._ucfg.copy()
            self._ocfg = src._ocfg.copy()
            self._trustusers = src._trustusers.copy()
            self._trustgroups = src._trustgroups.copy()
            self.environ = src.environ
            self.callhooks = src.callhooks
            self.insecureconnections = src.insecureconnections
            self._colormode = src._colormode
            self._terminfoparams = src._terminfoparams.copy()
            self._styles = src._styles.copy()

            self.fixconfig()

            self.httppasswordmgrdb = src.httppasswordmgrdb
            self._blockedtimes = src._blockedtimes
        else:
            self.fout = util.stdout
            self.ferr = util.stderr
            self.fin = util.stdin
            self.pageractive = False
            self._disablepager = False
            self._tweaked = False

            # shared read-only environment
            self.environ = encoding.environ

            self.httppasswordmgrdb = httppasswordmgrdbproxy()
            self._blockedtimes = collections.defaultdict(int)

        # subset of the environment that may be exposed, controlled by
        # experimental.exportableenviron ('*' exposes everything)
        allowed = self.configlist('experimental', 'exportableenviron')
        if '*' in allowed:
            self._exportableenviron = self.environ
        else:
            self._exportableenviron = {}
            for k in allowed:
                if k in self.environ:
                    self._exportableenviron[k] = self.environ[k]
267 267
    @classmethod
    def load(cls):
        """Create a ui and load global and user configs"""
        u = cls()
        # we always trust global config files and environment variables
        for t, f in rcutil.rccomponents():
            if t == 'path':
                u.readconfig(f, trust=True)
            elif t == 'items':
                # pre-parsed (section, name, value, source) tuples, e.g.
                # from environment-derived configuration
                sections = set()
                for section, name, value, source in f:
                    # do not set u._ocfg
                    # XXX clean this up once immutable config object is a thing
                    u._tcfg.set(section, name, value, source)
                    u._ucfg.set(section, name, value, source)
                    sections.add(section)
                for section in sections:
                    u.fixconfig(section=section)
            else:
                raise error.ProgrammingError('unknown rctype: %s' % t)
        u._maybetweakdefaults()
        return u
290 290
    def _maybetweakdefaults(self):
        """Apply tweakrc defaults once, unless disabled or in plain mode."""
        if not self.configbool('ui', 'tweakdefaults'):
            return
        if self._tweaked or self.plain('tweakdefaults'):
            return

        # Note: it is SUPER IMPORTANT that you set self._tweaked to
        # True *before* any calls to setconfig(), otherwise you'll get
        # infinite recursion between setconfig and this method.
        #
        # TODO: We should extract an inner method in setconfig() to
        # avoid this weirdness.
        self._tweaked = True
        tmpcfg = config.config()
        tmpcfg.parse('<tweakdefaults>', tweakrc)
        for section in tmpcfg:
            for name, value in tmpcfg.items(section):
                # only fill in settings the user has not configured
                if not self.hasconfig(section, name):
                    self.setconfig(section, name, value, "<tweakdefaults>")
310 310
311 311 def copy(self):
312 312 return self.__class__(self)
313 313
    def resetstate(self):
        """Clear internal state that shouldn't persist across commands"""
        if self._progbar:
            self._progbar.resetstate() # reset last-print time of progress bar
        # fresh proxy so stored HTTP credentials don't carry over
        self.httppasswordmgrdb = httppasswordmgrdbproxy()
319 319
    @contextlib.contextmanager
    def timeblockedsection(self, key):
        """Context manager adding the wall-clock time spent in its body, in
        milliseconds, to self._blockedtimes['<key>_blocked']."""
        # this is open-coded below - search for timeblockedsection to find them
        starttime = util.timer()
        try:
            yield
        finally:
            self._blockedtimes[key + '_blocked'] += \
                (util.timer() - starttime) * 1000
329 329
330 330 def formatter(self, topic, opts):
331 331 return formatter.formatter(self, self, topic, opts)
332 332
333 333 def _trusted(self, fp, f):
334 334 st = util.fstat(fp)
335 335 if util.isowner(st):
336 336 return True
337 337
338 338 tusers, tgroups = self._trustusers, self._trustgroups
339 339 if '*' in tusers or '*' in tgroups:
340 340 return True
341 341
342 342 user = util.username(st.st_uid)
343 343 group = util.groupname(st.st_gid)
344 344 if user in tusers or group in tgroups or user == util.username():
345 345 return True
346 346
347 347 if self._reportuntrusted:
348 348 self.warn(_('not trusting file %s from untrusted '
349 349 'user %s, group %s\n') % (f, user, group))
350 350 return False
351 351
    def readconfig(self, filename, root=None, trust=False,
                   sections=None, remap=None):
        """Read config from ``filename`` into this ui.

        Untrusted files only populate the untrusted config layer; plain
        mode strips presentation-affecting options before merging.  A
        missing file is silently ignored unless ``sections`` was given.
        """
        try:
            fp = open(filename, u'rb')
        except IOError:
            if not sections: # ignore unless we were looking for something
                return
            raise

        cfg = config.config()
        trusted = sections or trust or self._trusted(fp, filename)

        try:
            cfg.read(filename, fp, sections=sections, remap=remap)
            fp.close()
        except error.ConfigError as inst:
            if trusted:
                raise
            self.warn(_("ignored: %s\n") % str(inst))

        if self.plain():
            # plain mode: drop options that change output or behavior
            for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
                      'logtemplate', 'statuscopies', 'style',
                      'traceback', 'verbose'):
                if k in cfg['ui']:
                    del cfg['ui'][k]
            for k, v in cfg.items('defaults'):
                del cfg['defaults'][k]
            for k, v in cfg.items('commands'):
                del cfg['commands'][k]
        # Don't remove aliases from the configuration if in the exceptionlist
        if self.plain('alias'):
            for k, v in cfg.items('alias'):
                del cfg['alias'][k]
        if self.plain('revsetalias'):
            for k, v in cfg.items('revsetalias'):
                del cfg['revsetalias'][k]
        if self.plain('templatealias'):
            for k, v in cfg.items('templatealias'):
                del cfg['templatealias'][k]

        if trusted:
            self._tcfg.update(cfg)
            self._tcfg.update(self._ocfg)
        self._ucfg.update(cfg)
        self._ucfg.update(self._ocfg)

        if root is None:
            root = os.path.expanduser('~')
        self.fixconfig(root=root)
402 402
    def fixconfig(self, root=None, section=None):
        """Normalize config state after a (re)load.

        Expands and absolutizes [paths] entries, refreshes the cached ui
        flags (debug/verbose/quiet/traceback/...), and updates trust
        lists.  ``section`` restricts the work to one section; None
        processes all of them.
        """
        if section in (None, 'paths'):
            # expand vars and ~
            # translate paths relative to root (or home) into absolute paths
            root = root or pycompat.getcwd()
            for c in self._tcfg, self._ucfg, self._ocfg:
                for n, p in c.items('paths'):
                    # Ignore sub-options.
                    if ':' in n:
                        continue
                    if not p:
                        continue
                    if '%%' in p:
                        s = self.configsource('paths', n) or 'none'
                        self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
                                  % (n, p, s))
                        p = p.replace('%%', '%')
                    p = util.expandpath(p)
                    if not util.hasscheme(p) and not os.path.isabs(p):
                        p = os.path.normpath(os.path.join(root, p))
                    c.set("paths", n, p)

        if section in (None, 'ui'):
            # update ui options
            self.debugflag = self.configbool('ui', 'debug')
            self.verbose = self.debugflag or self.configbool('ui', 'verbose')
            self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
            if self.verbose and self.quiet:
                self.quiet = self.verbose = False
            self._reportuntrusted = self.debugflag or self.configbool("ui",
                "report_untrusted")
            self.tracebackflag = self.configbool('ui', 'traceback')
            self.logblockedtimes = self.configbool('ui', 'logblockedtimes')

        if section in (None, 'trusted'):
            # update trust information
            self._trustusers.update(self.configlist('trusted', 'users'))
            self._trustgroups.update(self.configlist('trusted', 'groups'))
441 441
442 442 def backupconfig(self, section, item):
443 443 return (self._ocfg.backup(section, item),
444 444 self._tcfg.backup(section, item),
445 445 self._ucfg.backup(section, item),)
446 446 def restoreconfig(self, data):
447 447 self._ocfg.restore(data[0])
448 448 self._tcfg.restore(data[1])
449 449 self._ucfg.restore(data[2])
450 450
451 451 def setconfig(self, section, name, value, source=''):
452 452 for cfg in (self._ocfg, self._tcfg, self._ucfg):
453 453 cfg.set(section, name, value, source)
454 454 self.fixconfig(section=section)
455 455 self._maybetweakdefaults()
456 456
457 457 def _data(self, untrusted):
458 458 return untrusted and self._ucfg or self._tcfg
459 459
460 460 def configsource(self, section, name, untrusted=False):
461 461 return self._data(untrusted).source(section, name)
462 462
463 463 def config(self, section, name, default=_unset, untrusted=False):
464 464 """return the plain string version of a config"""
465 465 value = self._config(section, name, default=default,
466 466 untrusted=untrusted)
467 467 if value is _unset:
468 468 return None
469 469 return value
470 470
    def _config(self, section, name, default=_unset, untrusted=False):
        """Look up a config value, resolving declared aliases and defaults.

        Returns the first value found along the alias chain, otherwise the
        computed default (possibly the ``_unset`` sentinel, which config()
        maps to None).  Emits devel warnings for misdeclared defaults and
        for untrusted values that differ from the trusted ones.
        """
        value = default
        item = self._knownconfig.get(section, {}).get(name)
        alternates = [(section, name)]

        if item is not None:
            alternates.extend(item.alias)

        if default is _unset:
            # derive the default from the registered config item, if any
            if item is None:
                value = default
            elif item.default is configitems.dynamicdefault:
                value = None
                msg = "config item requires an explicit default value: '%s.%s'"
                msg %= (section, name)
                self.develwarn(msg, 2, 'warn-config-default')
            elif callable(item.default):
                value = item.default()
            else:
                value = item.default
        elif (item is not None
              and item.default is not configitems.dynamicdefault):
            msg = ("specifying a default value for a registered "
                   "config item: '%s.%s' '%s'")
            msg %= (section, name, default)
            self.develwarn(msg, 2, 'warn-config-default')

        for s, n in alternates:
            candidate = self._data(untrusted).get(s, n, None)
            if candidate is not None:
                value = candidate
                section = s
                name = n
                break

        if self.debugflag and not untrusted and self._reportuntrusted:
            for s, n in alternates:
                uvalue = self._ucfg.get(s, n)
                if uvalue is not None and uvalue != value:
                    self.debug("ignoring untrusted configuration option "
                               "%s.%s = %s\n" % (s, n, uvalue))
        return value
513 513
    def configsuboptions(self, section, name, default=_unset, untrusted=False):
        """Get a config option and all sub-options.

        Some config options have sub-options that are declared with the
        format "key:opt = value". This method is used to return the main
        option and all its declared sub-options.

        Returns a 2-tuple of ``(option, sub-options)``, where `sub-options``
        is a dict of defined sub-options where keys and values are strings.
        """
        main = self.config(section, name, default, untrusted=untrusted)
        data = self._data(untrusted)
        sub = {}
        prefix = '%s:' % name
        for k, v in data.items(section):
            # collect "name:opt" entries, keyed by the bare "opt" part
            if k.startswith(prefix):
                sub[k[len(prefix):]] = v

        if self.debugflag and not untrusted and self._reportuntrusted:
            for k, v in sub.items():
                uvalue = self._ucfg.get(section, '%s:%s' % (name, k))
                if uvalue is not None and uvalue != v:
                    self.debug('ignoring untrusted configuration option '
                               '%s:%s.%s = %s\n' % (section, name, k, uvalue))

        return main, sub
540 540
    def configpath(self, section, name, default=_unset, untrusted=False):
        'get a path config item, expanded relative to repo root or config file'
        v = self.config(section, name, default, untrusted)
        if v is None:
            return None
        # NOTE(review): with `or`, this branch is also taken for absolute
        # paths lacking '://'; that is harmless because os.path.join with
        # an absolute second argument returns it unchanged, but the
        # condition reads as if `and` was intended -- confirm before
        # changing, callers may rely on current behavior.
        if not os.path.isabs(v) or "://" not in v:
            src = self.configsource(section, name, untrusted)
            if ':' in src:
                base = os.path.dirname(src.rsplit(':')[0])
                v = os.path.join(base, os.path.expanduser(v))
        return v
552 552
    def configbool(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a boolean

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'true', b'yes')
        >>> u.configbool(s, b'true')
        True
        >>> u.setconfig(s, b'false', b'no')
        >>> u.configbool(s, b'false')
        False
        >>> u.configbool(s, b'unknown')
        False
        >>> u.configbool(s, b'unknown', True)
        True
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configbool(s, b'invalid')
        Traceback (most recent call last):
        ...
        ConfigError: foo.invalid is not a boolean ('somevalue')
        """

        v = self._config(section, name, default, untrusted=untrusted)
        if v is None:
            return v
        if v is _unset:
            # option missing and no default given: False, not the sentinel
            if default is _unset:
                return False
            return default
        if isinstance(v, bool):
            return v
        b = util.parsebool(v)
        if b is None:
            raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
                                    % (section, name, v))
        return b
588 588
    def configwith(self, convert, section, name, default=_unset,
                   desc=None, untrusted=False):
        """parse a configuration element with a conversion function

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'float1', b'42')
        >>> u.configwith(float, s, b'float1')
        42.0
        >>> u.setconfig(s, b'float2', b'-4.25')
        >>> u.configwith(float, s, b'float2')
        -4.25
        >>> u.configwith(float, s, b'unknown', 7)
        7.0
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configwith(float, s, b'invalid')
        Traceback (most recent call last):
        ...
        ConfigError: foo.invalid is not a valid float ('somevalue')
        >>> u.configwith(float, s, b'invalid', desc=b'womble')
        Traceback (most recent call last):
        ...
        ConfigError: foo.invalid is not a valid womble ('somevalue')
        """

        v = self.config(section, name, default, untrusted)
        if v is None:
            return v # do not attempt to convert None
        try:
            return convert(v)
        except (ValueError, error.ParseError):
            # fall back to the converter's own name for the error message
            if desc is None:
                desc = pycompat.sysbytes(convert.__name__)
            raise error.ConfigError(_("%s.%s is not a valid %s ('%s')")
                                    % (section, name, desc, v))
623 623
    def configint(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as an integer

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'int1', b'42')
        >>> u.configint(s, b'int1')
        42
        >>> u.setconfig(s, b'int2', b'-42')
        >>> u.configint(s, b'int2')
        -42
        >>> u.configint(s, b'unknown', 7)
        7
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configint(s, b'invalid')
        Traceback (most recent call last):
        ...
        ConfigError: foo.invalid is not a valid integer ('somevalue')
        """

        # thin wrapper over configwith() using int as the converter
        return self.configwith(int, section, name, default, 'integer',
                               untrusted)
645 645
    def configbytes(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a quantity in bytes

        Units can be specified as b (bytes), k or kb (kilobytes), m or
        mb (megabytes), g or gb (gigabytes).

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'val1', b'42')
        >>> u.configbytes(s, b'val1')
        42
        >>> u.setconfig(s, b'val2', b'42.5 kb')
        >>> u.configbytes(s, b'val2')
        43520
        >>> u.configbytes(s, b'unknown', b'7 MB')
        7340032
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configbytes(s, b'invalid')
        Traceback (most recent call last):
        ...
        ConfigError: foo.invalid is not a byte quantity ('somevalue')
        """

        value = self._config(section, name, default, untrusted)
        if value is _unset:
            if default is _unset:
                default = 0
            value = default
        if not isinstance(value, bytes):
            # an already-numeric default is returned as-is, unparsed
            return value
        try:
            return util.sizetoint(value)
        except error.ParseError:
            raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
                                    % (section, name, value))
680 680
    def configlist(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a list of comma/space separated
        strings

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
        >>> u.configlist(s, b'list1')
        ['this', 'is', 'a small', 'test']
        """
        # default is not always a list
        v = self.configwith(config.parselist, section, name, default,
                            'list', untrusted)
        if isinstance(v, bytes):
            # a bytes default was returned unparsed; split it now
            return config.parselist(v)
        elif v is None:
            return []
        return v
698 698
    def configdate(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a tuple of ints

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'date', b'0 0')
        >>> u.configdate(s, b'date')
        (0, 0)
        """
        # only parse when a value (or truthy default) is actually present
        if self.config(section, name, default, untrusted):
            return self.configwith(util.parsedate, section, name, default,
                                   'date', untrusted)
        if default is _unset:
            return None
        return default
713 713
714 714 def hasconfig(self, section, name, untrusted=False):
715 715 return self._data(untrusted).hasitem(section, name)
716 716
717 717 def has_section(self, section, untrusted=False):
718 718 '''tell whether section exists in config.'''
719 719 return section in self._data(untrusted)
720 720
721 721 def configitems(self, section, untrusted=False, ignoresub=False):
722 722 items = self._data(untrusted).items(section)
723 723 if ignoresub:
724 724 newitems = {}
725 725 for k, v in items:
726 726 if ':' not in k:
727 727 newitems[k] = v
728 728 items = newitems.items()
729 729 if self.debugflag and not untrusted and self._reportuntrusted:
730 730 for k, v in self._ucfg.items(section):
731 731 if self._tcfg.get(section, k) != v:
732 732 self.debug("ignoring untrusted configuration option "
733 733 "%s.%s = %s\n" % (section, k, v))
734 734 return items
735 735
736 736 def walkconfig(self, untrusted=False):
737 737 cfg = self._data(untrusted)
738 738 for section in cfg.sections():
739 739 for name, value in self.configitems(section, untrusted):
740 740 yield section, name, value
741 741
    def plain(self, feature=None):
        '''is plain mode active?

        Plain mode means that all configuration variables which affect
        the behavior and output of Mercurial should be
        ignored. Additionally, the output should be stable,
        reproducible and suitable for use in scripts or applications.

        The only way to trigger plain mode is by setting either the
        `HGPLAIN' or `HGPLAINEXCEPT' environment variables.

        The return value can either be
        - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
        - True otherwise
        '''
        if ('HGPLAIN' not in encoding.environ and
            'HGPLAINEXCEPT' not in encoding.environ):
            return False
        # note: splitting an empty HGPLAINEXCEPT yields [''], which is
        # truthy, so an empty exception list still means plain mode for
        # every (non-empty) feature name
        exceptions = encoding.environ.get('HGPLAINEXCEPT',
                                          '').strip().split(',')
        if feature and exceptions:
            return feature not in exceptions
        return True
765 765
    def username(self):
        """Return default username to be used in commits.

        Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
        and stop searching if one of these is set.
        If not found and ui.askusername is True, ask the user, else use
        ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".

        Raises Abort when no username can be determined or when the
        result contains a newline.
        """
        user = encoding.environ.get("HGUSER")
        if user is None:
            user = self.config("ui", "username")
            if user is not None:
                user = os.path.expandvars(user)
        if user is None:
            user = encoding.environ.get("EMAIL")
        if user is None and self.configbool("ui", "askusername"):
            user = self.prompt(_("enter a commit username:"), default=None)
        if user is None and not self.interactive():
            try:
                # last resort: synthesize user@host from system accounts
                user = '%s@%s' % (util.getuser(), socket.getfqdn())
                self.warn(_("no username found, using '%s' instead\n") % user)
            except KeyError:
                pass
        if not user:
            raise error.Abort(_('no username supplied'),
                              hint=_("use 'hg config --edit' "
                                     'to set your username'))
        if "\n" in user:
            raise error.Abort(_("username %s contains a newline\n")
                              % repr(user))
        return user
797 797
798 798 def shortuser(self, user):
799 799 """Return a short representation of a user name or email address."""
800 800 if not self.verbose:
801 801 user = util.shortuser(user)
802 802 return user
803 803
804 804 def expandpath(self, loc, default=None):
805 805 """Return repository location relative to cwd or from [paths]"""
806 806 try:
807 807 p = self.paths.getpath(loc)
808 808 if p:
809 809 return p.rawloc
810 810 except error.RepoError:
811 811 pass
812 812
813 813 if default:
814 814 try:
815 815 p = self.paths.getpath(default)
816 816 if p:
817 817 return p.rawloc
818 818 except error.RepoError:
819 819 pass
820 820
821 821 return loc
822 822
    @util.propertycache
    def paths(self):
        # lazily instantiate the module-level paths collection for this ui;
        # cached on first access by propertycache
        return paths(self)
826 826
    def pushbuffer(self, error=False, subproc=False, labeled=False):
        """install a buffer to capture standard output of the ui object

        If error is True, the error output will be captured too.

        If subproc is True, output from subprocesses (typically hooks) will be
        captured too.

        If labeled is True, any labels associated with buffered
        output will be handled. By default, this has no effect
        on the output returned, but extensions and GUI tools may
        handle this argument and returned styled output. If output
        is being buffered so it can be captured and parsed or
        processed, labeled should not be set to True.
        """
        self._buffers.append([])
        self._bufferstates.append((error, subproc, labeled))
        # cache the top buffer's label flag so write() avoids a list lookup
        self._bufferapplylabels = labeled
845 845
846 846 def popbuffer(self):
847 847 '''pop the last buffer and return the buffered output'''
848 848 self._bufferstates.pop()
849 849 if self._bufferstates:
850 850 self._bufferapplylabels = self._bufferstates[-1][2]
851 851 else:
852 852 self._bufferapplylabels = None
853 853
854 854 return "".join(self._buffers.pop())
855 855
    def write(self, *args, **opts):
        '''write args to output

        By default, this method simply writes to the buffer or stdout.
        Color mode can be set on the UI class to have the output decorated
        with color modifier before being written to stdout.

        The color used is controlled by an optional keyword argument, "label".
        This should be a string containing label names separated by space.
        Label names take the form of "topic.type". For example, ui.debug()
        issues a label of "ui.debug".

        When labeling output for a specific command, a label of
        "cmdname.type" is recommended. For example, status issues
        a label of "status.modified" for modified files.
        '''
        # prompt output bypasses capture buffers (opts['prompt'])
        if self._buffers and not opts.get('prompt', False):
            if self._bufferapplylabels:
                label = opts.get('label', '')
                self._buffers[-1].extend(self.label(a, label) for a in args)
            else:
                self._buffers[-1].extend(args)
        elif self._colormode == 'win32':
            # windows color printing is its own can of crab, defer to
            # the color module and that is it.
            color.win32print(self, self._write, *args, **opts)
        else:
            msgs = args
            if self._colormode is not None:
                label = opts.get('label', '')
                msgs = [self.label(a, label) for a in args]
            self._write(*msgs, **opts)
888 888
    def _write(self, *msgs, **opts):
        """Write raw strings to fout, accounting time as stdio-blocked."""
        self._progclear()
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            for a in msgs:
                self.fout.write(a)
        except IOError as err:
            raise error.StdioError(err)
        finally:
            self._blockedtimes['stdio_blocked'] += \
                (util.timer() - starttime) * 1000
901 901
902 902 def write_err(self, *args, **opts):
903 903 self._progclear()
904 904 if self._bufferstates and self._bufferstates[-1][0]:
905 905 self.write(*args, **opts)
906 906 elif self._colormode == 'win32':
907 907 # windows color printing is its own can of crab, defer to
908 908 # the color module and that is it.
909 909 color.win32print(self, self._write_err, *args, **opts)
910 910 else:
911 911 msgs = args
912 912 if self._colormode is not None:
913 913 label = opts.get('label', '')
914 914 msgs = [self.label(a, label) for a in args]
915 915 self._write_err(*msgs, **opts)
916 916
    def _write_err(self, *msgs, **opts):
        # Low-level write to self.ferr; fout is flushed first so
        # interleaved stdout/stderr output appears in order.
        try:
            with self.timeblockedsection('stdio'):
                if not getattr(self.fout, 'closed', False):
                    self.fout.flush()
                for a in msgs:
                    self.ferr.write(a)
                # stderr may be buffered under win32 when redirected to files,
                # including stdout.
                if not getattr(self.ferr, 'closed', False):
                    self.ferr.flush()
        except IOError as inst:
            # broken-pipe style errors are swallowed; anything else becomes
            # a StdioError
            if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                raise error.StdioError(inst)
931 931
    def flush(self):
        # Flush both streams; ferr is flushed even when fout's flush
        # raises, and the blocked-time accounting always runs.
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            try:
                self.fout.flush()
            except IOError as err:
                if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                    raise error.StdioError(err)
        finally:
            try:
                self.ferr.flush()
            except IOError as err:
                if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                    raise error.StdioError(err)
            finally:
                self._blockedtimes['stdio_blocked'] += \
                    (util.timer() - starttime) * 1000
950 950
951 951 def _isatty(self, fh):
952 952 if self.configbool('ui', 'nontty'):
953 953 return False
954 954 return util.isatty(fh)
955 955
    def disablepager(self):
        # Permanently opt this ui out of paging; pager() checks this first.
        self._disablepager = True
958 958
    def pager(self, command):
        """Start a pager for subsequent command output.

        Commands which produce a long stream of output should call
        this function to activate the user's preferred pagination
        mechanism (which may be no pager). Calling this function
        precludes any future use of interactive functionality, such as
        prompting the user or activating curses.

        Args:
            command: The full, non-aliased name of the command. That is, "log"
                not "history", "summary" not "summ", etc.
        """
        if (self._disablepager
            or self.pageractive):
            # how pager should do is already determined
            return

        if not command.startswith('internal-always-') and (
            # explicit --pager=on (= 'internal-always-' prefix) should
            # take precedence over disabling factors below
            command in self.configlist('pager', 'ignore')
            or not self.configbool('ui', 'paginate')
            or not self.configbool('pager', 'attend-' + command, True)
            # TODO: if we want to allow HGPLAINEXCEPT=pager,
            # formatted() will need some adjustment.
            or not self.formatted()
            or self.plain()
            or self._buffers
            # TODO: expose debugger-enabled on the UI object
            or '--debugger' in pycompat.sysargv):
            # We only want to paginate if the ui appears to be
            # interactive, the user didn't say HGPLAIN or
            # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
            return

        pagercmd = self.config('pager', 'pager', rcutil.fallbackpager)
        if not pagercmd:
            return

        # export the default pager environment (from rcutil) without
        # clobbering anything the user already set
        pagerenv = {}
        for name, value in rcutil.defaultpagerenv().items():
            if name not in encoding.environ:
                pagerenv[name] = value

        self.debug('starting pager for command %r\n' % command)
        self.flush()

        wasformatted = self.formatted()
        if util.safehasattr(signal, "SIGPIPE"):
            signal.signal(signal.SIGPIPE, _catchterm)
        if self._runpager(pagercmd, pagerenv):
            self.pageractive = True
            # Preserve the formatted-ness of the UI. This is important
            # because we mess with stdout, which might confuse
            # auto-detection of things being formatted.
            self.setconfig('ui', 'formatted', wasformatted, 'pager')
            self.setconfig('ui', 'interactive', False, 'pager')

            # If pagermode differs from color.mode, reconfigure color now that
            # pageractive is set.
            cm = self._colormode
            if cm != self.config('color', 'pagermode', cm):
                color.setup(self)
        else:
            # If the pager can't be spawned in dispatch when --pager=on is
            # given, don't try again when the command runs, to avoid a duplicate
            # warning about a missing pager command.
            self.disablepager()
1028 1028
    def _runpager(self, command, env=None):
        """Actually start the pager and set up file descriptors.

        This is separate in part so that extensions (like chg) can
        override how a pager is invoked.

        Returns True when a pager process was started (stdout, and stderr
        when it is a tty, are redirected into the pager's stdin), False
        otherwise.
        """
        if command == 'cat':
            # Save ourselves some work.
            return False
        # If the command doesn't contain any of these characters, we
        # assume it's a binary and exec it directly. This means for
        # simple pager command configurations, we can degrade
        # gracefully and tell the user about their broken pager.
        shell = any(c in command for c in "|&;<>()$`\\\"' \t\n*?[#~=%")

        if pycompat.iswindows and not shell:
            # Window's built-in `more` cannot be invoked with shell=False, but
            # its `more.com` can. Hide this implementation detail from the
            # user so we can also get sane bad PAGER behavior. MSYS has
            # `more.exe`, so do a cmd.exe style resolution of the executable to
            # determine which one to use.
            fullcmd = util.findexe(command)
            if not fullcmd:
                self.warn(_("missing pager command '%s', skipping pager\n")
                          % command)
                return False

            command = fullcmd

        try:
            pager = subprocess.Popen(
                command, shell=shell, bufsize=-1,
                close_fds=util.closefds, stdin=subprocess.PIPE,
                stdout=util.stdout, stderr=util.stderr,
                env=util.shellenviron(env))
        except OSError as e:
            if e.errno == errno.ENOENT and not shell:
                self.warn(_("missing pager command '%s', skipping pager\n")
                          % command)
                return False
            raise

        # back up original file descriptors
        stdoutfd = os.dup(util.stdout.fileno())
        stderrfd = os.dup(util.stderr.fileno())

        os.dup2(pager.stdin.fileno(), util.stdout.fileno())
        if self._isatty(util.stderr):
            os.dup2(pager.stdin.fileno(), util.stderr.fileno())

        # registered via self.atexit so the pager is torn down when the
        # request finishes
        @self.atexit
        def killpager():
            if util.safehasattr(signal, "SIGINT"):
                signal.signal(signal.SIGINT, signal.SIG_IGN)
            # restore original fds, closing pager.stdin copies in the process
            os.dup2(stdoutfd, util.stdout.fileno())
            os.dup2(stderrfd, util.stderr.fileno())
            pager.stdin.close()
            pager.wait()

        return True
1090 1090
1091 1091 def atexit(self, func, *args, **kwargs):
1092 1092 '''register a function to run after dispatching a request
1093 1093
1094 1094 Handlers do not stay registered across request boundaries.'''
1095 1095 self._exithandlers.append((func, args, kwargs))
1096 1096 return func
1097 1097
    def interface(self, feature):
        """what interface to use for interactive console features?

        The interface is controlled by the value of `ui.interface` but also by
        the value of feature-specific configuration. For example:

        ui.interface.histedit = text
        ui.interface.chunkselector = curses

        Here the features are "histedit" and "chunkselector".

        The configuration above means that the default interfaces for commands
        is curses, the interface for histedit is text and the interface for
        selecting chunk is crecord (the best curses interface available).

        Consider the following example:
        ui.interface = curses
        ui.interface.histedit = text

        Then histedit will use the text interface and chunkselector will use
        the default curses interface (crecord at the moment).
        """
        alldefaults = frozenset(["text", "curses"])

        # interfaces each feature is known to support
        featureinterfaces = {
            "chunkselector": [
                "text",
                "curses",
            ]
        }

        # Feature-specific interface
        if feature not in featureinterfaces.keys():
            # Programming error, not user error
            raise ValueError("Unknown feature requested %s" % feature)

        availableinterfaces = frozenset(featureinterfaces[feature])
        if alldefaults > availableinterfaces:
            # Programming error, not user error. We need a use case to
            # define the right thing to do here.
            raise ValueError(
                "Feature %s does not handle all default interfaces" %
                feature)

        if self.plain():
            return "text"

        # Default interface for all the features
        defaultinterface = "text"
        i = self.config("ui", "interface")
        if i in alldefaults:
            defaultinterface = i

        # feature-specific setting overrides the global default when valid
        choseninterface = defaultinterface
        f = self.config("ui", "interface.%s" % feature)
        if f in availableinterfaces:
            choseninterface = f

        # warn (but continue) about invalid configured values
        if i is not None and defaultinterface != i:
            if f is not None:
                self.warn(_("invalid value for ui.interface: %s\n") %
                          (i,))
            else:
                self.warn(_("invalid value for ui.interface: %s (using %s)\n") %
                          (i, choseninterface))
        if f is not None and choseninterface != f:
            self.warn(_("invalid value for ui.interface.%s: %s (using %s)\n") %
                      (feature, f, choseninterface))

        return choseninterface
1168 1168
1169 1169 def interactive(self):
1170 1170 '''is interactive input allowed?
1171 1171
1172 1172 An interactive session is a session where input can be reasonably read
1173 1173 from `sys.stdin'. If this function returns false, any attempt to read
1174 1174 from stdin should fail with an error, unless a sensible default has been
1175 1175 specified.
1176 1176
1177 1177 Interactiveness is triggered by the value of the `ui.interactive'
1178 1178 configuration variable or - if it is unset - when `sys.stdin' points
1179 1179 to a terminal device.
1180 1180
1181 1181 This function refers to input only; for output, see `ui.formatted()'.
1182 1182 '''
1183 1183 i = self.configbool("ui", "interactive")
1184 1184 if i is None:
1185 1185 # some environments replace stdin without implementing isatty
1186 1186 # usually those are non-interactive
1187 1187 return self._isatty(self.fin)
1188 1188
1189 1189 return i
1190 1190
1191 1191 def termwidth(self):
1192 1192 '''how wide is the terminal in columns?
1193 1193 '''
1194 1194 if 'COLUMNS' in encoding.environ:
1195 1195 try:
1196 1196 return int(encoding.environ['COLUMNS'])
1197 1197 except ValueError:
1198 1198 pass
1199 1199 return scmutil.termsize(self)[0]
1200 1200
    def formatted(self):
        '''should formatted output be used?

        It is often desirable to format the output to suite the output medium.
        Examples of this are truncating long lines or colorizing messages.
        However, this is often not desirable when piping output into other
        utilities, e.g. `grep'.

        Formatted output is triggered by the value of the `ui.formatted'
        configuration variable or - if it is unset - when `sys.stdout' points
        to a terminal device. Please note that `ui.formatted' should be
        considered an implementation detail; it is not intended for use outside
        Mercurial or its extensions.

        This function refers to output only; for input, see `ui.interactive()'.
        This function always returns false when in plain mode, see `ui.plain()'.
        '''
        if self.plain():
            return False

        i = self.configbool("ui", "formatted")
        if i is None:
            # some environments replace stdout without implementing isatty
            # usually those are non-interactive
            return self._isatty(self.fout)

        return i
1228 1228
    def _readline(self, prompt=''):
        # Read one line of input from self.fin, enabling readline-style
        # line editing when stdin is a tty.
        if self._isatty(self.fin):
            try:
                # magically add command line editing support, where
                # available
                import readline
                # force demandimport to really load the module
                readline.read_history_file
                # windows sometimes raises something other than ImportError
            except Exception:
                pass

        # call write() so output goes through subclassed implementation
        # e.g. color extension on Windows
        self.write(prompt, prompt=True)
        self.flush()

        # prompt ' ' must exist; otherwise readline may delete entire line
        # - http://bugs.python.org/issue12833
        with self.timeblockedsection('stdio'):
            line = util.bytesinput(self.fin, self.fout, r' ')

        # When stdin is in binary mode on Windows, it can cause
        # raw_input() to emit an extra trailing carriage return
        if pycompat.oslinesep == '\r\n' and line and line[-1] == '\r':
            line = line[:-1]
        return line
1256 1256
    def prompt(self, msg, default="y"):
        """Prompt user with msg, read response.
        If ui is not interactive, the default is returned.

        Raises error.ResponseExpected when input hits EOF. An empty
        response is replaced by the default.
        """
        if not self.interactive():
            # echo prompt and chosen default so non-interactive logs stay
            # readable
            self.write(msg, ' ', default or '', "\n")
            return default
        try:
            r = self._readline(self.label(msg, 'ui.prompt'))
            if not r:
                r = default
            if self.configbool('ui', 'promptecho'):
                self.write(r, "\n")
            return r
        except EOFError:
            raise error.ResponseExpected()
1273 1273
    @staticmethod
    def extractchoices(prompt):
        """Extract prompt message and list of choices from specified prompt.

        This returns tuple "(message, choices)", and "choices" is the
        list of tuple "(response character, text without &)".

        >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
        ('awake? ', [('y', 'Yes'), ('n', 'No')])
        >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
        ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
        >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
        ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
        """

        # Sadly, the prompt string may have been built with a filename
        # containing "$$" so let's try to find the first valid-looking
        # prompt to start parsing. Sadly, we also can't rely on
        # choices containing spaces, ASCII, or basically anything
        # except an ampersand followed by a character.
        m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
        msg = m.group(1)
        choices = [p.strip(' ') for p in m.group(2).split('$$')]
        def choicetuple(s):
            # the character after '&' is the response key; strip the '&'
            # from the display text
            ampidx = s.index('&')
            return s[ampidx + 1:ampidx + 2].lower(), s.replace('&', '', 1)
        return (msg, [choicetuple(s) for s in choices])
1301 1301
1302 1302 def promptchoice(self, prompt, default=0):
1303 1303 """Prompt user with a message, read response, and ensure it matches
1304 1304 one of the provided choices. The prompt is formatted as follows:
1305 1305
1306 1306 "would you like fries with that (Yn)? $$ &Yes $$ &No"
1307 1307
1308 1308 The index of the choice is returned. Responses are case
1309 1309 insensitive. If ui is not interactive, the default is
1310 1310 returned.
1311 1311 """
1312 1312
1313 1313 msg, choices = self.extractchoices(prompt)
1314 1314 resps = [r for r, t in choices]
1315 1315 while True:
1316 1316 r = self.prompt(msg, resps[default])
1317 1317 if r.lower() in resps:
1318 1318 return resps.index(r.lower())
1319 1319 self.write(_("unrecognized response\n"))
1320 1320
    def getpass(self, prompt=None, default=None):
        # Read a password without echoing; returns default when the ui is
        # not interactive, raises error.ResponseExpected on EOF.
        if not self.interactive():
            return default
        try:
            self.write_err(self.label(prompt or _('password: '), 'ui.prompt'))
            # disable getpass() only if explicitly specified. it's still valid
            # to interact with tty even if fin is not a tty.
            with self.timeblockedsection('stdio'):
                if self.configbool('ui', 'nontty'):
                    l = self.fin.readline()
                    if not l:
                        raise EOFError
                    return l.rstrip('\n')
                else:
                    return getpass.getpass('')
        except EOFError:
            raise error.ResponseExpected()
1338 1338 def status(self, *msg, **opts):
1339 1339 '''write status message to output (if ui.quiet is False)
1340 1340
1341 1341 This adds an output label of "ui.status".
1342 1342 '''
1343 1343 if not self.quiet:
1344 1344 opts[r'label'] = opts.get(r'label', '') + ' ui.status'
1345 1345 self.write(*msg, **opts)
1346 1346 def warn(self, *msg, **opts):
1347 1347 '''write warning message to output (stderr)
1348 1348
1349 1349 This adds an output label of "ui.warning".
1350 1350 '''
1351 1351 opts[r'label'] = opts.get(r'label', '') + ' ui.warning'
1352 1352 self.write_err(*msg, **opts)
1353 1353 def note(self, *msg, **opts):
1354 1354 '''write note to output (if ui.verbose is True)
1355 1355
1356 1356 This adds an output label of "ui.note".
1357 1357 '''
1358 1358 if self.verbose:
1359 1359 opts[r'label'] = opts.get(r'label', '') + ' ui.note'
1360 1360 self.write(*msg, **opts)
1361 1361 def debug(self, *msg, **opts):
1362 1362 '''write debug message to output (if ui.debugflag is True)
1363 1363
1364 1364 This adds an output label of "ui.debug".
1365 1365 '''
1366 1366 if self.debugflag:
1367 1367 opts[r'label'] = opts.get(r'label', '') + ' ui.debug'
1368 1368 self.write(*msg, **opts)
1369 1369
    def edit(self, text, user, extra=None, editform=None, pending=None,
             repopath=None, action=None):
        """Launch the user's editor on *text* and return the edited result.

        The text is written to a temporary file (suffix derived from
        *action*), the editor from geteditor() is run on it via system()
        with HGUSER and related variables exported, and the file contents
        are read back and returned. The temp file is always removed.
        """
        if action is None:
            self.develwarn('action is None but will soon be a required '
                           'parameter to ui.edit()')
        extra_defaults = {
            'prefix': 'editor',
            'suffix': '.txt',
        }
        if extra is not None:
            if extra.get('suffix') is not None:
                self.develwarn('extra.suffix is not None but will soon be '
                               'ignored by ui.edit()')
            extra_defaults.update(extra)
        extra = extra_defaults

        if action == 'diff':
            suffix = '.diff'
        elif action:
            suffix = '.%s.hg.txt' % action
        else:
            suffix = extra['suffix']

        # optionally keep the temp file inside the repo (.hg)
        rdir = None
        if self.configbool('experimental', 'editortmpinhg'):
            rdir = repopath
        (fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-',
                                      suffix=suffix,
                                      dir=rdir)
        try:
            f = os.fdopen(fd, r'wb')
            f.write(util.tonativeeol(text))
            f.close()

            environ = {'HGUSER': user}
            if 'transplant_source' in extra:
                environ.update({'HGREVISION': hex(extra['transplant_source'])})
            # first matching source label wins for HGREVISION
            for label in ('intermediate-source', 'source', 'rebase_source'):
                if label in extra:
                    environ.update({'HGREVISION': extra[label]})
                    break
            if editform:
                environ.update({'HGEDITFORM': editform})
            if pending:
                environ.update({'HG_PENDING': pending})

            editor = self.geteditor()

            self.system("%s \"%s\"" % (editor, name),
                        environ=environ,
                        onerr=error.Abort, errprefix=_("edit failed"),
                        blockedtag='editor')

            f = open(name, r'rb')
            t = util.fromnativeeol(f.read())
            f.close()
        finally:
            os.unlink(name)

        return t
1430 1430
    def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None,
               blockedtag=None):
        '''execute shell command with appropriate output stream. command
        output will be redirected if fout is not stdout.

        if command fails and onerr is None, return status, else raise onerr
        object as exception.
        '''
        if blockedtag is None:
            # Long cmds tend to be because of an absolute path on cmd. Keep
            # the tail end instead
            cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
            blockedtag = 'unknown_system_' + cmdsuffix
        out = self.fout
        # route output through self when any active buffer captures
        # subprocess output
        if any(s[1] for s in self._bufferstates):
            out = self
        with self.timeblockedsection(blockedtag):
            rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
                                util.explainexit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            raise onerr(errmsg)
        return rc
1456 1456
    def _runsystem(self, cmd, environ, cwd, out):
        """actually execute the given shell command (can be overridden by
        extensions like chg)"""
        # returns the exit status reported by util.system
        return util.system(cmd, environ=environ, cwd=cwd, out=out)
1461 1461
    def traceback(self, exc=None, force=False):
        '''print exception traceback if traceback printing enabled or forced.
        only to call in exception handler. returns true if traceback
        printed.'''
        if self.tracebackflag or force:
            if exc is None:
                exc = sys.exc_info()
            # 'cause' is an optional chained (exc_info-style) tuple set by
            # the raiser
            cause = getattr(exc[1], 'cause', None)

            if cause is not None:
                causetb = traceback.format_tb(cause[2])
                exctb = traceback.format_tb(exc[2])
                exconly = traceback.format_exception_only(cause[0], cause[1])

                # exclude frame where 'exc' was chained and rethrown from exctb
                self.write_err('Traceback (most recent call last):\n',
                               ''.join(exctb[:-1]),
                               ''.join(causetb),
                               ''.join(exconly))
            else:
                output = traceback.format_exception(exc[0], exc[1], exc[2])
                data = r''.join(output)
                if pycompat.ispy3:
                    # format_exception yields unicode on py3; encode before
                    # writing bytes to the error stream
                    enc = pycompat.sysstr(encoding.encoding)
                    data = data.encode(enc, errors=r'replace')
                self.write_err(data)
        return self.tracebackflag or force
1489 1489
1490 1490 def geteditor(self):
1491 1491 '''return editor to use'''
1492 1492 if pycompat.sysplatform == 'plan9':
1493 1493 # vi is the MIPS instruction simulator on Plan 9. We
1494 1494 # instead default to E to plumb commit messages to
1495 1495 # avoid confusion.
1496 1496 editor = 'E'
1497 1497 else:
1498 1498 editor = 'vi'
1499 1499 return (encoding.environ.get("HGEDITOR") or
1500 1500 self.config("ui", "editor", editor))
1501 1501
    @util.propertycache
    def _progbar(self):
        """setup the progbar singleton to the ui object

        Returns None when progress output is suppressed (quiet or debug
        mode, progress.disable set, or progress.shouldprint declines).
        """
        if (self.quiet or self.debugflag
            or self.configbool('progress', 'disable')
            or not progress.shouldprint(self)):
            return None
        return getprogbar(self)
1510 1510
1511 1511 def _progclear(self):
1512 1512 """clear progress bar output if any. use it before any output"""
1513 1513 if not haveprogbar(): # nothing loaded yet
1514 1514 return
1515 1515 if self._progbar is not None and self._progbar.printed:
1516 1516 self._progbar.clear()
1517 1517
    def progress(self, topic, pos, item="", unit="", total=None):
        '''show a progress message

        By default a textual progress bar will be displayed if an operation
        takes too long. 'topic' is the current operation, 'item' is a
        non-numeric marker of the current position (i.e. the currently
        in-process file), 'pos' is the current numeric position (i.e.
        revision, bytes, etc.), unit is a corresponding unit label,
        and total is the highest expected pos.

        Multiple nested topics may be active at a time.

        All topics should be marked closed by setting pos to None at
        termination.
        '''
        if self._progbar is not None:
            self._progbar.progress(topic, pos, item=item, unit=unit,
                                   total=total)
        # the remainder emits debug-level progress lines when enabled
        if pos is None or not self.configbool('progress', 'debug'):
            return

        if unit:
            unit = ' ' + unit
        if item:
            item = ' ' + item

        if total:
            pct = 100.0 * pos / total
            self.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                       % (topic, item, pos, total, unit, pct))
        else:
            self.debug('%s:%s %d%s\n' % (topic, item, pos, unit))
1550 1550
    def log(self, service, *msg, **opts):
        '''hook for logging facility extensions

        service should be a readily-identifiable subsystem, which will
        allow filtering.

        *msg should be a newline-terminated format string to log, and
        then any values to %-format into that format string.

        **opts currently has no defined meanings.

        The base implementation does nothing; logging extensions are
        expected to override this method.
        '''
1562 1562
1563 1563 def label(self, msg, label):
1564 1564 '''style msg based on supplied label
1565 1565
1566 1566 If some color mode is enabled, this will add the necessary control
1567 1567 characters to apply such color. In addition, 'debug' color mode adds
1568 1568 markup showing which label affects a piece of text.
1569 1569
1570 1570 ui.write(s, 'label') is equivalent to
1571 1571 ui.write(ui.label(s, 'label')).
1572 1572 '''
1573 1573 if self._colormode is not None:
1574 1574 return color.colorlabel(self, msg, label)
1575 1575 return msg
1576 1576
    def develwarn(self, msg, stacklevel=1, config=None):
        """issue a developer warning message

        Use 'stacklevel' to report the offender some layers further up in the
        stack.

        'config' names a devel.* boolean that gates this warning when
        devel.all-warnings is off.
        """
        if not self.configbool('devel', 'all-warnings'):
            if config is not None and not self.configbool('devel', config):
                return
        msg = 'devel-warn: ' + msg
        stacklevel += 1 # get in develwarn
        if self.tracebackflag:
            util.debugstacktrace(msg, stacklevel, self.ferr, self.fout)
            self.log('develwarn', '%s at:\n%s' %
                     (msg, ''.join(util.getstackframes(stacklevel))))
        else:
            curframe = inspect.currentframe()
            calframe = inspect.getouterframes(curframe, 2)
            # calframe[stacklevel][1:4] is (filename, lineno, function)
            self.write_err('%s at: %s:%s (%s)\n'
                           % ((msg,) + calframe[stacklevel][1:4]))
            self.log('develwarn', '%s at: %s:%s (%s)\n',
                     msg, *calframe[stacklevel][1:4])
            curframe = calframe = None # avoid cycles
1600 1600
1601 1601 def deprecwarn(self, msg, version):
1602 1602 """issue a deprecation warning
1603 1603
1604 1604 - msg: message explaining what is deprecated and how to upgrade,
1605 1605 - version: last version where the API will be supported,
1606 1606 """
1607 1607 if not (self.configbool('devel', 'all-warnings')
1608 1608 or self.configbool('devel', 'deprec-warn')):
1609 1609 return
1610 1610 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
1611 1611 " update your code.)") % version
1612 1612 self.develwarn(msg, stacklevel=2, config='deprec-warn')
1613 1613
    def exportableenviron(self):
        """The environment variables that are safe to export, e.g. through
        hgweb.
        """
        # simple accessor; the set itself is maintained elsewhere on self
        return self._exportableenviron
1619 1619
    @contextlib.contextmanager
    def configoverride(self, overrides, source=""):
        """Context manager for temporary config overrides
        `overrides` must be a dict of the following structure:
        {(section, name) : value}"""
        backups = {}
        try:
            # back up each value before setting it, so a failure mid-loop
            # still restores everything already changed in the finally
            for (section, name), value in overrides.items():
                backups[(section, name)] = self.backupconfig(section, name)
                self.setconfig(section, name, value, source)
            yield
        finally:
            for __, backup in backups.items():
                self.restoreconfig(backup)
            # just restoring ui.quiet config to the previous value is not enough
            # as it does not update ui.quiet class member
            if ('ui', 'quiet') in overrides:
                self.fixconfig(section='ui')
1638 1638
class paths(dict):
    """Represents a collection of paths and their configs.

    Data is initially derived from ui instances and the config files they have
    loaded.
    """
    def __init__(self, ui):
        # Populate self from the [paths] config section, resolving
        # sub-options for each named path.
        dict.__init__(self)

        for name, loc in ui.configitems('paths', ignoresub=True):
            # No location is the same as not existing.
            if not loc:
                continue
            loc, sub = ui.configsuboptions('paths', name)
            self[name] = path(ui, name, rawloc=loc, suboptions=sub)

    def getpath(self, name, default=None):
        """Return a ``path`` from a string, falling back to default.

        ``name`` can be a named path or locations. Locations are filesystem
        paths or URIs.

        Returns None if ``name`` is not a registered path, a URI, or a local
        path to a repo.
        """
        # Only fall back to default if no path was requested.
        if name is None:
            if not default:
                default = ()
            elif not isinstance(default, (tuple, list)):
                default = (default,)
            # first default that names a registered path wins
            for k in default:
                try:
                    return self[k]
                except KeyError:
                    continue
            return None

        # Most likely empty string.
        # This may need to raise in the future.
        if not name:
            return None

        try:
            return self[name]
        except KeyError:
            # Try to resolve as a local path or URI.
            try:
                # We don't pass sub-options in, so no need to pass ui instance.
                return path(None, None, rawloc=name)
            except ValueError:
                raise error.RepoError(_('repository %s does not exist') %
                                      name)
1692 1692
# registry of path sub-options: maps option name -> (attribute name,
# validation/conversion function); populated by @pathsuboption below
_pathsuboptions = {}

def pathsuboption(option, attr):
    """Decorator used to declare a path sub-option.

    Arguments are the sub-option name and the attribute it should set on
    ``path`` instances.

    The decorated function will receive as arguments a ``ui`` instance,
    ``path`` instance, and the string value of this option from the config.
    The function should return the value that will be set on the ``path``
    instance.

    This decorator can be used to perform additional verification of
    sub-options and to change the type of sub-options.
    """
    def register(func):
        _pathsuboptions[option] = (attr, func)
        return func
    return register
1713 1713
@pathsuboption('pushurl', 'pushloc')
def pushurlpathoption(ui, path, value):
    """Validate the ``pushurl`` sub-option.

    The value must be a real URL; a ``#fragment`` branch suffix is not
    supported for push URLs. Invalid values are warned about and dropped
    (``None`` is stored).
    """
    url = util.url(value)
    # Actually require a URL.
    if not url.scheme:
        ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
        return None

    # Don't support the #foo syntax in the push URL to declare branch to
    # push.
    if url.fragment:
        ui.warn(_('("#fragment" in paths.%s:pushurl not supported; '
                  'ignoring)\n') % path.name)
        url.fragment = None

    return str(url)
1730 1730
@pathsuboption('pushrev', 'pushrev')
def pushrevpathoption(ui, path, value):
    """Store the ``pushrev`` sub-option verbatim on the path instance."""
    # No validation here; the raw config string is kept as-is and
    # presumably interpreted later by push machinery — confirm at callers.
    return value
1734 1734
class path(object):
    """Represents an individual path and its configuration."""

    def __init__(self, ui, name, rawloc=None, suboptions=None):
        """Construct a path from its config options.

        ``ui`` is the ``ui`` instance the path is coming from.
        ``name`` is the symbolic name of the path.
        ``rawloc`` is the raw location, as defined in the config.
        ``pushloc`` is the raw locations pushes should be made to.

        If ``name`` is not defined, we require that the location be a) a local
        filesystem path with a .hg directory or b) a URL. If not,
        ``ValueError`` is raised.
        """
        if not rawloc:
            raise ValueError('rawloc must be defined')

        # Locations may define branches via syntax <base>#<branch>.
        parsed = util.url(rawloc)
        branch = None
        if parsed.fragment:
            branch = parsed.fragment
            parsed.fragment = None

        self.url = parsed
        self.branch = branch
        self.name = name
        self.rawloc = rawloc
        self.loc = '%s' % parsed

        # A raw location given without a symbolic name must itself be
        # usable: either a URL or a path to a local repository.
        if (not name and not parsed.scheme
            and not self._isvalidlocalpath(self.loc)):
            raise ValueError('location is not a URL or path to a local '
                             'repo: %s' % rawloc)

        suboptions = suboptions or {}

        # Process registered sub-options. Attributes for registered
        # sub-options are always present; missing or invalid sub-options
        # are stored as None.
        for subopt, (attrname, parsefn) in _pathsuboptions.iteritems():
            if subopt in suboptions:
                setattr(self, attrname, parsefn(ui, self, suboptions[subopt]))
            else:
                setattr(self, attrname, None)

    def _isvalidlocalpath(self, path):
        """Returns True if the given path is a potentially valid repository.
        This is its own function so that extensions can change the definition of
        'valid' in this case (like when pulling from a git repo into a hg
        one)."""
        return os.path.isdir(os.path.join(path, '.hg'))

    @property
    def suboptions(self):
        """Return sub-options and their values for this path.

        This is intended to be used for presentation purposes.
        """
        return dict((subopt, getattr(self, attrname))
                    for subopt, (attrname, _fn) in _pathsuboptions.iteritems()
                    if getattr(self, attrname) is not None)
1805 1805
# A single process-wide progress bar is shared by every ui object so that
# concurrently created uis do not compete for the terminal.
_progresssingleton = None

def getprogbar(ui):
    """Return the shared progress bar, creating it on first use."""
    global _progresssingleton
    if _progresssingleton is not None:
        return _progresssingleton
    # passing 'ui' object to the singleton is fishy,
    # this is how the extension used to work but feel free to rework it.
    _progresssingleton = progress.progbar(ui)
    return _progresssingleton

def haveprogbar():
    """Report whether the shared progress bar has been instantiated."""
    return _progresssingleton is not None
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now