rust: module policy with importrust...
Georges Racinet
r42651:810f66b4 default
@@ -1,3469 +1,3481 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 compression,
86 86 dateutil,
87 87 procutil,
88 88 stringutil,
89 89 )
90 90
91 91 from .revlogutils import (
92 92 deltas as deltautil
93 93 )
94 94
95 95 release = lockmod.release
96 96
97 97 command = registrar.command()
98 98
99 99 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
100 100 def debugancestor(ui, repo, *args):
101 101 """find the ancestor revision of two revisions in a given index"""
102 102 if len(args) == 3:
103 103 index, rev1, rev2 = args
104 104 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
105 105 lookup = r.lookup
106 106 elif len(args) == 2:
107 107 if not repo:
108 108 raise error.Abort(_('there is no Mercurial repository here '
109 109 '(.hg not found)'))
110 110 rev1, rev2 = args
111 111 r = repo.changelog
112 112 lookup = repo.lookup
113 113 else:
114 114 raise error.Abort(_('either two or three arguments required'))
115 115 a = r.ancestor(lookup(rev1), lookup(rev2))
116 116 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
117 117
118 118 @command('debugapplystreamclonebundle', [], 'FILE')
119 119 def debugapplystreamclonebundle(ui, repo, fname):
120 120 """apply a stream clone bundle file"""
121 121 f = hg.openpath(ui, fname)
122 122 gen = exchange.readbundle(ui, f, fname)
123 123 gen.apply(repo)
124 124
125 125 @command('debugbuilddag',
126 126 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
127 127 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
128 128 ('n', 'new-file', None, _('add new file at each rev'))],
129 129 _('[OPTION]... [TEXT]'))
130 130 def debugbuilddag(ui, repo, text=None,
131 131 mergeable_file=False,
132 132 overwritten_file=False,
133 133 new_file=False):
134 134 """builds a repo with a given DAG from scratch in the current empty repo
135 135
136 136 The description of the DAG is read from stdin if not given on the
137 137 command line.
138 138
139 139 Elements:
140 140
141 141 - "+n" is a linear run of n nodes based on the current default parent
142 142 - "." is a single node based on the current default parent
143 143 - "$" resets the default parent to null (implied at the start);
144 144 otherwise the default parent is always the last node created
145 145 - "<p" sets the default parent to the backref p
146 146 - "*p" is a fork at parent p, which is a backref
147 147 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
148 148 - "/p2" is a merge of the preceding node and p2
149 149 - ":tag" defines a local tag for the preceding node
150 150 - "@branch" sets the named branch for subsequent nodes
151 151 - "#...\\n" is a comment up to the end of the line
152 152
153 153 Whitespace between the above elements is ignored.
154 154
155 155 A backref is either
156 156
157 157 - a number n, which references the node curr-n, where curr is the current
158 158 node, or
159 159 - the name of a local tag you placed earlier using ":tag", or
160 160 - empty to denote the default parent.
161 161
162 162 All string-valued elements are either strictly alphanumeric, or must
163 163 be enclosed in double quotes ("..."), with "\\" as escape character.
164 164 """
165 165
166 166 if text is None:
167 167 ui.status(_("reading DAG from stdin\n"))
168 168 text = ui.fin.read()
169 169
170 170 cl = repo.changelog
171 171 if len(cl) > 0:
172 172 raise error.Abort(_('repository is not empty'))
173 173
174 174 # determine number of revs in DAG
175 175 total = 0
176 176 for type, data in dagparser.parsedag(text):
177 177 if type == 'n':
178 178 total += 1
179 179
180 180 if mergeable_file:
181 181 linesperrev = 2
182 182 # make a file with k lines per rev
183 183 initialmergedlines = ['%d' % i
184 184 for i in pycompat.xrange(0, total * linesperrev)]
185 185 initialmergedlines.append("")
186 186
187 187 tags = []
188 188 progress = ui.makeprogress(_('building'), unit=_('revisions'),
189 189 total=total)
190 190 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
191 191 at = -1
192 192 atbranch = 'default'
193 193 nodeids = []
194 194 id = 0
195 195 progress.update(id)
196 196 for type, data in dagparser.parsedag(text):
197 197 if type == 'n':
198 198 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 199 id, ps = data
200 200
201 201 files = []
202 202 filecontent = {}
203 203
204 204 p2 = None
205 205 if mergeable_file:
206 206 fn = "mf"
207 207 p1 = repo[ps[0]]
208 208 if len(ps) > 1:
209 209 p2 = repo[ps[1]]
210 210 pa = p1.ancestor(p2)
211 211 base, local, other = [x[fn].data() for x in (pa, p1,
212 212 p2)]
213 213 m3 = simplemerge.Merge3Text(base, local, other)
214 214 ml = [l.strip() for l in m3.merge_lines()]
215 215 ml.append("")
216 216 elif at > 0:
217 217 ml = p1[fn].data().split("\n")
218 218 else:
219 219 ml = initialmergedlines
220 220 ml[id * linesperrev] += " r%i" % id
221 221 mergedtext = "\n".join(ml)
222 222 files.append(fn)
223 223 filecontent[fn] = mergedtext
224 224
225 225 if overwritten_file:
226 226 fn = "of"
227 227 files.append(fn)
228 228 filecontent[fn] = "r%i\n" % id
229 229
230 230 if new_file:
231 231 fn = "nf%i" % id
232 232 files.append(fn)
233 233 filecontent[fn] = "r%i\n" % id
234 234 if len(ps) > 1:
235 235 if not p2:
236 236 p2 = repo[ps[1]]
237 237 for fn in p2:
238 238 if fn.startswith("nf"):
239 239 files.append(fn)
240 240 filecontent[fn] = p2[fn].data()
241 241
242 242 def fctxfn(repo, cx, path):
243 243 if path in filecontent:
244 244 return context.memfilectx(repo, cx, path,
245 245 filecontent[path])
246 246 return None
247 247
248 248 if len(ps) == 0 or ps[0] < 0:
249 249 pars = [None, None]
250 250 elif len(ps) == 1:
251 251 pars = [nodeids[ps[0]], None]
252 252 else:
253 253 pars = [nodeids[p] for p in ps]
254 254 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 255 date=(id, 0),
256 256 user="debugbuilddag",
257 257 extra={'branch': atbranch})
258 258 nodeid = repo.commitctx(cx)
259 259 nodeids.append(nodeid)
260 260 at = id
261 261 elif type == 'l':
262 262 id, name = data
263 263 ui.note(('tag %s\n' % name))
264 264 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 265 elif type == 'a':
266 266 ui.note(('branch %s\n' % data))
267 267 atbranch = data
268 268 progress.update(id)
269 269
270 270 if tags:
271 271 repo.vfs.write("localtags", "".join(tags))
272 272
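
The DAG text format documented in the debugbuilddag docstring above is easier to see with a concrete spec. The snippet below is a hedged illustration only (the tag and branch names are invented and the variable is not part of this change); it shows input that could be passed as TEXT or piped on stdin:

# Hypothetical DAG spec built from the elements listed in the docstring above.
sample_dag = (
    b"+3 :base "           # a linear run of three nodes; tag the last one "base"
    b"@feature <base +2 "  # name the branch "feature", reset the parent to "base", add two nodes
    b":feat "              # tag the preceding node "feat"
    b"<base +1 /feat"      # back to "base", one more node, then merge it with "feat"
)
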
273 273 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
274 274 indent_string = ' ' * indent
275 275 if all:
276 276 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
277 277 % indent_string)
278 278
279 279 def showchunks(named):
280 280 ui.write("\n%s%s\n" % (indent_string, named))
281 281 for deltadata in gen.deltaiter():
282 282 node, p1, p2, cs, deltabase, delta, flags = deltadata
283 283 ui.write("%s%s %s %s %s %s %d\n" %
284 284 (indent_string, hex(node), hex(p1), hex(p2),
285 285 hex(cs), hex(deltabase), len(delta)))
286 286
287 287 chunkdata = gen.changelogheader()
288 288 showchunks("changelog")
289 289 chunkdata = gen.manifestheader()
290 290 showchunks("manifest")
291 291 for chunkdata in iter(gen.filelogheader, {}):
292 292 fname = chunkdata['filename']
293 293 showchunks(fname)
294 294 else:
295 295 if isinstance(gen, bundle2.unbundle20):
296 296 raise error.Abort(_('use debugbundle2 for this file'))
297 297 chunkdata = gen.changelogheader()
298 298 for deltadata in gen.deltaiter():
299 299 node, p1, p2, cs, deltabase, delta, flags = deltadata
300 300 ui.write("%s%s\n" % (indent_string, hex(node)))
301 301
302 302 def _debugobsmarkers(ui, part, indent=0, **opts):
303 303 """display version and markers contained in 'data'"""
304 304 opts = pycompat.byteskwargs(opts)
305 305 data = part.read()
306 306 indent_string = ' ' * indent
307 307 try:
308 308 version, markers = obsolete._readmarkers(data)
309 309 except error.UnknownVersion as exc:
310 310 msg = "%sunsupported version: %s (%d bytes)\n"
311 311 msg %= indent_string, exc.version, len(data)
312 312 ui.write(msg)
313 313 else:
314 314 msg = "%sversion: %d (%d bytes)\n"
315 315 msg %= indent_string, version, len(data)
316 316 ui.write(msg)
317 317 fm = ui.formatter('debugobsolete', opts)
318 318 for rawmarker in sorted(markers):
319 319 m = obsutil.marker(None, rawmarker)
320 320 fm.startitem()
321 321 fm.plain(indent_string)
322 322 cmdutil.showmarker(fm, m)
323 323 fm.end()
324 324
325 325 def _debugphaseheads(ui, data, indent=0):
326 326 """display version and markers contained in 'data'"""
327 327 indent_string = ' ' * indent
328 328 headsbyphase = phases.binarydecode(data)
329 329 for phase in phases.allphases:
330 330 for head in headsbyphase[phase]:
331 331 ui.write(indent_string)
332 332 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
333 333
334 334 def _quasirepr(thing):
335 335 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
336 336 return '{%s}' % (
337 337 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
338 338 return pycompat.bytestr(repr(thing))
339 339
340 340 def _debugbundle2(ui, gen, all=None, **opts):
341 341 """lists the contents of a bundle2"""
342 342 if not isinstance(gen, bundle2.unbundle20):
343 343 raise error.Abort(_('not a bundle2 file'))
344 344 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
345 345 parttypes = opts.get(r'part_type', [])
346 346 for part in gen.iterparts():
347 347 if parttypes and part.type not in parttypes:
348 348 continue
349 349 msg = '%s -- %s (mandatory: %r)\n'
350 350 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
351 351 if part.type == 'changegroup':
352 352 version = part.params.get('version', '01')
353 353 cg = changegroup.getunbundler(version, part, 'UN')
354 354 if not ui.quiet:
355 355 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
356 356 if part.type == 'obsmarkers':
357 357 if not ui.quiet:
358 358 _debugobsmarkers(ui, part, indent=4, **opts)
359 359 if part.type == 'phase-heads':
360 360 if not ui.quiet:
361 361 _debugphaseheads(ui, part, indent=4)
362 362
363 363 @command('debugbundle',
364 364 [('a', 'all', None, _('show all details')),
365 365 ('', 'part-type', [], _('show only the named part type')),
366 366 ('', 'spec', None, _('print the bundlespec of the bundle'))],
367 367 _('FILE'),
368 368 norepo=True)
369 369 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
370 370 """lists the contents of a bundle"""
371 371 with hg.openpath(ui, bundlepath) as f:
372 372 if spec:
373 373 spec = exchange.getbundlespec(ui, f)
374 374 ui.write('%s\n' % spec)
375 375 return
376 376
377 377 gen = exchange.readbundle(ui, f, bundlepath)
378 378 if isinstance(gen, bundle2.unbundle20):
379 379 return _debugbundle2(ui, gen, all=all, **opts)
380 380 _debugchangegroup(ui, gen, all=all, **opts)
381 381
382 382 @command('debugcapabilities',
383 383 [], _('PATH'),
384 384 norepo=True)
385 385 def debugcapabilities(ui, path, **opts):
386 386 """lists the capabilities of a remote peer"""
387 387 opts = pycompat.byteskwargs(opts)
388 388 peer = hg.peer(ui, opts, path)
389 389 caps = peer.capabilities()
390 390 ui.write(('Main capabilities:\n'))
391 391 for c in sorted(caps):
392 392 ui.write((' %s\n') % c)
393 393 b2caps = bundle2.bundle2caps(peer)
394 394 if b2caps:
395 395 ui.write(('Bundle2 capabilities:\n'))
396 396 for key, values in sorted(b2caps.iteritems()):
397 397 ui.write((' %s\n') % key)
398 398 for v in values:
399 399 ui.write((' %s\n') % v)
400 400
401 401 @command('debugcheckstate', [], '')
402 402 def debugcheckstate(ui, repo):
403 403 """validate the correctness of the current dirstate"""
404 404 parent1, parent2 = repo.dirstate.parents()
405 405 m1 = repo[parent1].manifest()
406 406 m2 = repo[parent2].manifest()
407 407 errors = 0
408 408 for f in repo.dirstate:
409 409 state = repo.dirstate[f]
410 410 if state in "nr" and f not in m1:
411 411 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
412 412 errors += 1
413 413 if state in "a" and f in m1:
414 414 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
415 415 errors += 1
416 416 if state in "m" and f not in m1 and f not in m2:
417 417 ui.warn(_("%s in state %s, but not in either manifest\n") %
418 418 (f, state))
419 419 errors += 1
420 420 for f in m1:
421 421 state = repo.dirstate[f]
422 422 if state not in "nrm":
423 423 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
424 424 errors += 1
425 425 if errors:
426 426 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
427 427 raise error.Abort(errstr)
428 428
429 429 @command('debugcolor',
430 430 [('', 'style', None, _('show all configured styles'))],
431 431 'hg debugcolor')
432 432 def debugcolor(ui, repo, **opts):
433 433 """show available color, effects or style"""
434 434 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
435 435 if opts.get(r'style'):
436 436 return _debugdisplaystyle(ui)
437 437 else:
438 438 return _debugdisplaycolor(ui)
439 439
440 440 def _debugdisplaycolor(ui):
441 441 ui = ui.copy()
442 442 ui._styles.clear()
443 443 for effect in color._activeeffects(ui).keys():
444 444 ui._styles[effect] = effect
445 445 if ui._terminfoparams:
446 446 for k, v in ui.configitems('color'):
447 447 if k.startswith('color.'):
448 448 ui._styles[k] = k[6:]
449 449 elif k.startswith('terminfo.'):
450 450 ui._styles[k] = k[9:]
451 451 ui.write(_('available colors:\n'))
452 452 # sort labels with '_' after the others to group the '_background' entries.
453 453 items = sorted(ui._styles.items(),
454 454 key=lambda i: ('_' in i[0], i[0], i[1]))
455 455 for colorname, label in items:
456 456 ui.write(('%s\n') % colorname, label=label)
457 457
458 458 def _debugdisplaystyle(ui):
459 459 ui.write(_('available style:\n'))
460 460 if not ui._styles:
461 461 return
462 462 width = max(len(s) for s in ui._styles)
463 463 for label, effects in sorted(ui._styles.items()):
464 464 ui.write('%s' % label, label=label)
465 465 if effects:
466 466 # 50
467 467 ui.write(': ')
468 468 ui.write(' ' * (max(0, width - len(label))))
469 469 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
470 470 ui.write('\n')
471 471
472 472 @command('debugcreatestreamclonebundle', [], 'FILE')
473 473 def debugcreatestreamclonebundle(ui, repo, fname):
474 474 """create a stream clone bundle file
475 475
476 476 Stream bundles are special bundles that are essentially archives of
477 477 revlog files. They are commonly used for cloning very quickly.
478 478 """
479 479 # TODO we may want to turn this into an abort when this functionality
480 480 # is moved into `hg bundle`.
481 481 if phases.hassecret(repo):
482 482 ui.warn(_('(warning: stream clone bundle will contain secret '
483 483 'revisions)\n'))
484 484
485 485 requirements, gen = streamclone.generatebundlev1(repo)
486 486 changegroup.writechunks(ui, gen, fname)
487 487
488 488 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
489 489
490 490 @command('debugdag',
491 491 [('t', 'tags', None, _('use tags as labels')),
492 492 ('b', 'branches', None, _('annotate with branch names')),
493 493 ('', 'dots', None, _('use dots for runs')),
494 494 ('s', 'spaces', None, _('separate elements by spaces'))],
495 495 _('[OPTION]... [FILE [REV]...]'),
496 496 optionalrepo=True)
497 497 def debugdag(ui, repo, file_=None, *revs, **opts):
498 498 """format the changelog or an index DAG as a concise textual description
499 499
500 500 If you pass a revlog index, the revlog's DAG is emitted. If you list
501 501 revision numbers, they get labeled in the output as rN.
502 502
503 503 Otherwise, the changelog DAG of the current repo is emitted.
504 504 """
505 505 spaces = opts.get(r'spaces')
506 506 dots = opts.get(r'dots')
507 507 if file_:
508 508 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
509 509 file_)
510 510 revs = set((int(r) for r in revs))
511 511 def events():
512 512 for r in rlog:
513 513 yield 'n', (r, list(p for p in rlog.parentrevs(r)
514 514 if p != -1))
515 515 if r in revs:
516 516 yield 'l', (r, "r%i" % r)
517 517 elif repo:
518 518 cl = repo.changelog
519 519 tags = opts.get(r'tags')
520 520 branches = opts.get(r'branches')
521 521 if tags:
522 522 labels = {}
523 523 for l, n in repo.tags().items():
524 524 labels.setdefault(cl.rev(n), []).append(l)
525 525 def events():
526 526 b = "default"
527 527 for r in cl:
528 528 if branches:
529 529 newb = cl.read(cl.node(r))[5]['branch']
530 530 if newb != b:
531 531 yield 'a', newb
532 532 b = newb
533 533 yield 'n', (r, list(p for p in cl.parentrevs(r)
534 534 if p != -1))
535 535 if tags:
536 536 ls = labels.get(r)
537 537 if ls:
538 538 for l in ls:
539 539 yield 'l', (r, l)
540 540 else:
541 541 raise error.Abort(_('need repo for changelog dag'))
542 542
543 543 for line in dagparser.dagtextlines(events(),
544 544 addspaces=spaces,
545 545 wraplabels=True,
546 546 wrapannotations=True,
547 547 wrapnonlinear=dots,
548 548 usedots=dots,
549 549 maxlinewidth=70):
550 550 ui.write(line)
551 551 ui.write("\n")
552 552
553 553 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
554 554 def debugdata(ui, repo, file_, rev=None, **opts):
555 555 """dump the contents of a data file revision"""
556 556 opts = pycompat.byteskwargs(opts)
557 557 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
558 558 if rev is not None:
559 559 raise error.CommandError('debugdata', _('invalid arguments'))
560 560 file_, rev = None, file_
561 561 elif rev is None:
562 562 raise error.CommandError('debugdata', _('invalid arguments'))
563 563 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
564 564 try:
565 565 ui.write(r.revision(r.lookup(rev), raw=True))
566 566 except KeyError:
567 567 raise error.Abort(_('invalid revision identifier %s') % rev)
568 568
569 569 @command('debugdate',
570 570 [('e', 'extended', None, _('try extended date formats'))],
571 571 _('[-e] DATE [RANGE]'),
572 572 norepo=True, optionalrepo=True)
573 573 def debugdate(ui, date, range=None, **opts):
574 574 """parse and display a date"""
575 575 if opts[r"extended"]:
576 576 d = dateutil.parsedate(date, util.extendeddateformats)
577 577 else:
578 578 d = dateutil.parsedate(date)
579 579 ui.write(("internal: %d %d\n") % d)
580 580 ui.write(("standard: %s\n") % dateutil.datestr(d))
581 581 if range:
582 582 m = dateutil.matchdate(range)
583 583 ui.write(("match: %s\n") % m(d[0]))
584 584
585 585 @command('debugdeltachain',
586 586 cmdutil.debugrevlogopts + cmdutil.formatteropts,
587 587 _('-c|-m|FILE'),
588 588 optionalrepo=True)
589 589 def debugdeltachain(ui, repo, file_=None, **opts):
590 590 """dump information about delta chains in a revlog
591 591
592 592 Output can be templatized. Available template keywords are:
593 593
594 594 :``rev``: revision number
595 595 :``chainid``: delta chain identifier (numbered by unique base)
596 596 :``chainlen``: delta chain length to this revision
597 597 :``prevrev``: previous revision in delta chain
598 598 :``deltatype``: role of delta / how it was computed
599 599 :``compsize``: compressed size of revision
600 600 :``uncompsize``: uncompressed size of revision
601 601 :``chainsize``: total size of compressed revisions in chain
602 602 :``chainratio``: total chain size divided by uncompressed revision size
603 603 (new delta chains typically start at ratio 2.00)
604 604 :``lindist``: linear distance from base revision in delta chain to end
605 605 of this revision
606 606 :``extradist``: total size of revisions not part of this delta chain from
607 607 base of delta chain to end of this revision; a measurement
608 608 of how much extra data we need to read/seek across to read
609 609 the delta chain for this revision
610 610 :``extraratio``: extradist divided by chainsize; another representation of
611 611 how much unrelated data is needed to load this delta chain
612 612
613 613 If the repository is configured to use the sparse read, additional keywords
614 614 are available:
615 615
616 616 :``readsize``: total size of data read from the disk for a revision
617 617 (sum of the sizes of all the blocks)
618 618 :``largestblock``: size of the largest block of data read from the disk
619 619 :``readdensity``: density of useful bytes in the data read from the disk
620 620 :``srchunks``: in how many data hunks the whole revision would be read
621 621
622 622 The sparse read can be enabled with experimental.sparse-read = True
623 623 """
624 624 opts = pycompat.byteskwargs(opts)
625 625 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
626 626 index = r.index
627 627 start = r.start
628 628 length = r.length
629 629 generaldelta = r.version & revlog.FLAG_GENERALDELTA
630 630 withsparseread = getattr(r, '_withsparseread', False)
631 631
632 632 def revinfo(rev):
633 633 e = index[rev]
634 634 compsize = e[1]
635 635 uncompsize = e[2]
636 636 chainsize = 0
637 637
638 638 if generaldelta:
639 639 if e[3] == e[5]:
640 640 deltatype = 'p1'
641 641 elif e[3] == e[6]:
642 642 deltatype = 'p2'
643 643 elif e[3] == rev - 1:
644 644 deltatype = 'prev'
645 645 elif e[3] == rev:
646 646 deltatype = 'base'
647 647 else:
648 648 deltatype = 'other'
649 649 else:
650 650 if e[3] == rev:
651 651 deltatype = 'base'
652 652 else:
653 653 deltatype = 'prev'
654 654
655 655 chain = r._deltachain(rev)[0]
656 656 for iterrev in chain:
657 657 e = index[iterrev]
658 658 chainsize += e[1]
659 659
660 660 return compsize, uncompsize, deltatype, chain, chainsize
661 661
662 662 fm = ui.formatter('debugdeltachain', opts)
663 663
664 664 fm.plain(' rev chain# chainlen prev delta '
665 665 'size rawsize chainsize ratio lindist extradist '
666 666 'extraratio')
667 667 if withsparseread:
668 668 fm.plain(' readsize largestblk rddensity srchunks')
669 669 fm.plain('\n')
670 670
671 671 chainbases = {}
672 672 for rev in r:
673 673 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
674 674 chainbase = chain[0]
675 675 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
676 676 basestart = start(chainbase)
677 677 revstart = start(rev)
678 678 lineardist = revstart + comp - basestart
679 679 extradist = lineardist - chainsize
680 680 try:
681 681 prevrev = chain[-2]
682 682 except IndexError:
683 683 prevrev = -1
684 684
685 685 if uncomp != 0:
686 686 chainratio = float(chainsize) / float(uncomp)
687 687 else:
688 688 chainratio = chainsize
689 689
690 690 if chainsize != 0:
691 691 extraratio = float(extradist) / float(chainsize)
692 692 else:
693 693 extraratio = extradist
694 694
695 695 fm.startitem()
696 696 fm.write('rev chainid chainlen prevrev deltatype compsize '
697 697 'uncompsize chainsize chainratio lindist extradist '
698 698 'extraratio',
699 699 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
700 700 rev, chainid, len(chain), prevrev, deltatype, comp,
701 701 uncomp, chainsize, chainratio, lineardist, extradist,
702 702 extraratio,
703 703 rev=rev, chainid=chainid, chainlen=len(chain),
704 704 prevrev=prevrev, deltatype=deltatype, compsize=comp,
705 705 uncompsize=uncomp, chainsize=chainsize,
706 706 chainratio=chainratio, lindist=lineardist,
707 707 extradist=extradist, extraratio=extraratio)
708 708 if withsparseread:
709 709 readsize = 0
710 710 largestblock = 0
711 711 srchunks = 0
712 712
713 713 for revschunk in deltautil.slicechunk(r, chain):
714 714 srchunks += 1
715 715 blkend = start(revschunk[-1]) + length(revschunk[-1])
716 716 blksize = blkend - start(revschunk[0])
717 717
718 718 readsize += blksize
719 719 if largestblock < blksize:
720 720 largestblock = blksize
721 721
722 722 if readsize:
723 723 readdensity = float(chainsize) / float(readsize)
724 724 else:
725 725 readdensity = 1
726 726
727 727 fm.write('readsize largestblock readdensity srchunks',
728 728 ' %10d %10d %9.5f %8d',
729 729 readsize, largestblock, readdensity, srchunks,
730 730 readsize=readsize, largestblock=largestblock,
731 731 readdensity=readdensity, srchunks=srchunks)
732 732
733 733 fm.plain('\n')
734 734
735 735 fm.end()
736 736
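
A short worked example of the ratio arithmetic performed by debugdeltachain above; all byte counts are invented for illustration:

# Hypothetical sizes, mirroring the computations in revinfo() and the main loop above.
uncomp = 600         # uncompressed size of the revision
chainsize = 1200     # total compressed size of the revisions in its delta chain
lineardist = 1500    # bytes from the chain base to the end of this revision on disk

chainratio = float(chainsize) / float(uncomp)     # 2.00, the typical ratio for a fresh chain
extradist = lineardist - chainsize                # 300 bytes of unrelated data interleaved
extraratio = float(extradist) / float(chainsize)  # 0.25, i.e. 25% extra data read for this chain
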
737 737 @command('debugdirstate|debugstate',
738 738 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
739 739 ('', 'dates', True, _('display the saved mtime')),
740 740 ('', 'datesort', None, _('sort by saved mtime'))],
741 741 _('[OPTION]...'))
742 742 def debugstate(ui, repo, **opts):
743 743 """show the contents of the current dirstate"""
744 744
745 745 nodates = not opts[r'dates']
746 746 if opts.get(r'nodates') is not None:
747 747 nodates = True
748 748 datesort = opts.get(r'datesort')
749 749
750 750 if datesort:
751 751 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
752 752 else:
753 753 keyfunc = None # sort by filename
754 754 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
755 755 if ent[3] == -1:
756 756 timestr = 'unset '
757 757 elif nodates:
758 758 timestr = 'set '
759 759 else:
760 760 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
761 761 time.localtime(ent[3]))
762 762 timestr = encoding.strtolocal(timestr)
763 763 if ent[1] & 0o20000:
764 764 mode = 'lnk'
765 765 else:
766 766 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
767 767 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
768 768 for f in repo.dirstate.copies():
769 769 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
770 770
771 771 @command('debugdiscovery',
772 772 [('', 'old', None, _('use old-style discovery')),
773 773 ('', 'nonheads', None,
774 774 _('use old-style discovery with non-heads included')),
775 775 ('', 'rev', [], 'restrict discovery to this set of revs'),
776 776 ('', 'seed', '12323', 'specify the random seed use for discovery'),
777 777 ] + cmdutil.remoteopts,
778 778 _('[--rev REV] [OTHER]'))
779 779 def debugdiscovery(ui, repo, remoteurl="default", **opts):
780 780 """runs the changeset discovery protocol in isolation"""
781 781 opts = pycompat.byteskwargs(opts)
782 782 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
783 783 remote = hg.peer(repo, opts, remoteurl)
784 784 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
785 785
786 786 # make sure tests are repeatable
787 787 random.seed(int(opts['seed']))
788 788
789 789
790 790
791 791 if opts.get('old'):
792 792 def doit(pushedrevs, remoteheads, remote=remote):
793 793 if not util.safehasattr(remote, 'branches'):
794 794 # enable in-client legacy support
795 795 remote = localrepo.locallegacypeer(remote.local())
796 796 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
797 797 force=True)
798 798 common = set(common)
799 799 if not opts.get('nonheads'):
800 800 ui.write(("unpruned common: %s\n") %
801 801 " ".join(sorted(short(n) for n in common)))
802 802
803 803 clnode = repo.changelog.node
804 804 common = repo.revs('heads(::%ln)', common)
805 805 common = {clnode(r) for r in common}
806 806 return common, hds
807 807 else:
808 808 def doit(pushedrevs, remoteheads, remote=remote):
809 809 nodes = None
810 810 if pushedrevs:
811 811 revs = scmutil.revrange(repo, pushedrevs)
812 812 nodes = [repo[r].node() for r in revs]
813 813 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
814 814 ancestorsof=nodes)
815 815 return common, hds
816 816
817 817 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
818 818 localrevs = opts['rev']
819 819 with util.timedcm('debug-discovery') as t:
820 820 common, hds = doit(localrevs, remoterevs)
821 821
822 822 # compute all statistics
823 823 common = set(common)
824 824 rheads = set(hds)
825 825 lheads = set(repo.heads())
826 826
827 827 data = {}
828 828 data['elapsed'] = t.elapsed
829 829 data['nb-common'] = len(common)
830 830 data['nb-common-local'] = len(common & lheads)
831 831 data['nb-common-remote'] = len(common & rheads)
832 832 data['nb-common-both'] = len(common & rheads & lheads)
833 833 data['nb-local'] = len(lheads)
834 834 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
835 835 data['nb-remote'] = len(rheads)
836 836 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
837 837 data['nb-revs'] = len(repo.revs('all()'))
838 838 data['nb-revs-common'] = len(repo.revs('::%ln', common))
839 839 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
840 840
841 841 # display discovery summary
842 842 ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
843 843 ui.write(("heads summary:\n"))
844 844 ui.write((" total common heads: %(nb-common)9d\n") % data)
845 845 ui.write((" also local heads: %(nb-common-local)9d\n") % data)
846 846 ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
847 847 ui.write((" both: %(nb-common-both)9d\n") % data)
848 848 ui.write((" local heads: %(nb-local)9d\n") % data)
849 849 ui.write((" common: %(nb-common-local)9d\n") % data)
850 850 ui.write((" missing: %(nb-local-missing)9d\n") % data)
851 851 ui.write((" remote heads: %(nb-remote)9d\n") % data)
852 852 ui.write((" common: %(nb-common-remote)9d\n") % data)
853 853 ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
854 854 ui.write(("local changesets: %(nb-revs)9d\n") % data)
855 855 ui.write((" common: %(nb-revs-common)9d\n") % data)
856 856 ui.write((" missing: %(nb-revs-missing)9d\n") % data)
857 857
858 858 if ui.verbose:
859 859 ui.write(("common heads: %s\n") %
860 860 " ".join(sorted(short(n) for n in common)))
861 861
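
To make the head-summary arithmetic of debugdiscovery above concrete, here is a hedged example with made-up head sets; the counters mirror the data[...] fields computed in the command:

# Hypothetical discovery result (illustration only).
common = {b'a', b'b', b'c'}   # common heads reported by discovery
lheads = {b'b', b'c', b'd'}   # local repository heads
rheads = {b'c', b'e'}         # remote heads

nb_common_local = len(common & lheads)               # 2  ("also local heads")
nb_common_remote = len(common & rheads)              # 1  ("also remote heads")
nb_common_both = len(common & lheads & rheads)       # 1  ("both")
nb_local_missing = len(lheads) - nb_common_local     # 1  (local head b'd' is not common)
nb_remote_unknown = len(rheads) - nb_common_remote   # 1  (remote head b'e' is unknown locally)
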
862 862 _chunksize = 4 << 10
863 863
864 864 @command('debugdownload',
865 865 [
866 866 ('o', 'output', '', _('path')),
867 867 ],
868 868 optionalrepo=True)
869 869 def debugdownload(ui, repo, url, output=None, **opts):
870 870 """download a resource using Mercurial logic and config
871 871 """
872 872 fh = urlmod.open(ui, url, output)
873 873
874 874 dest = ui
875 875 if output:
876 876 dest = open(output, "wb", _chunksize)
877 877 try:
878 878 data = fh.read(_chunksize)
879 879 while data:
880 880 dest.write(data)
881 881 data = fh.read(_chunksize)
882 882 finally:
883 883 if output:
884 884 dest.close()
885 885
886 886 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
887 887 def debugextensions(ui, repo, **opts):
888 888 '''show information about active extensions'''
889 889 opts = pycompat.byteskwargs(opts)
890 890 exts = extensions.extensions(ui)
891 891 hgver = util.version()
892 892 fm = ui.formatter('debugextensions', opts)
893 893 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
894 894 isinternal = extensions.ismoduleinternal(extmod)
895 895 extsource = pycompat.fsencode(extmod.__file__)
896 896 if isinternal:
897 897 exttestedwith = [] # never expose magic string to users
898 898 else:
899 899 exttestedwith = getattr(extmod, 'testedwith', '').split()
900 900 extbuglink = getattr(extmod, 'buglink', None)
901 901
902 902 fm.startitem()
903 903
904 904 if ui.quiet or ui.verbose:
905 905 fm.write('name', '%s\n', extname)
906 906 else:
907 907 fm.write('name', '%s', extname)
908 908 if isinternal or hgver in exttestedwith:
909 909 fm.plain('\n')
910 910 elif not exttestedwith:
911 911 fm.plain(_(' (untested!)\n'))
912 912 else:
913 913 lasttestedversion = exttestedwith[-1]
914 914 fm.plain(' (%s!)\n' % lasttestedversion)
915 915
916 916 fm.condwrite(ui.verbose and extsource, 'source',
917 917 _(' location: %s\n'), extsource or "")
918 918
919 919 if ui.verbose:
920 920 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
921 921 fm.data(bundled=isinternal)
922 922
923 923 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
924 924 _(' tested with: %s\n'),
925 925 fm.formatlist(exttestedwith, name='ver'))
926 926
927 927 fm.condwrite(ui.verbose and extbuglink, 'buglink',
928 928 _(' bug reporting: %s\n'), extbuglink or "")
929 929
930 930 fm.end()
931 931
932 932 @command('debugfileset',
933 933 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
934 934 ('', 'all-files', False,
935 935 _('test files from all revisions and working directory')),
936 936 ('s', 'show-matcher', None,
937 937 _('print internal representation of matcher')),
938 938 ('p', 'show-stage', [],
939 939 _('print parsed tree at the given stage'), _('NAME'))],
940 940 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
941 941 def debugfileset(ui, repo, expr, **opts):
942 942 '''parse and apply a fileset specification'''
943 943 from . import fileset
944 944 fileset.symbols # force import of fileset so we have predicates to optimize
945 945 opts = pycompat.byteskwargs(opts)
946 946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
947 947
948 948 stages = [
949 949 ('parsed', pycompat.identity),
950 950 ('analyzed', filesetlang.analyze),
951 951 ('optimized', filesetlang.optimize),
952 952 ]
953 953 stagenames = set(n for n, f in stages)
954 954
955 955 showalways = set()
956 956 if ui.verbose and not opts['show_stage']:
957 957 # show parsed tree by --verbose (deprecated)
958 958 showalways.add('parsed')
959 959 if opts['show_stage'] == ['all']:
960 960 showalways.update(stagenames)
961 961 else:
962 962 for n in opts['show_stage']:
963 963 if n not in stagenames:
964 964 raise error.Abort(_('invalid stage name: %s') % n)
965 965 showalways.update(opts['show_stage'])
966 966
967 967 tree = filesetlang.parse(expr)
968 968 for n, f in stages:
969 969 tree = f(tree)
970 970 if n in showalways:
971 971 if opts['show_stage'] or n != 'parsed':
972 972 ui.write(("* %s:\n") % n)
973 973 ui.write(filesetlang.prettyformat(tree), "\n")
974 974
975 975 files = set()
976 976 if opts['all_files']:
977 977 for r in repo:
978 978 c = repo[r]
979 979 files.update(c.files())
980 980 files.update(c.substate)
981 981 if opts['all_files'] or ctx.rev() is None:
982 982 wctx = repo[None]
983 983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
984 984 subrepos=list(wctx.substate),
985 985 unknown=True, ignored=True))
986 986 files.update(wctx.substate)
987 987 else:
988 988 files.update(ctx.files())
989 989 files.update(ctx.substate)
990 990
991 991 m = ctx.matchfileset(expr)
992 992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
993 993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
994 994 for f in sorted(files):
995 995 if not m(f):
996 996 continue
997 997 ui.write("%s\n" % f)
998 998
999 999 @command('debugformat',
1000 1000 [] + cmdutil.formatteropts)
1001 1001 def debugformat(ui, repo, **opts):
1002 1002 """display format information about the current repository
1003 1003
1004 1004 Use --verbose to get extra information about current config value and
1005 1005 Mercurial default."""
1006 1006 opts = pycompat.byteskwargs(opts)
1007 1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1008 1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1009 1009
1010 1010 def makeformatname(name):
1011 1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1012 1012
1013 1013 fm = ui.formatter('debugformat', opts)
1014 1014 if fm.isplain():
1015 1015 def formatvalue(value):
1016 1016 if util.safehasattr(value, 'startswith'):
1017 1017 return value
1018 1018 if value:
1019 1019 return 'yes'
1020 1020 else:
1021 1021 return 'no'
1022 1022 else:
1023 1023 formatvalue = pycompat.identity
1024 1024
1025 1025 fm.plain('format-variant')
1026 1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1027 1027 fm.plain(' repo')
1028 1028 if ui.verbose:
1029 1029 fm.plain(' config default')
1030 1030 fm.plain('\n')
1031 1031 for fv in upgrade.allformatvariant:
1032 1032 fm.startitem()
1033 1033 repovalue = fv.fromrepo(repo)
1034 1034 configvalue = fv.fromconfig(repo)
1035 1035
1036 1036 if repovalue != configvalue:
1037 1037 namelabel = 'formatvariant.name.mismatchconfig'
1038 1038 repolabel = 'formatvariant.repo.mismatchconfig'
1039 1039 elif repovalue != fv.default:
1040 1040 namelabel = 'formatvariant.name.mismatchdefault'
1041 1041 repolabel = 'formatvariant.repo.mismatchdefault'
1042 1042 else:
1043 1043 namelabel = 'formatvariant.name.uptodate'
1044 1044 repolabel = 'formatvariant.repo.uptodate'
1045 1045
1046 1046 fm.write('name', makeformatname(fv.name), fv.name,
1047 1047 label=namelabel)
1048 1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1049 1049 label=repolabel)
1050 1050 if fv.default != configvalue:
1051 1051 configlabel = 'formatvariant.config.special'
1052 1052 else:
1053 1053 configlabel = 'formatvariant.config.default'
1054 1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1055 1055 label=configlabel)
1056 1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1057 1057 label='formatvariant.default')
1058 1058 fm.plain('\n')
1059 1059 fm.end()
1060 1060
1061 1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1062 1062 def debugfsinfo(ui, path="."):
1063 1063 """show information detected about current filesystem"""
1064 1064 ui.write(('path: %s\n') % path)
1065 1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1066 1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1067 1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1068 1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1069 1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1070 1070 casesensitive = '(unknown)'
1071 1071 try:
1072 1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1073 1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1074 1074 except OSError:
1075 1075 pass
1076 1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1077 1077
1078 1078 @command('debuggetbundle',
1079 1079 [('H', 'head', [], _('id of head node'), _('ID')),
1080 1080 ('C', 'common', [], _('id of common node'), _('ID')),
1081 1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1082 1082 _('REPO FILE [-H|-C ID]...'),
1083 1083 norepo=True)
1084 1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1085 1085 """retrieves a bundle from a repo
1086 1086
1087 1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1088 1088 given file.
1089 1089 """
1090 1090 opts = pycompat.byteskwargs(opts)
1091 1091 repo = hg.peer(ui, opts, repopath)
1092 1092 if not repo.capable('getbundle'):
1093 1093 raise error.Abort("getbundle() not supported by target repository")
1094 1094 args = {}
1095 1095 if common:
1096 1096 args[r'common'] = [bin(s) for s in common]
1097 1097 if head:
1098 1098 args[r'heads'] = [bin(s) for s in head]
1099 1099 # TODO: get desired bundlecaps from command line.
1100 1100 args[r'bundlecaps'] = None
1101 1101 bundle = repo.getbundle('debug', **args)
1102 1102
1103 1103 bundletype = opts.get('type', 'bzip2').lower()
1104 1104 btypes = {'none': 'HG10UN',
1105 1105 'bzip2': 'HG10BZ',
1106 1106 'gzip': 'HG10GZ',
1107 1107 'bundle2': 'HG20'}
1108 1108 bundletype = btypes.get(bundletype)
1109 1109 if bundletype not in bundle2.bundletypes:
1110 1110 raise error.Abort(_('unknown bundle type specified with --type'))
1111 1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1112 1112
1113 1113 @command('debugignore', [], '[FILE]')
1114 1114 def debugignore(ui, repo, *files, **opts):
1115 1115 """display the combined ignore pattern and information about ignored files
1116 1116
1117 1117 With no argument display the combined ignore pattern.
1118 1118
1119 1119 Given space separated file names, shows if the given file is ignored and
1120 1120 if so, show the ignore rule (file and line number) that matched it.
1121 1121 """
1122 1122 ignore = repo.dirstate._ignore
1123 1123 if not files:
1124 1124 # Show all the patterns
1125 1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1126 1126 else:
1127 1127 m = scmutil.match(repo[None], pats=files)
1128 1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1129 1129 for f in m.files():
1130 1130 nf = util.normpath(f)
1131 1131 ignored = None
1132 1132 ignoredata = None
1133 1133 if nf != '.':
1134 1134 if ignore(nf):
1135 1135 ignored = nf
1136 1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1137 1137 else:
1138 1138 for p in util.finddirs(nf):
1139 1139 if ignore(p):
1140 1140 ignored = p
1141 1141 ignoredata = repo.dirstate._ignorefileandline(p)
1142 1142 break
1143 1143 if ignored:
1144 1144 if ignored == nf:
1145 1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1146 1146 else:
1147 1147 ui.write(_("%s is ignored because of "
1148 1148 "containing directory %s\n")
1149 1149 % (uipathfn(f), ignored))
1150 1150 ignorefile, lineno, line = ignoredata
1151 1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1152 1152 % (ignorefile, lineno, line))
1153 1153 else:
1154 1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1155 1155
1156 1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1157 1157 _('-c|-m|FILE'))
1158 1158 def debugindex(ui, repo, file_=None, **opts):
1159 1159 """dump index data for a storage primitive"""
1160 1160 opts = pycompat.byteskwargs(opts)
1161 1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1162 1162
1163 1163 if ui.debugflag:
1164 1164 shortfn = hex
1165 1165 else:
1166 1166 shortfn = short
1167 1167
1168 1168 idlen = 12
1169 1169 for i in store:
1170 1170 idlen = len(shortfn(store.node(i)))
1171 1171 break
1172 1172
1173 1173 fm = ui.formatter('debugindex', opts)
1174 1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1175 1175 b'nodeid'.ljust(idlen),
1176 1176 b'p1'.ljust(idlen)))
1177 1177
1178 1178 for rev in store:
1179 1179 node = store.node(rev)
1180 1180 parents = store.parents(node)
1181 1181
1182 1182 fm.startitem()
1183 1183 fm.write(b'rev', b'%6d ', rev)
1184 1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1185 1185 fm.write(b'node', '%s ', shortfn(node))
1186 1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1187 1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1188 1188 fm.plain(b'\n')
1189 1189
1190 1190 fm.end()
1191 1191
1192 1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1193 1193 _('-c|-m|FILE'), optionalrepo=True)
1194 1194 def debugindexdot(ui, repo, file_=None, **opts):
1195 1195 """dump an index DAG as a graphviz dot file"""
1196 1196 opts = pycompat.byteskwargs(opts)
1197 1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1198 1198 ui.write(("digraph G {\n"))
1199 1199 for i in r:
1200 1200 node = r.node(i)
1201 1201 pp = r.parents(node)
1202 1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1203 1203 if pp[1] != nullid:
1204 1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1205 1205 ui.write("}\n")
1206 1206
1207 1207 @command('debugindexstats', [])
1208 1208 def debugindexstats(ui, repo):
1209 1209 """show stats related to the changelog index"""
1210 1210 repo.changelog.shortest(nullid, 1)
1211 1211 index = repo.changelog.index
1212 1212 if not util.safehasattr(index, 'stats'):
1213 1213 raise error.Abort(_('debugindexstats only works with native code'))
1214 1214 for k, v in sorted(index.stats().items()):
1215 1215 ui.write('%s: %d\n' % (k, v))
1216 1216
1217 1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1218 1218 def debuginstall(ui, **opts):
1219 1219 '''test Mercurial installation
1220 1220
1221 1221 Returns 0 on success.
1222 1222 '''
1223 1223 opts = pycompat.byteskwargs(opts)
1224 1224
1225 1225 problems = 0
1226 1226
1227 1227 fm = ui.formatter('debuginstall', opts)
1228 1228 fm.startitem()
1229 1229
1230 1230 # encoding
1231 1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1232 1232 err = None
1233 1233 try:
1234 1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1235 1235 except LookupError as inst:
1236 1236 err = stringutil.forcebytestr(inst)
1237 1237 problems += 1
1238 1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1239 1239 " (check that your locale is properly set)\n"), err)
1240 1240
1241 1241 # Python
1242 1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1243 1243 pycompat.sysexecutable)
1244 1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1245 1245 ("%d.%d.%d" % sys.version_info[:3]))
1246 1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1247 1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1248 1248
1249 1249 security = set(sslutil.supportedprotocols)
1250 1250 if sslutil.hassni:
1251 1251 security.add('sni')
1252 1252
1253 1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1254 1254 fm.formatlist(sorted(security), name='protocol',
1255 1255 fmt='%s', sep=','))
1256 1256
1257 1257 # These are warnings, not errors. So don't increment problem count. This
1258 1258 # may change in the future.
1259 1259 if 'tls1.2' not in security:
1260 1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1261 1261 'network connections lack modern security\n'))
1262 1262 if 'sni' not in security:
1263 1263 fm.plain(_(' SNI not supported by Python install; may have '
1264 1264 'connectivity issues with some servers\n'))
1265 1265
1266 1266 # TODO print CA cert info
1267 1267
1268 1268 # hg version
1269 1269 hgver = util.version()
1270 1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1271 1271 hgver.split('+')[0])
1272 1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1273 1273 '+'.join(hgver.split('+')[1:]))
1274 1274
1275 1275 # compiled modules
1276 1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1277 1277 policy.policy)
1278 1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1279 1279 os.path.dirname(pycompat.fsencode(__file__)))
1280 1280
1281 if policy.policy in ('c', 'allow'):
1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1282 rustext = rustandc # for now, that's the only case
1283 cext = policy.policy in ('c', 'allow') or rustandc
1284 nopure = cext or rustext
1285 if nopure:
1282 1286 err = None
1283 1287 try:
1284 from .cext import (
1285 base85,
1286 bdiff,
1287 mpatch,
1288 osutil,
1289 )
1290 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1288 if cext:
1289 from .cext import (
1290 base85,
1291 bdiff,
1292 mpatch,
1293 osutil,
1294 )
1295 # quiet pyflakes
1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1297 if rustext:
1298 from .rustext import (
1299 ancestor,
1300 dirstate,
1301 )
1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1291 1303 except Exception as inst:
1292 1304 err = stringutil.forcebytestr(inst)
1293 1305 problems += 1
1294 1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1295 1307
1296 1308 compengines = util.compengines._engines.values()
1297 1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1298 1310 fm.formatlist(sorted(e.name() for e in compengines),
1299 1311 name='compengine', fmt='%s', sep=', '))
1300 1312 fm.write('compenginesavail', _('checking available compression engines '
1301 1313 '(%s)\n'),
1302 1314 fm.formatlist(sorted(e.name() for e in compengines
1303 1315 if e.available()),
1304 1316 name='compengine', fmt='%s', sep=', '))
1305 1317 wirecompengines = compression.compengines.supportedwireengines(
1306 1318 compression.SERVERROLE)
1307 1319 fm.write('compenginesserver', _('checking available compression engines '
1308 1320 'for wire protocol (%s)\n'),
1309 1321 fm.formatlist([e.name() for e in wirecompengines
1310 1322 if e.wireprotosupport()],
1311 1323 name='compengine', fmt='%s', sep=', '))
1312 1324 re2 = 'missing'
1313 1325 if util._re2:
1314 1326 re2 = 'available'
1315 1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1316 1328 fm.data(re2=bool(util._re2))
1317 1329
1318 1330 # templates
1319 1331 p = templater.templatepaths()
1320 1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1321 1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1322 1334 if p:
1323 1335 m = templater.templatepath("map-cmdline.default")
1324 1336 if m:
1325 1337 # template found, check if it is working
1326 1338 err = None
1327 1339 try:
1328 1340 templater.templater.frommapfile(m)
1329 1341 except Exception as inst:
1330 1342 err = stringutil.forcebytestr(inst)
1331 1343 p = None
1332 1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1333 1345 else:
1334 1346 p = None
1335 1347 fm.condwrite(p, 'defaulttemplate',
1336 1348 _("checking default template (%s)\n"), m)
1337 1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1338 1350 _(" template '%s' not found\n"), "default")
1339 1351 if not p:
1340 1352 problems += 1
1341 1353 fm.condwrite(not p, '',
1342 1354 _(" (templates seem to have been installed incorrectly)\n"))
1343 1355
1344 1356 # editor
1345 1357 editor = ui.geteditor()
1346 1358 editor = util.expandpath(editor)
1347 1359 editorbin = procutil.shellsplit(editor)[0]
1348 1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1349 1361 cmdpath = procutil.findexe(editorbin)
1350 1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1351 1363 _(" No commit editor set and can't find %s in PATH\n"
1352 1364 " (specify a commit editor in your configuration"
1353 1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1354 1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1355 1367 _(" Can't find editor '%s' in PATH\n"
1356 1368 " (specify a commit editor in your configuration"
1357 1369 " file)\n"), not cmdpath and editorbin)
1358 1370 if not cmdpath and editor != 'vi':
1359 1371 problems += 1
1360 1372
1361 1373 # check username
1362 1374 username = None
1363 1375 err = None
1364 1376 try:
1365 1377 username = ui.username()
1366 1378 except error.Abort as e:
1367 1379 err = stringutil.forcebytestr(e)
1368 1380 problems += 1
1369 1381
1370 1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1371 1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1372 1384 " (specify a username in your configuration file)\n"), err)
1373 1385
1374 1386 fm.condwrite(not problems, '',
1375 1387 _("no problems detected\n"))
1376 1388 if not problems:
1377 1389 fm.data(problems=problems)
1378 1390 fm.condwrite(problems, 'problems',
1379 1391 _("%d problems detected,"
1380 1392 " please check your install!\n"), problems)
1381 1393 fm.end()
1382 1394
1383 1395 return problems
1384 1396
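
The module-policy check added to debuginstall in this change gates which compiled implementations get imported. Below is a minimal sketch of that mapping, assuming only the policy names visible in the diff ('c', 'allow', 'rust+c', 'rust+c-allow'); the helper name is hypothetical and not part of Mercurial:

# Illustrative only: mirrors the rustandc/cext/nopure flags computed in debuginstall above.
def enabled_module_impls(policyname):
    rustandc = policyname in ('rust+c', 'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policyname in ('c', 'allow') or rustandc
    nopure = cext or rustext
    return {'c': cext, 'rust': rustext, 'pure only': not nopure}

# enabled_module_impls('rust+c') -> {'c': True, 'rust': True, 'pure only': False}
# enabled_module_impls('allow')  -> {'c': True, 'rust': False, 'pure only': False}
# enabled_module_impls('py')     -> {'c': False, 'rust': False, 'pure only': True}
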
1385 1397 @command('debugknown', [], _('REPO ID...'), norepo=True)
1386 1398 def debugknown(ui, repopath, *ids, **opts):
1387 1399 """test whether node ids are known to a repo
1388 1400
1389 1401 Every ID must be a full-length hex node id string. Returns a list of 0s
1390 1402 and 1s indicating unknown/known.
1391 1403 """
1392 1404 opts = pycompat.byteskwargs(opts)
1393 1405 repo = hg.peer(ui, opts, repopath)
1394 1406 if not repo.capable('known'):
1395 1407 raise error.Abort("known() not supported by target repository")
1396 1408 flags = repo.known([bin(s) for s in ids])
1397 1409 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1398 1410
1399 1411 @command('debuglabelcomplete', [], _('LABEL...'))
1400 1412 def debuglabelcomplete(ui, repo, *args):
1401 1413 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1402 1414 debugnamecomplete(ui, repo, *args)
1403 1415
1404 1416 @command('debuglocks',
1405 1417 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1406 1418 ('W', 'force-wlock', None,
1407 1419 _('free the working state lock (DANGEROUS)')),
1408 1420 ('s', 'set-lock', None, _('set the store lock until stopped')),
1409 1421 ('S', 'set-wlock', None,
1410 1422 _('set the working state lock until stopped'))],
1411 1423 _('[OPTION]...'))
1412 1424 def debuglocks(ui, repo, **opts):
1413 1425 """show or modify state of locks
1414 1426
1415 1427 By default, this command will show which locks are held. This
1416 1428 includes the user and process holding the lock, the amount of time
1417 1429 the lock has been held, and the machine name where the process is
1418 1430 running if it's not local.
1419 1431
1420 1432 Locks protect the integrity of Mercurial's data, so should be
1421 1433 treated with care. System crashes or other interruptions may cause
1422 1434 locks to not be properly released, though Mercurial will usually
1423 1435 detect and remove such stale locks automatically.
1424 1436
1425 1437 However, detecting stale locks may not always be possible (for
1426 1438 instance, on a shared filesystem). Removing locks may also be
1427 1439 blocked by filesystem permissions.
1428 1440
1429 1441 Setting a lock will prevent other commands from changing the data.
1430 1442 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1431 1443 The set locks are removed when the command exits.
1432 1444
1433 1445 Returns 0 if no locks are held.
1434 1446
1435 1447 """
1436 1448
1437 1449 if opts.get(r'force_lock'):
1438 1450 repo.svfs.unlink('lock')
1439 1451 if opts.get(r'force_wlock'):
1440 1452 repo.vfs.unlink('wlock')
1441 1453 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1442 1454 return 0
1443 1455
1444 1456 locks = []
1445 1457 try:
1446 1458 if opts.get(r'set_wlock'):
1447 1459 try:
1448 1460 locks.append(repo.wlock(False))
1449 1461 except error.LockHeld:
1450 1462 raise error.Abort(_('wlock is already held'))
1451 1463 if opts.get(r'set_lock'):
1452 1464 try:
1453 1465 locks.append(repo.lock(False))
1454 1466 except error.LockHeld:
1455 1467 raise error.Abort(_('lock is already held'))
1456 1468 if len(locks):
1457 1469 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1458 1470 return 0
1459 1471 finally:
1460 1472 release(*locks)
1461 1473
1462 1474 now = time.time()
1463 1475 held = 0
1464 1476
1465 1477 def report(vfs, name, method):
1466 1478 # this causes stale locks to get reaped for more accurate reporting
1467 1479 try:
1468 1480 l = method(False)
1469 1481 except error.LockHeld:
1470 1482 l = None
1471 1483
1472 1484 if l:
1473 1485 l.release()
1474 1486 else:
1475 1487 try:
1476 1488 st = vfs.lstat(name)
1477 1489 age = now - st[stat.ST_MTIME]
1478 1490 user = util.username(st.st_uid)
1479 1491 locker = vfs.readlock(name)
1480 1492 if ":" in locker:
1481 1493 host, pid = locker.split(':')
1482 1494 if host == socket.gethostname():
1483 1495 locker = 'user %s, process %s' % (user or b'None', pid)
1484 1496 else:
1485 1497 locker = ('user %s, process %s, host %s'
1486 1498 % (user or b'None', pid, host))
1487 1499 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1488 1500 return 1
1489 1501 except OSError as e:
1490 1502 if e.errno != errno.ENOENT:
1491 1503 raise
1492 1504
1493 1505 ui.write(("%-6s free\n") % (name + ":"))
1494 1506 return 0
1495 1507
1496 1508 held += report(repo.svfs, "lock", repo.lock)
1497 1509 held += report(repo.vfs, "wlock", repo.wlock)
1498 1510
1499 1511 return held
1500 1512
1501 1513 @command('debugmanifestfulltextcache', [
1502 1514 ('', 'clear', False, _('clear the cache')),
1503 1515 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1504 1516 _('NODE'))
1505 1517 ], '')
1506 1518 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1507 1519 """show, clear or amend the contents of the manifest fulltext cache"""
1508 1520
1509 1521 def getcache():
1510 1522 r = repo.manifestlog.getstorage(b'')
1511 1523 try:
1512 1524 return r._fulltextcache
1513 1525 except AttributeError:
1514 1526 msg = _("Current revlog implementation doesn't appear to have a "
1515 1527 "manifest fulltext cache\n")
1516 1528 raise error.Abort(msg)
1517 1529
1518 1530 if opts.get(r'clear'):
1519 1531 with repo.wlock():
1520 1532 cache = getcache()
1521 1533 cache.clear(clear_persisted_data=True)
1522 1534 return
1523 1535
1524 1536 if add:
1525 1537 with repo.wlock():
1526 1538 m = repo.manifestlog
1527 1539 store = m.getstorage(b'')
1528 1540 for n in add:
1529 1541 try:
1530 1542 manifest = m[store.lookup(n)]
1531 1543 except error.LookupError as e:
1532 1544 raise error.Abort(e, hint="Check your manifest node id")
1533 1545 manifest.read() # stores revision in cache too
1534 1546 return
1535 1547
1536 1548 cache = getcache()
1537 1549 if not len(cache):
1538 1550 ui.write(_('cache empty\n'))
1539 1551 else:
1540 1552 ui.write(
1541 1553 _('cache contains %d manifest entries, in order of most to '
1542 1554 'least recent:\n') % (len(cache),))
1543 1555 totalsize = 0
1544 1556 for nodeid in cache:
1545 1557 # Use cache.peek to not update the LRU order
1546 1558 data = cache.peek(nodeid)
1547 1559 size = len(data)
1548 1560 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1549 1561 ui.write(_('id: %s, size %s\n') % (
1550 1562 hex(nodeid), util.bytecount(size)))
1551 1563 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1552 1564 ui.write(
1553 1565 _('total cache data size %s, on-disk %s\n') % (
1554 1566 util.bytecount(totalsize), util.bytecount(ondisk))
1555 1567 )
1556 1568
1557 1569 @command('debugmergestate', [], '')
1558 1570 def debugmergestate(ui, repo, *args):
1559 1571 """print merge state
1560 1572
1561 1573 Use --verbose to print out information about whether v1 or v2 merge state
1562 1574 was chosen."""
1563 1575 def _hashornull(h):
1564 1576 if h == nullhex:
1565 1577 return 'null'
1566 1578 else:
1567 1579 return h
1568 1580
1569 1581 def printrecords(version):
1570 1582 ui.write(('* version %d records\n') % version)
1571 1583 if version == 1:
1572 1584 records = v1records
1573 1585 else:
1574 1586 records = v2records
1575 1587
1576 1588 for rtype, record in records:
1577 1589 # pretty print some record types
1578 1590 if rtype == 'L':
1579 1591 ui.write(('local: %s\n') % record)
1580 1592 elif rtype == 'O':
1581 1593 ui.write(('other: %s\n') % record)
1582 1594 elif rtype == 'm':
1583 1595 driver, mdstate = record.split('\0', 1)
1584 1596 ui.write(('merge driver: %s (state "%s")\n')
1585 1597 % (driver, mdstate))
1586 1598 elif rtype in 'FDC':
1587 1599 r = record.split('\0')
1588 1600 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1589 1601 if version == 1:
1590 1602 onode = 'not stored in v1 format'
1591 1603 flags = r[7]
1592 1604 else:
1593 1605 onode, flags = r[7:9]
1594 1606 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1595 1607 % (f, rtype, state, _hashornull(hash)))
1596 1608 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1597 1609 ui.write((' ancestor path: %s (node %s)\n')
1598 1610 % (afile, _hashornull(anode)))
1599 1611 ui.write((' other path: %s (node %s)\n')
1600 1612 % (ofile, _hashornull(onode)))
1601 1613 elif rtype == 'f':
1602 1614 filename, rawextras = record.split('\0', 1)
1603 1615 extras = rawextras.split('\0')
1604 1616 i = 0
1605 1617 extrastrings = []
1606 1618 while i < len(extras):
1607 1619 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1608 1620 i += 2
1609 1621
1610 1622 ui.write(('file extras: %s (%s)\n')
1611 1623 % (filename, ', '.join(extrastrings)))
1612 1624 elif rtype == 'l':
1613 1625 labels = record.split('\0', 2)
1614 1626 labels = [l for l in labels if len(l) > 0]
1615 1627 ui.write(('labels:\n'))
1616 1628 ui.write((' local: %s\n' % labels[0]))
1617 1629 ui.write((' other: %s\n' % labels[1]))
1618 1630 if len(labels) > 2:
1619 1631 ui.write((' base: %s\n' % labels[2]))
1620 1632 else:
1621 1633 ui.write(('unrecognized entry: %s\t%s\n')
1622 1634 % (rtype, record.replace('\0', '\t')))
1623 1635
1624 1636 # Avoid mergestate.read() since it may raise an exception for unsupported
1625 1637 # merge state records. We shouldn't be doing this, but this is OK since this
1626 1638 # command is pretty low-level.
1627 1639 ms = mergemod.mergestate(repo)
1628 1640
1629 1641 # sort so that reasonable information is on top
1630 1642 v1records = ms._readrecordsv1()
1631 1643 v2records = ms._readrecordsv2()
1632 1644 order = 'LOml'
1633 1645 def key(r):
1634 1646 idx = order.find(r[0])
1635 1647 if idx == -1:
1636 1648 return (1, r[1])
1637 1649 else:
1638 1650 return (0, idx)
1639 1651 v1records.sort(key=key)
1640 1652 v2records.sort(key=key)
1641 1653
1642 1654 if not v1records and not v2records:
1643 1655 ui.write(('no merge state found\n'))
1644 1656 elif not v2records:
1645 1657 ui.note(('no version 2 merge state\n'))
1646 1658 printrecords(1)
1647 1659 elif ms._v1v2match(v1records, v2records):
1648 1660 ui.note(('v1 and v2 states match: using v2\n'))
1649 1661 printrecords(2)
1650 1662 else:
1651 1663 ui.note(('v1 and v2 states mismatch: using v1\n'))
1652 1664 printrecords(1)
1653 1665 if ui.verbose:
1654 1666 printrecords(2)
1655 1667
1656 1668 @command('debugnamecomplete', [], _('NAME...'))
1657 1669 def debugnamecomplete(ui, repo, *args):
1658 1670 '''complete "names" - tags, open branch names, bookmark names'''
1659 1671
1660 1672 names = set()
1661 1673 # since we previously only listed open branches, we will handle that
1662 1674 # specially (after this for loop)
1663 1675 for name, ns in repo.names.iteritems():
1664 1676 if name != 'branches':
1665 1677 names.update(ns.listnames(repo))
1666 1678 names.update(tag for (tag, heads, tip, closed)
1667 1679 in repo.branchmap().iterbranches() if not closed)
1668 1680 completions = set()
1669 1681 if not args:
1670 1682 args = ['']
1671 1683 for a in args:
1672 1684 completions.update(n for n in names if n.startswith(a))
1673 1685 ui.write('\n'.join(sorted(completions)))
1674 1686 ui.write('\n')
1675 1687
1676 1688 @command('debugobsolete',
1677 1689 [('', 'flags', 0, _('markers flag')),
1678 1690 ('', 'record-parents', False,
1679 1691 _('record parent information for the precursor')),
1680 1692 ('r', 'rev', [], _('display markers relevant to REV')),
1681 1693 ('', 'exclusive', False, _('restrict display to markers only '
1682 1694 'relevant to REV')),
1683 1695 ('', 'index', False, _('display index of the marker')),
1684 1696 ('', 'delete', [], _('delete markers specified by indices')),
1685 1697 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1686 1698 _('[OBSOLETED [REPLACEMENT ...]]'))
1687 1699 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1688 1700 """create arbitrary obsolete marker
1689 1701
1690 1702 With no arguments, displays the list of obsolescence markers."""
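# Illustrative invocations (hypothetical node IDs; parsenodeid() below
# requires full hexadecimal node identifiers). The option names come from
# the command table above:
#
#   $ hg debugobsolete                      # list all markers
#   $ hg debugobsolete --index --rev .      # markers relevant to '.', with indices
#   $ hg debugobsolete --delete 0 2         # delete the markers at indices 0 and 2
#   $ hg debugobsolete OLDFULLHEX NEWFULLHEX  # record OLDFULLHEX rewritten as NEWFULLHEX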
1691 1703
1692 1704 opts = pycompat.byteskwargs(opts)
1693 1705
1694 1706 def parsenodeid(s):
1695 1707 try:
1696 1708 # We do not use revsingle/revrange functions here to accept
1697 1709 # arbitrary node identifiers, possibly not present in the
1698 1710 # local repository.
1699 1711 n = bin(s)
1700 1712 if len(n) != len(nullid):
1701 1713 raise TypeError()
1702 1714 return n
1703 1715 except TypeError:
1704 1716 raise error.Abort('changeset references must be full hexadecimal '
1705 1717 'node identifiers')
1706 1718
1707 1719 if opts.get('delete'):
1708 1720 indices = []
1709 1721 for v in opts.get('delete'):
1710 1722 try:
1711 1723 indices.append(int(v))
1712 1724 except ValueError:
1713 1725 raise error.Abort(_('invalid index value: %r') % v,
1714 1726 hint=_('use integers for indices'))
1715 1727
1716 1728 if repo.currenttransaction():
1717 1729 raise error.Abort(_('cannot delete obsmarkers in the middle '
1718 1730 'of a transaction.'))
1719 1731
1720 1732 with repo.lock():
1721 1733 n = repair.deleteobsmarkers(repo.obsstore, indices)
1722 1734 ui.write(_('deleted %i obsolescence markers\n') % n)
1723 1735
1724 1736 return
1725 1737
1726 1738 if precursor is not None:
1727 1739 if opts['rev']:
1728 1740 raise error.Abort('cannot select revision when creating marker')
1729 1741 metadata = {}
1730 1742 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1731 1743 succs = tuple(parsenodeid(succ) for succ in successors)
1732 1744 l = repo.lock()
1733 1745 try:
1734 1746 tr = repo.transaction('debugobsolete')
1735 1747 try:
1736 1748 date = opts.get('date')
1737 1749 if date:
1738 1750 date = dateutil.parsedate(date)
1739 1751 else:
1740 1752 date = None
1741 1753 prec = parsenodeid(precursor)
1742 1754 parents = None
1743 1755 if opts['record_parents']:
1744 1756 if prec not in repo.unfiltered():
1745 1757 raise error.Abort('cannot use --record-parents on '
1746 1758 'unknown changesets')
1747 1759 parents = repo.unfiltered()[prec].parents()
1748 1760 parents = tuple(p.node() for p in parents)
1749 1761 repo.obsstore.create(tr, prec, succs, opts['flags'],
1750 1762 parents=parents, date=date,
1751 1763 metadata=metadata, ui=ui)
1752 1764 tr.close()
1753 1765 except ValueError as exc:
1754 1766 raise error.Abort(_('bad obsmarker input: %s') %
1755 1767 pycompat.bytestr(exc))
1756 1768 finally:
1757 1769 tr.release()
1758 1770 finally:
1759 1771 l.release()
1760 1772 else:
1761 1773 if opts['rev']:
1762 1774 revs = scmutil.revrange(repo, opts['rev'])
1763 1775 nodes = [repo[r].node() for r in revs]
1764 1776 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1765 1777 exclusive=opts['exclusive']))
1766 1778 markers.sort(key=lambda x: x._data)
1767 1779 else:
1768 1780 markers = obsutil.getmarkers(repo)
1769 1781
1770 1782 markerstoiter = markers
1771 1783 isrelevant = lambda m: True
1772 1784 if opts.get('rev') and opts.get('index'):
1773 1785 markerstoiter = obsutil.getmarkers(repo)
1774 1786 markerset = set(markers)
1775 1787 isrelevant = lambda m: m in markerset
1776 1788
1777 1789 fm = ui.formatter('debugobsolete', opts)
1778 1790 for i, m in enumerate(markerstoiter):
1779 1791 if not isrelevant(m):
1780 1792 # marker can be irrelevant when we're iterating over a set
1781 1793 # of markers (markerstoiter) which is bigger than the set
1782 1794 # of markers we want to display (markers)
1783 1795 # this can happen if both --index and --rev options are
1784 1796 # provided and thus we need to iterate over all of the markers
1785 1797 # to get the correct indices, but only display the ones that
1786 1798 # are relevant to --rev value
1787 1799 continue
1788 1800 fm.startitem()
1789 1801 ind = i if opts.get('index') else None
1790 1802 cmdutil.showmarker(fm, m, index=ind)
1791 1803 fm.end()
1792 1804
1793 1805 @command('debugp1copies',
1794 1806 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1795 1807 _('[-r REV]'))
1796 1808 def debugp1copies(ui, repo, **opts):
1797 1809 """dump copy information compared to p1"""
1798 1810
1799 1811 opts = pycompat.byteskwargs(opts)
1800 1812 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1801 1813 for dst, src in ctx.p1copies().items():
1802 1814 ui.write('%s -> %s\n' % (src, dst))
1803 1815
1804 1816 @command('debugp2copies',
1805 1817 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1806 1818 _('[-r REV]'))
1807 1819 def debugp2copies(ui, repo, **opts):
1808 1820 """dump copy information compared to p2"""
1809 1821
1810 1822 opts = pycompat.byteskwargs(opts)
1811 1823 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1812 1824 for dst, src in ctx.p2copies().items():
1813 1825 ui.write('%s -> %s\n' % (src, dst))
1814 1826
1815 1827 @command('debugpathcomplete',
1816 1828 [('f', 'full', None, _('complete an entire path')),
1817 1829 ('n', 'normal', None, _('show only normal files')),
1818 1830 ('a', 'added', None, _('show only added files')),
1819 1831 ('r', 'removed', None, _('show only removed files'))],
1820 1832 _('FILESPEC...'))
1821 1833 def debugpathcomplete(ui, repo, *specs, **opts):
1822 1834 '''complete part or all of a tracked path
1823 1835
1824 1836 This command supports shells that offer path name completion. It
1825 1837 currently completes only files already known to the dirstate.
1826 1838
1827 1839 Completion extends only to the next path segment unless
1828 1840 --full is specified, in which case entire paths are used.'''
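# Illustrative completions (hypothetical tracked files src/main.c and
# src/util.c):
#
#   $ hg debugpathcomplete sr          # -> "src" (next path segment only)
#   $ hg debugpathcomplete --full sr   # -> "src/main.c" and "src/util.c"
#   $ hg debugpathcomplete -a sr       # consider only added files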
1829 1841
1830 1842 def complete(path, acceptable):
1831 1843 dirstate = repo.dirstate
1832 1844 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1833 1845 rootdir = repo.root + pycompat.ossep
1834 1846 if spec != repo.root and not spec.startswith(rootdir):
1835 1847 return [], []
1836 1848 if os.path.isdir(spec):
1837 1849 spec += '/'
1838 1850 spec = spec[len(rootdir):]
1839 1851 fixpaths = pycompat.ossep != '/'
1840 1852 if fixpaths:
1841 1853 spec = spec.replace(pycompat.ossep, '/')
1842 1854 speclen = len(spec)
1843 1855 fullpaths = opts[r'full']
1844 1856 files, dirs = set(), set()
1845 1857 adddir, addfile = dirs.add, files.add
1846 1858 for f, st in dirstate.iteritems():
1847 1859 if f.startswith(spec) and st[0] in acceptable:
1848 1860 if fixpaths:
1849 1861 f = f.replace('/', pycompat.ossep)
1850 1862 if fullpaths:
1851 1863 addfile(f)
1852 1864 continue
1853 1865 s = f.find(pycompat.ossep, speclen)
1854 1866 if s >= 0:
1855 1867 adddir(f[:s])
1856 1868 else:
1857 1869 addfile(f)
1858 1870 return files, dirs
1859 1871
1860 1872 acceptable = ''
1861 1873 if opts[r'normal']:
1862 1874 acceptable += 'nm'
1863 1875 if opts[r'added']:
1864 1876 acceptable += 'a'
1865 1877 if opts[r'removed']:
1866 1878 acceptable += 'r'
1867 1879 cwd = repo.getcwd()
1868 1880 if not specs:
1869 1881 specs = ['.']
1870 1882
1871 1883 files, dirs = set(), set()
1872 1884 for spec in specs:
1873 1885 f, d = complete(spec, acceptable or 'nmar')
1874 1886 files.update(f)
1875 1887 dirs.update(d)
1876 1888 files.update(dirs)
1877 1889 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1878 1890 ui.write('\n')
1879 1891
1880 1892 @command('debugpathcopies',
1881 1893 cmdutil.walkopts,
1882 1894 'hg debugpathcopies REV1 REV2 [FILE]',
1883 1895 inferrepo=True)
1884 1896 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1885 1897 """show copies between two revisions"""
1886 1898 ctx1 = scmutil.revsingle(repo, rev1)
1887 1899 ctx2 = scmutil.revsingle(repo, rev2)
1888 1900 m = scmutil.match(ctx1, pats, opts)
1889 1901 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1890 1902 ui.write('%s -> %s\n' % (src, dst))
1891 1903
1892 1904 @command('debugpeer', [], _('PATH'), norepo=True)
1893 1905 def debugpeer(ui, path):
1894 1906 """establish a connection to a peer repository"""
1895 1907 # Always enable peer request logging. Requires --debug to display
1896 1908 # though.
1897 1909 overrides = {
1898 1910 ('devel', 'debug.peer-request'): True,
1899 1911 }
1900 1912
1901 1913 with ui.configoverride(overrides):
1902 1914 peer = hg.peer(ui, {}, path)
1903 1915
1904 1916 local = peer.local() is not None
1905 1917 canpush = peer.canpush()
1906 1918
1907 1919 ui.write(_('url: %s\n') % peer.url())
1908 1920 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1909 1921 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1910 1922
1911 1923 @command('debugpickmergetool',
1912 1924 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1913 1925 ('', 'changedelete', None, _('emulate merging change and delete')),
1914 1926 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1915 1927 _('[PATTERN]...'),
1916 1928 inferrepo=True)
1917 1929 def debugpickmergetool(ui, repo, *pats, **opts):
1918 1930 """examine which merge tool is chosen for specified file
1919 1931
1920 1932 As described in :hg:`help merge-tools`, Mercurial examines
1921 1933 configurations below in this order to decide which merge tool is
1922 1934 chosen for the specified file.
1923 1935
1924 1936 1. ``--tool`` option
1925 1937 2. ``HGMERGE`` environment variable
1926 1938 3. configurations in ``merge-patterns`` section
1927 1939 4. configuration of ``ui.merge``
1928 1940 5. configurations in ``merge-tools`` section
1929 1941 6. ``hgmerge`` tool (for historical reasons only)
1930 1942 7. default tool for fallback (``:merge`` or ``:prompt``)
1931 1943
1932 1944 This command writes out the examination result in the style below::
1933 1945
1934 1946 FILE = MERGETOOL
1935 1947
1936 1948 By default, all files known in the first parent context of the
1937 1949 working directory are examined. Use file patterns and/or -I/-X
1938 1950 options to limit target files. -r/--rev is also useful to examine
1939 1951 files in another context without actually updating to it.
1940 1952
1941 1953 With --debug, this command shows warning messages while matching
1942 1954 against ``merge-patterns`` and so on, too. It is recommended to
1943 1955 use this option with explicit file patterns and/or -I/-X options,
1944 1956 because this option increases the amount of output per file according
1945 1957 to configurations in hgrc.
1946 1958
1947 1959 With -v/--verbose, this command first shows the configurations below
1948 1960 (only if specified).
1949 1961
1950 1962 - ``--tool`` option
1951 1963 - ``HGMERGE`` environment variable
1952 1964 - configuration of ``ui.merge``
1953 1965
1954 1966 If a merge tool is chosen before matching against
1955 1967 ``merge-patterns``, this command can't show any helpful
1956 1968 information, even with --debug. In such cases, the information above is
1957 1969 useful for understanding why a merge tool was chosen.
1958 1970 """
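# Illustrative output (hypothetical file name and tool configuration); the
# "FILE = MERGETOOL" lines are produced by the loop below:
#
#   $ hg debugpickmergetool --rev . 'glob:**.c'
#   foo.c = :merge
#
#   $ hg debugpickmergetool -v --tool kdiff3 foo.c
#   with --tool 'kdiff3'
#   foo.c = kdiff3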
1959 1971 opts = pycompat.byteskwargs(opts)
1960 1972 overrides = {}
1961 1973 if opts['tool']:
1962 1974 overrides[('ui', 'forcemerge')] = opts['tool']
1963 1975 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1964 1976
1965 1977 with ui.configoverride(overrides, 'debugmergepatterns'):
1966 1978 hgmerge = encoding.environ.get("HGMERGE")
1967 1979 if hgmerge is not None:
1968 1980 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1969 1981 uimerge = ui.config("ui", "merge")
1970 1982 if uimerge:
1971 1983 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1972 1984
1973 1985 ctx = scmutil.revsingle(repo, opts.get('rev'))
1974 1986 m = scmutil.match(ctx, pats, opts)
1975 1987 changedelete = opts['changedelete']
1976 1988 for path in ctx.walk(m):
1977 1989 fctx = ctx[path]
1978 1990 try:
1979 1991 if not ui.debugflag:
1980 1992 ui.pushbuffer(error=True)
1981 1993 tool, toolpath = filemerge._picktool(repo, ui, path,
1982 1994 fctx.isbinary(),
1983 1995 'l' in fctx.flags(),
1984 1996 changedelete)
1985 1997 finally:
1986 1998 if not ui.debugflag:
1987 1999 ui.popbuffer()
1988 2000 ui.write(('%s = %s\n') % (path, tool))
1989 2001
1990 2002 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1991 2003 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1992 2004 '''access the pushkey key/value protocol
1993 2005
1994 2006 With two args, list the keys in the given namespace.
1995 2007
1996 2008 With five args, set a key to new if it currently is set to old.
1997 2009 Reports success or failure.
1998 2010 '''
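# Illustrative invocations (hypothetical repository URL and bookmark values):
#
#   $ hg debugpushkey ssh://example.com/repo bookmarks
#                                   # two args: list keys in 'bookmarks'
#   $ hg debugpushkey ssh://example.com/repo bookmarks mybook OLDHEX NEWHEX
#                                   # five args: move 'mybook' only if it is at OLDHEX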
1999 2011
2000 2012 target = hg.peer(ui, {}, repopath)
2001 2013 if keyinfo:
2002 2014 key, old, new = keyinfo
2003 2015 with target.commandexecutor() as e:
2004 2016 r = e.callcommand('pushkey', {
2005 2017 'namespace': namespace,
2006 2018 'key': key,
2007 2019 'old': old,
2008 2020 'new': new,
2009 2021 }).result()
2010 2022
2011 2023 ui.status(pycompat.bytestr(r) + '\n')
2012 2024 return not r
2013 2025 else:
2014 2026 for k, v in sorted(target.listkeys(namespace).iteritems()):
2015 2027 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
2016 2028 stringutil.escapestr(v)))
2017 2029
2018 2030 @command('debugpvec', [], _('A B'))
2019 2031 def debugpvec(ui, repo, a, b=None):
2020 2032 ca = scmutil.revsingle(repo, a)
2021 2033 cb = scmutil.revsingle(repo, b)
2022 2034 pa = pvec.ctxpvec(ca)
2023 2035 pb = pvec.ctxpvec(cb)
2024 2036 if pa == pb:
2025 2037 rel = "="
2026 2038 elif pa > pb:
2027 2039 rel = ">"
2028 2040 elif pa < pb:
2029 2041 rel = "<"
2030 2042 elif pa | pb:
2031 2043 rel = "|"
2032 2044 ui.write(_("a: %s\n") % pa)
2033 2045 ui.write(_("b: %s\n") % pb)
2034 2046 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2035 2047 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2036 2048 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2037 2049 pa.distance(pb), rel))
2038 2050
2039 2051 @command('debugrebuilddirstate|debugrebuildstate',
2040 2052 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2041 2053 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2042 2054 'the working copy parent')),
2043 2055 ],
2044 2056 _('[-r REV]'))
2045 2057 def debugrebuilddirstate(ui, repo, rev, **opts):
2046 2058 """rebuild the dirstate as it would look like for the given revision
2047 2059
2048 2060 If no revision is specified the first current parent will be used.
2049 2061
2050 2062 The dirstate will be set to the files of the given revision.
2051 2063 The actual working directory content or existing dirstate
2052 2064 information such as adds or removes is not considered.
2053 2065
2054 2066 ``minimal`` will only rebuild the dirstate status for files that claim to be
2055 2067 tracked but are not in the parent manifest, or that exist in the parent
2056 2068 manifest but are not in the dirstate. It will not change adds, removes, or
2057 2069 modified files that are in the working copy parent.
2058 2070
2059 2071 One use of this command is to make the next :hg:`status` invocation
2060 2072 check the actual file content.
2061 2073 """
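# Illustrative usage:
#
#   $ hg debugrebuilddirstate             # rebuild from the working copy parent
#   $ hg debugrebuilddirstate -r '.^'     # rebuild as of another revision
#   $ hg debugrebuilddirstate --minimal   # only repair inconsistent entries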
2062 2074 ctx = scmutil.revsingle(repo, rev)
2063 2075 with repo.wlock():
2064 2076 dirstate = repo.dirstate
2065 2077 changedfiles = None
2066 2078 # See command doc for what minimal does.
2067 2079 if opts.get(r'minimal'):
2068 2080 manifestfiles = set(ctx.manifest().keys())
2069 2081 dirstatefiles = set(dirstate)
2070 2082 manifestonly = manifestfiles - dirstatefiles
2071 2083 dsonly = dirstatefiles - manifestfiles
2072 2084 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2073 2085 changedfiles = manifestonly | dsnotadded
2074 2086
2075 2087 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2076 2088
2077 2089 @command('debugrebuildfncache', [], '')
2078 2090 def debugrebuildfncache(ui, repo):
2079 2091 """rebuild the fncache file"""
2080 2092 repair.rebuildfncache(ui, repo)
2081 2093
2082 2094 @command('debugrename',
2083 2095 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2084 2096 _('[-r REV] [FILE]...'))
2085 2097 def debugrename(ui, repo, *pats, **opts):
2086 2098 """dump rename information"""
2087 2099
2088 2100 opts = pycompat.byteskwargs(opts)
2089 2101 ctx = scmutil.revsingle(repo, opts.get('rev'))
2090 2102 m = scmutil.match(ctx, pats, opts)
2091 2103 for abs in ctx.walk(m):
2092 2104 fctx = ctx[abs]
2093 2105 o = fctx.filelog().renamed(fctx.filenode())
2094 2106 rel = repo.pathto(abs)
2095 2107 if o:
2096 2108 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2097 2109 else:
2098 2110 ui.write(_("%s not renamed\n") % rel)
2099 2111
2100 2112 @command('debugrevlog', cmdutil.debugrevlogopts +
2101 2113 [('d', 'dump', False, _('dump index data'))],
2102 2114 _('-c|-m|FILE'),
2103 2115 optionalrepo=True)
2104 2116 def debugrevlog(ui, repo, file_=None, **opts):
2105 2117 """show data and statistics about a revlog"""
2106 2118 opts = pycompat.byteskwargs(opts)
2107 2119 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2108 2120
2109 2121 if opts.get("dump"):
2110 2122 numrevs = len(r)
2111 2123 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2112 2124 " rawsize totalsize compression heads chainlen\n"))
2113 2125 ts = 0
2114 2126 heads = set()
2115 2127
2116 2128 for rev in pycompat.xrange(numrevs):
2117 2129 dbase = r.deltaparent(rev)
2118 2130 if dbase == -1:
2119 2131 dbase = rev
2120 2132 cbase = r.chainbase(rev)
2121 2133 clen = r.chainlen(rev)
2122 2134 p1, p2 = r.parentrevs(rev)
2123 2135 rs = r.rawsize(rev)
2124 2136 ts = ts + rs
2125 2137 heads -= set(r.parentrevs(rev))
2126 2138 heads.add(rev)
2127 2139 try:
2128 2140 compression = ts / r.end(rev)
2129 2141 except ZeroDivisionError:
2130 2142 compression = 0
2131 2143 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2132 2144 "%11d %5d %8d\n" %
2133 2145 (rev, p1, p2, r.start(rev), r.end(rev),
2134 2146 r.start(dbase), r.start(cbase),
2135 2147 r.start(p1), r.start(p2),
2136 2148 rs, ts, compression, len(heads), clen))
2137 2149 return 0
2138 2150
2139 2151 v = r.version
2140 2152 format = v & 0xFFFF
2141 2153 flags = []
2142 2154 gdelta = False
2143 2155 if v & revlog.FLAG_INLINE_DATA:
2144 2156 flags.append('inline')
2145 2157 if v & revlog.FLAG_GENERALDELTA:
2146 2158 gdelta = True
2147 2159 flags.append('generaldelta')
2148 2160 if not flags:
2149 2161 flags = ['(none)']
2150 2162
2151 2163 ### tracks merge vs single parent
2152 2164 nummerges = 0
2153 2165
2154 2166 ### tracks ways the "delta" are build
2155 2167 # nodelta
2156 2168 numempty = 0
2157 2169 numemptytext = 0
2158 2170 numemptydelta = 0
2159 2171 # full file content
2160 2172 numfull = 0
2161 2173 # intermediate snapshot against a prior snapshot
2162 2174 numsemi = 0
2163 2175 # snapshot count per depth
2164 2176 numsnapdepth = collections.defaultdict(lambda: 0)
2165 2177 # delta against previous revision
2166 2178 numprev = 0
2167 2179 # delta against first or second parent (not prev)
2168 2180 nump1 = 0
2169 2181 nump2 = 0
2170 2182 # delta against neither prev nor parents
2171 2183 numother = 0
2172 2184 # delta against prev that are also first or second parent
2173 2185 # (details of `numprev`)
2174 2186 nump1prev = 0
2175 2187 nump2prev = 0
2176 2188
2177 2189 # data about delta chain of each revs
2178 2190 chainlengths = []
2179 2191 chainbases = []
2180 2192 chainspans = []
2181 2193
2182 2194 # data about each revision
2183 2195 datasize = [None, 0, 0]
2184 2196 fullsize = [None, 0, 0]
2185 2197 semisize = [None, 0, 0]
2186 2198 # snapshot count per depth
2187 2199 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2188 2200 deltasize = [None, 0, 0]
2189 2201 chunktypecounts = {}
2190 2202 chunktypesizes = {}
2191 2203
2192 2204 def addsize(size, l):
2193 2205 if l[0] is None or size < l[0]:
2194 2206 l[0] = size
2195 2207 if size > l[1]:
2196 2208 l[1] = size
2197 2209 l[2] += size
2198 2210
2199 2211 numrevs = len(r)
2200 2212 for rev in pycompat.xrange(numrevs):
2201 2213 p1, p2 = r.parentrevs(rev)
2202 2214 delta = r.deltaparent(rev)
2203 2215 if format > 0:
2204 2216 addsize(r.rawsize(rev), datasize)
2205 2217 if p2 != nullrev:
2206 2218 nummerges += 1
2207 2219 size = r.length(rev)
2208 2220 if delta == nullrev:
2209 2221 chainlengths.append(0)
2210 2222 chainbases.append(r.start(rev))
2211 2223 chainspans.append(size)
2212 2224 if size == 0:
2213 2225 numempty += 1
2214 2226 numemptytext += 1
2215 2227 else:
2216 2228 numfull += 1
2217 2229 numsnapdepth[0] += 1
2218 2230 addsize(size, fullsize)
2219 2231 addsize(size, snapsizedepth[0])
2220 2232 else:
2221 2233 chainlengths.append(chainlengths[delta] + 1)
2222 2234 baseaddr = chainbases[delta]
2223 2235 revaddr = r.start(rev)
2224 2236 chainbases.append(baseaddr)
2225 2237 chainspans.append((revaddr - baseaddr) + size)
2226 2238 if size == 0:
2227 2239 numempty += 1
2228 2240 numemptydelta += 1
2229 2241 elif r.issnapshot(rev):
2230 2242 addsize(size, semisize)
2231 2243 numsemi += 1
2232 2244 depth = r.snapshotdepth(rev)
2233 2245 numsnapdepth[depth] += 1
2234 2246 addsize(size, snapsizedepth[depth])
2235 2247 else:
2236 2248 addsize(size, deltasize)
2237 2249 if delta == rev - 1:
2238 2250 numprev += 1
2239 2251 if delta == p1:
2240 2252 nump1prev += 1
2241 2253 elif delta == p2:
2242 2254 nump2prev += 1
2243 2255 elif delta == p1:
2244 2256 nump1 += 1
2245 2257 elif delta == p2:
2246 2258 nump2 += 1
2247 2259 elif delta != nullrev:
2248 2260 numother += 1
2249 2261
2250 2262 # Obtain data on the raw chunks in the revlog.
2251 2263 if util.safehasattr(r, '_getsegmentforrevs'):
2252 2264 segment = r._getsegmentforrevs(rev, rev)[1]
2253 2265 else:
2254 2266 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2255 2267 if segment:
2256 2268 chunktype = bytes(segment[0:1])
2257 2269 else:
2258 2270 chunktype = 'empty'
2259 2271
2260 2272 if chunktype not in chunktypecounts:
2261 2273 chunktypecounts[chunktype] = 0
2262 2274 chunktypesizes[chunktype] = 0
2263 2275
2264 2276 chunktypecounts[chunktype] += 1
2265 2277 chunktypesizes[chunktype] += size
2266 2278
2267 2279 # Adjust size min value for empty cases
2268 2280 for size in (datasize, fullsize, semisize, deltasize):
2269 2281 if size[0] is None:
2270 2282 size[0] = 0
2271 2283
2272 2284 numdeltas = numrevs - numfull - numempty - numsemi
2273 2285 numoprev = numprev - nump1prev - nump2prev
2274 2286 totalrawsize = datasize[2]
2275 2287 datasize[2] /= numrevs
2276 2288 fulltotal = fullsize[2]
2277 2289 fullsize[2] /= numfull
2278 2290 semitotal = semisize[2]
2279 2291 snaptotal = {}
2280 2292 if numsemi > 0:
2281 2293 semisize[2] /= numsemi
2282 2294 for depth in snapsizedepth:
2283 2295 snaptotal[depth] = snapsizedepth[depth][2]
2284 2296 snapsizedepth[depth][2] /= numsnapdepth[depth]
2285 2297
2286 2298 deltatotal = deltasize[2]
2287 2299 if numdeltas > 0:
2288 2300 deltasize[2] /= numdeltas
2289 2301 totalsize = fulltotal + semitotal + deltatotal
2290 2302 avgchainlen = sum(chainlengths) / numrevs
2291 2303 maxchainlen = max(chainlengths)
2292 2304 maxchainspan = max(chainspans)
2293 2305 compratio = 1
2294 2306 if totalsize:
2295 2307 compratio = totalrawsize / totalsize
2296 2308
2297 2309 basedfmtstr = '%%%dd\n'
2298 2310 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2299 2311
2300 2312 def dfmtstr(max):
2301 2313 return basedfmtstr % len(str(max))
2302 2314 def pcfmtstr(max, padding=0):
2303 2315 return basepcfmtstr % (len(str(max)), ' ' * padding)
2304 2316
2305 2317 def pcfmt(value, total):
2306 2318 if total:
2307 2319 return (value, 100 * float(value) / total)
2308 2320 else:
2309 2321 return value, 100.0
2310 2322
2311 2323 ui.write(('format : %d\n') % format)
2312 2324 ui.write(('flags : %s\n') % ', '.join(flags))
2313 2325
2314 2326 ui.write('\n')
2315 2327 fmt = pcfmtstr(totalsize)
2316 2328 fmt2 = dfmtstr(totalsize)
2317 2329 ui.write(('revisions : ') + fmt2 % numrevs)
2318 2330 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2319 2331 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2320 2332 ui.write(('revisions : ') + fmt2 % numrevs)
2321 2333 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2322 2334 ui.write((' text : ')
2323 2335 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2324 2336 ui.write((' delta : ')
2325 2337 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2326 2338 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2327 2339 for depth in sorted(numsnapdepth):
2328 2340 ui.write((' lvl-%-3d : ' % depth)
2329 2341 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2330 2342 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2331 2343 ui.write(('revision size : ') + fmt2 % totalsize)
2332 2344 ui.write((' snapshot : ')
2333 2345 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2334 2346 for depth in sorted(numsnapdepth):
2335 2347 ui.write((' lvl-%-3d : ' % depth)
2336 2348 + fmt % pcfmt(snaptotal[depth], totalsize))
2337 2349 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2338 2350
2339 2351 def fmtchunktype(chunktype):
2340 2352 if chunktype == 'empty':
2341 2353 return ' %s : ' % chunktype
2342 2354 elif chunktype in pycompat.bytestr(string.ascii_letters):
2343 2355 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2344 2356 else:
2345 2357 return ' 0x%s : ' % hex(chunktype)
2346 2358
2347 2359 ui.write('\n')
2348 2360 ui.write(('chunks : ') + fmt2 % numrevs)
2349 2361 for chunktype in sorted(chunktypecounts):
2350 2362 ui.write(fmtchunktype(chunktype))
2351 2363 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2352 2364 ui.write(('chunks size : ') + fmt2 % totalsize)
2353 2365 for chunktype in sorted(chunktypecounts):
2354 2366 ui.write(fmtchunktype(chunktype))
2355 2367 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2356 2368
2357 2369 ui.write('\n')
2358 2370 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2359 2371 ui.write(('avg chain length : ') + fmt % avgchainlen)
2360 2372 ui.write(('max chain length : ') + fmt % maxchainlen)
2361 2373 ui.write(('max chain reach : ') + fmt % maxchainspan)
2362 2374 ui.write(('compression ratio : ') + fmt % compratio)
2363 2375
2364 2376 if format > 0:
2365 2377 ui.write('\n')
2366 2378 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2367 2379 % tuple(datasize))
2368 2380 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2369 2381 % tuple(fullsize))
2370 2382 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2371 2383 % tuple(semisize))
2372 2384 for depth in sorted(snapsizedepth):
2373 2385 if depth == 0:
2374 2386 continue
2375 2387 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2376 2388 % ((depth,) + tuple(snapsizedepth[depth])))
2377 2389 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2378 2390 % tuple(deltasize))
2379 2391
2380 2392 if numdeltas > 0:
2381 2393 ui.write('\n')
2382 2394 fmt = pcfmtstr(numdeltas)
2383 2395 fmt2 = pcfmtstr(numdeltas, 4)
2384 2396 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2385 2397 if numprev > 0:
2386 2398 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2387 2399 numprev))
2388 2400 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2389 2401 numprev))
2390 2402 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2391 2403 numprev))
2392 2404 if gdelta:
2393 2405 ui.write(('deltas against p1 : ')
2394 2406 + fmt % pcfmt(nump1, numdeltas))
2395 2407 ui.write(('deltas against p2 : ')
2396 2408 + fmt % pcfmt(nump2, numdeltas))
2397 2409 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2398 2410 numdeltas))
2399 2411
2400 2412 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2401 2413 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2402 2414 _('[-f FORMAT] -c|-m|FILE'),
2403 2415 optionalrepo=True)
2404 2416 def debugrevlogindex(ui, repo, file_=None, **opts):
2405 2417 """dump the contents of a revlog index"""
2406 2418 opts = pycompat.byteskwargs(opts)
2407 2419 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2408 2420 format = opts.get('format', 0)
2409 2421 if format not in (0, 1):
2410 2422 raise error.Abort(_("unknown format %d") % format)
2411 2423
2412 2424 if ui.debugflag:
2413 2425 shortfn = hex
2414 2426 else:
2415 2427 shortfn = short
2416 2428
2417 2429 # There might not be anything in r, so have a sane default
2418 2430 idlen = 12
2419 2431 for i in r:
2420 2432 idlen = len(shortfn(r.node(i)))
2421 2433 break
2422 2434
2423 2435 if format == 0:
2424 2436 if ui.verbose:
2425 2437 ui.write((" rev offset length linkrev"
2426 2438 " %s %s p2\n") % ("nodeid".ljust(idlen),
2427 2439 "p1".ljust(idlen)))
2428 2440 else:
2429 2441 ui.write((" rev linkrev %s %s p2\n") % (
2430 2442 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2431 2443 elif format == 1:
2432 2444 if ui.verbose:
2433 2445 ui.write((" rev flag offset length size link p1"
2434 2446 " p2 %s\n") % "nodeid".rjust(idlen))
2435 2447 else:
2436 2448 ui.write((" rev flag size link p1 p2 %s\n") %
2437 2449 "nodeid".rjust(idlen))
2438 2450
2439 2451 for i in r:
2440 2452 node = r.node(i)
2441 2453 if format == 0:
2442 2454 try:
2443 2455 pp = r.parents(node)
2444 2456 except Exception:
2445 2457 pp = [nullid, nullid]
2446 2458 if ui.verbose:
2447 2459 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2448 2460 i, r.start(i), r.length(i), r.linkrev(i),
2449 2461 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2450 2462 else:
2451 2463 ui.write("% 6d % 7d %s %s %s\n" % (
2452 2464 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2453 2465 shortfn(pp[1])))
2454 2466 elif format == 1:
2455 2467 pr = r.parentrevs(i)
2456 2468 if ui.verbose:
2457 2469 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2458 2470 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2459 2471 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2460 2472 else:
2461 2473 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2462 2474 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2463 2475 shortfn(node)))
2464 2476
2465 2477 @command('debugrevspec',
2466 2478 [('', 'optimize', None,
2467 2479 _('print parsed tree after optimizing (DEPRECATED)')),
2468 2480 ('', 'show-revs', True, _('print list of result revisions (default)')),
2469 2481 ('s', 'show-set', None, _('print internal representation of result set')),
2470 2482 ('p', 'show-stage', [],
2471 2483 _('print parsed tree at the given stage'), _('NAME')),
2472 2484 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2473 2485 ('', 'verify-optimized', False, _('verify optimized result')),
2474 2486 ],
2475 2487 ('REVSPEC'))
2476 2488 def debugrevspec(ui, repo, expr, **opts):
2477 2489 """parse and apply a revision specification
2478 2490
2479 2491 Use -p/--show-stage option to print the parsed tree at the given stages.
2480 2492 Use -p all to print tree at every stage.
2481 2493
2482 2494 Use --no-show-revs option with -s or -p to print only the set
2483 2495 representation or the parsed tree respectively.
2484 2496
2485 2497 Use --verify-optimized to compare the optimized result with the unoptimized
2486 2498 one. Returns 1 if the optimized result differs.
2487 2499 """
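# Illustrative invocations (any revset expression works here):
#
#   $ hg debugrevspec 'heads(all())'               # print matching revisions
#   $ hg debugrevspec -p all 'heads(all())'        # also print the tree at every stage
#   $ hg debugrevspec --verify-optimized 'all()'   # returns 1 if optimization changes the result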
2488 2500 opts = pycompat.byteskwargs(opts)
2489 2501 aliases = ui.configitems('revsetalias')
2490 2502 stages = [
2491 2503 ('parsed', lambda tree: tree),
2492 2504 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2493 2505 ui.warn)),
2494 2506 ('concatenated', revsetlang.foldconcat),
2495 2507 ('analyzed', revsetlang.analyze),
2496 2508 ('optimized', revsetlang.optimize),
2497 2509 ]
2498 2510 if opts['no_optimized']:
2499 2511 stages = stages[:-1]
2500 2512 if opts['verify_optimized'] and opts['no_optimized']:
2501 2513 raise error.Abort(_('cannot use --verify-optimized with '
2502 2514 '--no-optimized'))
2503 2515 stagenames = set(n for n, f in stages)
2504 2516
2505 2517 showalways = set()
2506 2518 showchanged = set()
2507 2519 if ui.verbose and not opts['show_stage']:
2508 2520 # show parsed tree by --verbose (deprecated)
2509 2521 showalways.add('parsed')
2510 2522 showchanged.update(['expanded', 'concatenated'])
2511 2523 if opts['optimize']:
2512 2524 showalways.add('optimized')
2513 2525 if opts['show_stage'] and opts['optimize']:
2514 2526 raise error.Abort(_('cannot use --optimize with --show-stage'))
2515 2527 if opts['show_stage'] == ['all']:
2516 2528 showalways.update(stagenames)
2517 2529 else:
2518 2530 for n in opts['show_stage']:
2519 2531 if n not in stagenames:
2520 2532 raise error.Abort(_('invalid stage name: %s') % n)
2521 2533 showalways.update(opts['show_stage'])
2522 2534
2523 2535 treebystage = {}
2524 2536 printedtree = None
2525 2537 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2526 2538 for n, f in stages:
2527 2539 treebystage[n] = tree = f(tree)
2528 2540 if n in showalways or (n in showchanged and tree != printedtree):
2529 2541 if opts['show_stage'] or n != 'parsed':
2530 2542 ui.write(("* %s:\n") % n)
2531 2543 ui.write(revsetlang.prettyformat(tree), "\n")
2532 2544 printedtree = tree
2533 2545
2534 2546 if opts['verify_optimized']:
2535 2547 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2536 2548 brevs = revset.makematcher(treebystage['optimized'])(repo)
2537 2549 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2538 2550 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2539 2551 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2540 2552 arevs = list(arevs)
2541 2553 brevs = list(brevs)
2542 2554 if arevs == brevs:
2543 2555 return 0
2544 2556 ui.write(('--- analyzed\n'), label='diff.file_a')
2545 2557 ui.write(('+++ optimized\n'), label='diff.file_b')
2546 2558 sm = difflib.SequenceMatcher(None, arevs, brevs)
2547 2559 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2548 2560 if tag in (r'delete', r'replace'):
2549 2561 for c in arevs[alo:ahi]:
2550 2562 ui.write('-%d\n' % c, label='diff.deleted')
2551 2563 if tag in (r'insert', r'replace'):
2552 2564 for c in brevs[blo:bhi]:
2553 2565 ui.write('+%d\n' % c, label='diff.inserted')
2554 2566 if tag == r'equal':
2555 2567 for c in arevs[alo:ahi]:
2556 2568 ui.write(' %d\n' % c)
2557 2569 return 1
2558 2570
2559 2571 func = revset.makematcher(tree)
2560 2572 revs = func(repo)
2561 2573 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2562 2574 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2563 2575 if not opts['show_revs']:
2564 2576 return
2565 2577 for c in revs:
2566 2578 ui.write("%d\n" % c)
2567 2579
2568 2580 @command('debugserve', [
2569 2581 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2570 2582 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2571 2583 ('', 'logiofile', '', _('file to log server I/O to')),
2572 2584 ], '')
2573 2585 def debugserve(ui, repo, **opts):
2574 2586 """run a server with advanced settings
2575 2587
2576 2588 This command is similar to :hg:`serve`. It exists partially as a
2577 2589 workaround for the fact that ``hg serve --stdio`` must have specific
2578 2590 arguments for security reasons.
2579 2591 """
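# Illustrative usage (hypothetical log path); --logiofd and --logiofile are
# mutually exclusive, as enforced below:
#
#   $ hg debugserve --sshstdio --logiofile /tmp/hg-server-io.log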
2580 2592 opts = pycompat.byteskwargs(opts)
2581 2593
2582 2594 if not opts['sshstdio']:
2583 2595 raise error.Abort(_('only --sshstdio is currently supported'))
2584 2596
2585 2597 logfh = None
2586 2598
2587 2599 if opts['logiofd'] and opts['logiofile']:
2588 2600 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2589 2601
2590 2602 if opts['logiofd']:
2591 2603 # Line buffered because output is line based.
2592 2604 try:
2593 2605 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2594 2606 except OSError as e:
2595 2607 if e.errno != errno.ESPIPE:
2596 2608 raise
2597 2609 # can't seek a pipe, so `ab` mode fails on py3
2598 2610 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2599 2611 elif opts['logiofile']:
2600 2612 logfh = open(opts['logiofile'], 'ab', 1)
2601 2613
2602 2614 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2603 2615 s.serve_forever()
2604 2616
2605 2617 @command('debugsetparents', [], _('REV1 [REV2]'))
2606 2618 def debugsetparents(ui, repo, rev1, rev2=None):
2607 2619 """manually set the parents of the current working directory
2608 2620
2609 2621 This is useful for writing repository conversion tools, but should
2610 2622 be used with care. For example, neither the working directory nor the
2611 2623 dirstate is updated, so file status may be incorrect after running this
2612 2624 command.
2613 2625
2614 2626 Returns 0 on success.
2615 2627 """
2616 2628
2617 2629 node1 = scmutil.revsingle(repo, rev1).node()
2618 2630 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2619 2631
2620 2632 with repo.wlock():
2621 2633 repo.setparents(node1, node2)
2622 2634
2623 2635 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2624 2636 def debugssl(ui, repo, source=None, **opts):
2625 2637 '''test a secure connection to a server
2626 2638
2627 2639 This builds the certificate chain for the server on Windows, installing the
2628 2640 missing intermediates and trusted root via Windows Update if necessary. It
2629 2641 does nothing on other platforms.
2630 2642
2631 2643 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2632 2644 that server is used. See :hg:`help urls` for more information.
2633 2645
2634 2646 If the update succeeds, retry the original operation. Otherwise, the cause
2635 2647 of the SSL error is likely another issue.
2636 2648 '''
2637 2649 if not pycompat.iswindows:
2638 2650 raise error.Abort(_('certificate chain building is only possible on '
2639 2651 'Windows'))
2640 2652
2641 2653 if not source:
2642 2654 if not repo:
2643 2655 raise error.Abort(_("there is no Mercurial repository here, and no "
2644 2656 "server specified"))
2645 2657 source = "default"
2646 2658
2647 2659 source, branches = hg.parseurl(ui.expandpath(source))
2648 2660 url = util.url(source)
2649 2661
2650 2662 defaultport = {'https': 443, 'ssh': 22}
2651 2663 if url.scheme in defaultport:
2652 2664 try:
2653 2665 addr = (url.host, int(url.port or defaultport[url.scheme]))
2654 2666 except ValueError:
2655 2667 raise error.Abort(_("malformed port number in URL"))
2656 2668 else:
2657 2669 raise error.Abort(_("only https and ssh connections are supported"))
2658 2670
2659 2671 from . import win32
2660 2672
2661 2673 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2662 2674 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2663 2675
2664 2676 try:
2665 2677 s.connect(addr)
2666 2678 cert = s.getpeercert(True)
2667 2679
2668 2680 ui.status(_('checking the certificate chain for %s\n') % url.host)
2669 2681
2670 2682 complete = win32.checkcertificatechain(cert, build=False)
2671 2683
2672 2684 if not complete:
2673 2685 ui.status(_('certificate chain is incomplete, updating... '))
2674 2686
2675 2687 if not win32.checkcertificatechain(cert):
2676 2688 ui.status(_('failed.\n'))
2677 2689 else:
2678 2690 ui.status(_('done.\n'))
2679 2691 else:
2680 2692 ui.status(_('full certificate chain is available\n'))
2681 2693 finally:
2682 2694 s.close()
2683 2695
2684 2696 @command('debugsub',
2685 2697 [('r', 'rev', '',
2686 2698 _('revision to check'), _('REV'))],
2687 2699 _('[-r REV] [REV]'))
2688 2700 def debugsub(ui, repo, rev=None):
2689 2701 ctx = scmutil.revsingle(repo, rev, None)
2690 2702 for k, v in sorted(ctx.substate.items()):
2691 2703 ui.write(('path %s\n') % k)
2692 2704 ui.write((' source %s\n') % v[0])
2693 2705 ui.write((' revision %s\n') % v[1])
2694 2706
2695 2707 @command('debugsuccessorssets',
2696 2708 [('', 'closest', False, _('return closest successors sets only'))],
2697 2709 _('[REV]'))
2698 2710 def debugsuccessorssets(ui, repo, *revs, **opts):
2699 2711 """show set of successors for revision
2700 2712
2701 2713 A successors set of changeset A is a consistent group of revisions that
2702 2714 succeed A. It contains non-obsolete changesets only unless the closest
2703 2715 successors sets option is set.
2704 2716
2705 2717 In most cases a changeset A has a single successors set containing a single
2706 2718 successor (changeset A replaced by A').
2707 2719
2708 2720 A changeset that is made obsolete with no successors is called "pruned".
2709 2721 Such changesets have no successors sets at all.
2710 2722
2711 2723 A changeset that has been "split" will have a successors set containing
2712 2724 more than one successor.
2713 2725
2714 2726 A changeset that has been rewritten in multiple different ways is called
2715 2727 "divergent". Such changesets have multiple successor sets (each of which
2716 2728 may also be split, i.e. have multiple successors).
2717 2729
2718 2730 Results are displayed as follows::
2719 2731
2720 2732 <rev1>
2721 2733 <successors-1A>
2722 2734 <rev2>
2723 2735 <successors-2A>
2724 2736 <successors-2B1> <successors-2B2> <successors-2B3>
2725 2737
2726 2738 Here rev2 has two possible (i.e. divergent) successors sets. The first
2727 2739 holds one element, whereas the second holds three (i.e. the changeset has
2728 2740 been split).
2729 2741 """
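# Illustrative invocations (any revset is accepted, e.g. obsolete changesets);
# the output follows the format documented above:
#
#   $ hg debugsuccessorssets 'obsolete()'
#   $ hg debugsuccessorssets --closest 'obsolete()'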
2730 2742 # passed to successorssets caching computation from one call to another
2731 2743 cache = {}
2732 2744 ctx2str = bytes
2733 2745 node2str = short
2734 2746 for rev in scmutil.revrange(repo, revs):
2735 2747 ctx = repo[rev]
2736 2748 ui.write('%s\n'% ctx2str(ctx))
2737 2749 for succsset in obsutil.successorssets(repo, ctx.node(),
2738 2750 closest=opts[r'closest'],
2739 2751 cache=cache):
2740 2752 if succsset:
2741 2753 ui.write(' ')
2742 2754 ui.write(node2str(succsset[0]))
2743 2755 for node in succsset[1:]:
2744 2756 ui.write(' ')
2745 2757 ui.write(node2str(node))
2746 2758 ui.write('\n')
2747 2759
2748 2760 @command('debugtemplate',
2749 2761 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2750 2762 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2751 2763 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2752 2764 optionalrepo=True)
2753 2765 def debugtemplate(ui, repo, tmpl, **opts):
2754 2766 """parse and apply a template
2755 2767
2756 2768 If -r/--rev is given, the template is processed as a log template and
2757 2769 applied to the given changesets. Otherwise, it is processed as a generic
2758 2770 template.
2759 2771
2760 2772 Use --verbose to print the parsed tree.
2761 2773 """
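# Illustrative invocations (hypothetical keyword 'greeting' defined via -D):
#
#   $ hg debugtemplate -D greeting=hello '{greeting} world\n'
#   hello world
#   $ hg debugtemplate -r . '{rev}:{node|short}\n'   # processed as a log template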
2762 2774 revs = None
2763 2775 if opts[r'rev']:
2764 2776 if repo is None:
2765 2777 raise error.RepoError(_('there is no Mercurial repository here '
2766 2778 '(.hg not found)'))
2767 2779 revs = scmutil.revrange(repo, opts[r'rev'])
2768 2780
2769 2781 props = {}
2770 2782 for d in opts[r'define']:
2771 2783 try:
2772 2784 k, v = (e.strip() for e in d.split('=', 1))
2773 2785 if not k or k == 'ui':
2774 2786 raise ValueError
2775 2787 props[k] = v
2776 2788 except ValueError:
2777 2789 raise error.Abort(_('malformed keyword definition: %s') % d)
2778 2790
2779 2791 if ui.verbose:
2780 2792 aliases = ui.configitems('templatealias')
2781 2793 tree = templater.parse(tmpl)
2782 2794 ui.note(templater.prettyformat(tree), '\n')
2783 2795 newtree = templater.expandaliases(tree, aliases)
2784 2796 if newtree != tree:
2785 2797 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2786 2798
2787 2799 if revs is None:
2788 2800 tres = formatter.templateresources(ui, repo)
2789 2801 t = formatter.maketemplater(ui, tmpl, resources=tres)
2790 2802 if ui.verbose:
2791 2803 kwds, funcs = t.symbolsuseddefault()
2792 2804 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2793 2805 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2794 2806 ui.write(t.renderdefault(props))
2795 2807 else:
2796 2808 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2797 2809 if ui.verbose:
2798 2810 kwds, funcs = displayer.t.symbolsuseddefault()
2799 2811 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2800 2812 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2801 2813 for r in revs:
2802 2814 displayer.show(repo[r], **pycompat.strkwargs(props))
2803 2815 displayer.close()
2804 2816
2805 2817 @command('debuguigetpass', [
2806 2818 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2807 2819 ], _('[-p TEXT]'), norepo=True)
2808 2820 def debuguigetpass(ui, prompt=''):
2809 2821 """show prompt to type password"""
2810 2822 r = ui.getpass(prompt)
2811 2823 ui.write(('response: %s\n') % r)
2812 2824
2813 2825 @command('debuguiprompt', [
2814 2826 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2815 2827 ], _('[-p TEXT]'), norepo=True)
2816 2828 def debuguiprompt(ui, prompt=''):
2817 2829 """show plain prompt"""
2818 2830 r = ui.prompt(prompt)
2819 2831 ui.write(('response: %s\n') % r)
2820 2832
2821 2833 @command('debugupdatecaches', [])
2822 2834 def debugupdatecaches(ui, repo, *pats, **opts):
2823 2835 """warm all known caches in the repository"""
2824 2836 with repo.wlock(), repo.lock():
2825 2837 repo.updatecaches(full=True)
2826 2838
2827 2839 @command('debugupgraderepo', [
2828 2840 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2829 2841 ('', 'run', False, _('performs an upgrade')),
2830 2842 ('', 'backup', True, _('keep the old repository content around')),
2831 2843 ])
2832 2844 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2833 2845 """upgrade a repository to use different features
2834 2846
2835 2847 If no arguments are specified, the repository is evaluated for upgrade
2836 2848 and a list of problems and potential optimizations is printed.
2837 2849
2838 2850 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2839 2851 can be influenced via additional arguments. More details will be provided
2840 2852 by the command output when run without ``--run``.
2841 2853
2842 2854 During the upgrade, the repository will be locked and no writes will be
2843 2855 allowed.
2844 2856
2845 2857 At the end of the upgrade, the repository may not be readable while new
2846 2858 repository data is swapped in. This window will be as long as it takes to
2847 2859 rename some directories inside the ``.hg`` directory. On most machines, this
2848 2860 should complete almost instantaneously and the chances of a consumer being
2849 2861 unable to access the repository should be low.
2850 2862 """
2851 2863 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2852 2864 backup=backup)
2853 2865
2854 2866 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2855 2867 inferrepo=True)
2856 2868 def debugwalk(ui, repo, *pats, **opts):
2857 2869 """show how files match on given patterns"""
2858 2870 opts = pycompat.byteskwargs(opts)
2859 2871 m = scmutil.match(repo[None], pats, opts)
2860 2872 if ui.verbose:
2861 2873 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2862 2874 items = list(repo[None].walk(m))
2863 2875 if not items:
2864 2876 return
2865 2877 f = lambda fn: fn
2866 2878 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2867 2879 f = lambda fn: util.normpath(fn)
2868 2880 fmt = 'f %%-%ds %%-%ds %%s' % (
2869 2881 max([len(abs) for abs in items]),
2870 2882 max([len(repo.pathto(abs)) for abs in items]))
2871 2883 for abs in items:
2872 2884 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2873 2885 ui.write("%s\n" % line.rstrip())
2874 2886
2875 2887 @command('debugwhyunstable', [], _('REV'))
2876 2888 def debugwhyunstable(ui, repo, rev):
2877 2889 """explain instabilities of a changeset"""
2878 2890 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2879 2891 dnodes = ''
2880 2892 if entry.get('divergentnodes'):
2881 2893 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2882 2894 for ctx in entry['divergentnodes']) + ' '
2883 2895 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2884 2896 entry['reason'], entry['node']))
2885 2897
2886 2898 @command('debugwireargs',
2887 2899 [('', 'three', '', 'three'),
2888 2900 ('', 'four', '', 'four'),
2889 2901 ('', 'five', '', 'five'),
2890 2902 ] + cmdutil.remoteopts,
2891 2903 _('REPO [OPTIONS]... [ONE [TWO]]'),
2892 2904 norepo=True)
2893 2905 def debugwireargs(ui, repopath, *vals, **opts):
2894 2906 opts = pycompat.byteskwargs(opts)
2895 2907 repo = hg.peer(ui, opts, repopath)
2896 2908 for opt in cmdutil.remoteopts:
2897 2909 del opts[opt[1]]
2898 2910 args = {}
2899 2911 for k, v in opts.iteritems():
2900 2912 if v:
2901 2913 args[k] = v
2902 2914 args = pycompat.strkwargs(args)
2903 2915 # run twice to check that we don't mess up the stream for the next command
2904 2916 res1 = repo.debugwireargs(*vals, **args)
2905 2917 res2 = repo.debugwireargs(*vals, **args)
2906 2918 ui.write("%s\n" % res1)
2907 2919 if res1 != res2:
2908 2920 ui.warn("%s\n" % res2)
2909 2921
2910 2922 def _parsewirelangblocks(fh):
2911 2923 activeaction = None
2912 2924 blocklines = []
2913 2925 lastindent = 0
2914 2926
2915 2927 for line in fh:
2916 2928 line = line.rstrip()
2917 2929 if not line:
2918 2930 continue
2919 2931
2920 2932 if line.startswith(b'#'):
2921 2933 continue
2922 2934
2923 2935 if not line.startswith(b' '):
2924 2936 # New block. Flush previous one.
2925 2937 if activeaction:
2926 2938 yield activeaction, blocklines
2927 2939
2928 2940 activeaction = line
2929 2941 blocklines = []
2930 2942 lastindent = 0
2931 2943 continue
2932 2944
2933 2945 # Else we start with an indent.
2934 2946
2935 2947 if not activeaction:
2936 2948 raise error.Abort(_('indented line outside of block'))
2937 2949
2938 2950 indent = len(line) - len(line.lstrip())
2939 2951
2940 2952 # If this line is indented more than the last line, concatenate it.
2941 2953 if indent > lastindent and blocklines:
2942 2954 blocklines[-1] += line.lstrip()
2943 2955 else:
2944 2956 blocklines.append(line)
2945 2957 lastindent = indent
2946 2958
2947 2959 # Flush last block.
2948 2960 if activeaction:
2949 2961 yield activeaction, blocklines
2950 2962
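# Illustrative input for the block mini-language parsed by
# _parsewirelangblocks() above (hypothetical command and payload; see the
# debugwireproto docstring below for the full grammar):
#
#   command listkeys
#       namespace bookmarks
#   raw
#       0000
#
# This would yield ('command listkeys', [...]) followed by ('raw', [...]),
# where each list holds that block's indented lines with their leading
# whitespace preserved; a line indented deeper than the previous one is
# concatenated onto that previous line instead of starting a new entry.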
2951 2963 @command('debugwireproto',
2952 2964 [
2953 2965 ('', 'localssh', False, _('start an SSH server for this repo')),
2954 2966 ('', 'peer', '', _('construct a specific version of the peer')),
2955 2967 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2956 2968 ('', 'nologhandshake', False,
2957 2969 _('do not log I/O related to the peer handshake')),
2958 2970 ] + cmdutil.remoteopts,
2959 2971 _('[PATH]'),
2960 2972 optionalrepo=True)
2961 2973 def debugwireproto(ui, repo, path=None, **opts):
2962 2974 """send wire protocol commands to a server
2963 2975
2964 2976 This command can be used to issue wire protocol commands to remote
2965 2977 peers and to debug the raw data being exchanged.
2966 2978
2967 2979 ``--localssh`` will start an SSH server against the current repository
2968 2980 and connect to that. By default, the connection will perform a handshake
2969 2981 and establish an appropriate peer instance.
2970 2982
2971 2983 ``--peer`` can be used to bypass the handshake protocol and construct a
2972 2984 peer instance using the specified class type. Valid values are ``raw``,
2973 2985 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2974 2986 raw data payloads and don't support higher-level command actions.
2975 2987
2976 2988 ``--noreadstderr`` can be used to disable automatic reading from stderr
2977 2989 of the peer (for SSH connections only). Disabling automatic reading of
2978 2990 stderr is useful for making output more deterministic.
2979 2991
2980 2992 Commands are issued via a mini language which is specified via stdin.
2981 2993 The language consists of individual actions to perform. An action is
2982 2994 defined by a block. A block is defined as a line with no leading
2983 2995 space followed by 0 or more lines with leading space. Blocks are
2984 2996 effectively a high-level command with additional metadata.
2985 2997
2986 2998 Lines beginning with ``#`` are ignored.
2987 2999
2988 3000 The following sections denote available actions.
2989 3001
2990 3002 raw
2991 3003 ---
2992 3004
2993 3005 Send raw data to the server.
2994 3006
2995 3007 The block payload contains the raw data to send as one atomic send
2996 3008 operation. The data may not actually be delivered in a single system
2997 3009 call: it depends on the abilities of the transport being used.
2998 3010
2999 3011 Each line in the block is de-indented and concatenated. Then, that
3000 3012 value is evaluated as a Python b'' literal. This allows the use of
3001 3013 backslash escaping, etc.
3002 3014
3003 3015 raw+
3004 3016 ----
3005 3017
3006 3018 Behaves like ``raw`` except flushes output afterwards.
3007 3019
3008 3020 command <X>
3009 3021 -----------
3010 3022
3011 3023 Send a request to run a named command, whose name follows the ``command``
3012 3024 string.
3013 3025
3014 3026 Arguments to the command are defined as lines in this block. The format of
3015 3027 each line is ``<key> <value>``. e.g.::
3016 3028
3017 3029 command listkeys
3018 3030 namespace bookmarks
3019 3031
3020 3032 If the value begins with ``eval:``, it will be interpreted as a Python
3021 3033 literal expression. Otherwise values are interpreted as Python b'' literals.
3022 3034 This allows sending complex types and encoding special byte sequences via
3023 3035 backslash escaping.
3024 3036
3025 3037 The following arguments have special meaning:
3026 3038
3027 3039 ``PUSHFILE``
3028 3040 When defined, the *push* mechanism of the peer will be used instead
3029 3041 of the static request-response mechanism and the content of the
3030 3042 file specified in the value of this argument will be sent as the
3031 3043 command payload.
3032 3044
3033 3045 This can be used to submit a local bundle file to the remote.
3034 3046
3035 3047 batchbegin
3036 3048 ----------
3037 3049
3038 3050 Instruct the peer to begin a batched send.
3039 3051
3040 3052 All ``command`` blocks are queued for execution until the next
3041 3053 ``batchsubmit`` block.
3042 3054
3043 3055 batchsubmit
3044 3056 -----------
3045 3057
3046 3058 Submit previously queued ``command`` blocks as a batch request.
3047 3059
3048 3060 This action MUST be paired with a ``batchbegin`` action.
3049 3061
3050 3062 httprequest <method> <path>
3051 3063 ---------------------------
3052 3064
3053 3065 (HTTP peer only)
3054 3066
3055 3067 Send an HTTP request to the peer.
3056 3068
3057 3069 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3058 3070
3059 3071 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3060 3072 headers to add to the request. e.g. ``Accept: foo``.
3061 3073
3062 3074 The following arguments are special:
3063 3075
3064 3076 ``BODYFILE``
3065 3077 The content of the file defined as the value to this argument will be
3066 3078 transferred verbatim as the HTTP request body.
3067 3079
3068 3080 ``frame <type> <flags> <payload>``
3069 3081 Send a unified protocol frame as part of the request body.
3070 3082
3071 3083 All frames will be collected and sent as the body to the HTTP
3072 3084 request.
3073 3085
3074 3086 close
3075 3087 -----
3076 3088
3077 3089 Close the connection to the server.
3078 3090
3079 3091 flush
3080 3092 -----
3081 3093
3082 3094 Flush data written to the server.
3083 3095
3084 3096 readavailable
3085 3097 -------------
3086 3098
3087 3099 Close the write end of the connection and read all available data from
3088 3100 the server.
3089 3101
3090 3102 If the connection to the server encompasses multiple pipes, we poll both
3091 3103 pipes and read available data.
3092 3104
3093 3105 readline
3094 3106 --------
3095 3107
3096 3108 Read a line of output from the server. If there are multiple output
3097 3109 pipes, reads only the main pipe.
3098 3110
3099 3111 ereadline
3100 3112 ---------
3101 3113
3102 3114 Like ``readline``, but read from the stderr pipe, if available.
3103 3115
3104 3116 read <X>
3105 3117 --------
3106 3118
3107 3119 ``read()`` N bytes from the server's main output pipe.
3108 3120
3109 3121 eread <X>
3110 3122 ---------
3111 3123
3112 3124 ``read()`` N bytes from the server's stderr pipe, if available.
3113 3125
3114 3126 Specifying Unified Frame-Based Protocol Frames
3115 3127 ----------------------------------------------
3116 3128
3117 3129 It is possible to emit a *Unified Frame-Based Protocol* by using special
3118 3130 syntax.
3119 3131
3120 3132 A frame is composed as a type, flags, and payload. These can be parsed
3121 3133 from a string of the form:
3122 3134
3123 3135 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3124 3136
3125 3137 ``request-id`` and ``stream-id`` are integers defining the request and
3126 3138 stream identifiers.
3127 3139
3128 3140 ``type`` can be an integer value for the frame type or the string name
3129 3141 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3130 3142 ``command-name``.
3131 3143
3132 3144 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3133 3145 components. Each component (and there can be just one) can be an integer
3134 3146 or a flag name for stream flags or frame flags, respectively. Values are
3135 3147 resolved to integers and then bitwise OR'd together.
3136 3148
3137 3149 ``payload`` represents the raw frame payload. If it begins with
3138 3150 ``cbor:``, the following string is evaluated as Python code and the
3139 3151 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3140 3152 as a Python byte string literal.
3141 3153 """
3142 3154 opts = pycompat.byteskwargs(opts)
3143 3155
3144 3156 if opts['localssh'] and not repo:
3145 3157 raise error.Abort(_('--localssh requires a repository'))
3146 3158
3147 3159 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3148 3160 raise error.Abort(_('invalid value for --peer'),
3149 3161 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3150 3162
3151 3163 if path and opts['localssh']:
3152 3164 raise error.Abort(_('cannot specify --localssh with an explicit '
3153 3165 'path'))
3154 3166
3155 3167 if ui.interactive():
3156 3168 ui.write(_('(waiting for commands on stdin)\n'))
3157 3169
3158 3170 blocks = list(_parsewirelangblocks(ui.fin))
3159 3171
3160 3172 proc = None
3161 3173 stdin = None
3162 3174 stdout = None
3163 3175 stderr = None
3164 3176 opener = None
3165 3177
3166 3178 if opts['localssh']:
3167 3179 # We start the SSH server in its own process so there is process
3168 3180 # separation. This prevents a whole class of potential bugs around
3169 3181 # shared state from interfering with server operation.
3170 3182 args = procutil.hgcmd() + [
3171 3183 '-R', repo.root,
3172 3184 'debugserve', '--sshstdio',
3173 3185 ]
3174 3186 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3175 3187 stdin=subprocess.PIPE,
3176 3188 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3177 3189 bufsize=0)
3178 3190
3179 3191 stdin = proc.stdin
3180 3192 stdout = proc.stdout
3181 3193 stderr = proc.stderr
3182 3194
3183 3195 # We turn the pipes into observers so we can log I/O.
3184 3196 if ui.verbose or opts['peer'] == 'raw':
3185 3197 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3186 3198 logdata=True)
3187 3199 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3188 3200 logdata=True)
3189 3201 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3190 3202 logdata=True)
3191 3203
3192 3204 # --localssh also implies the peer connection settings.
3193 3205
3194 3206 url = 'ssh://localserver'
3195 3207 autoreadstderr = not opts['noreadstderr']
3196 3208
3197 3209 if opts['peer'] == 'ssh1':
3198 3210 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3199 3211 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3200 3212 None, autoreadstderr=autoreadstderr)
3201 3213 elif opts['peer'] == 'ssh2':
3202 3214 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3203 3215 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3204 3216 None, autoreadstderr=autoreadstderr)
3205 3217 elif opts['peer'] == 'raw':
3206 3218 ui.write(_('using raw connection to peer\n'))
3207 3219 peer = None
3208 3220 else:
3209 3221 ui.write(_('creating ssh peer from handshake results\n'))
3210 3222 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3211 3223 autoreadstderr=autoreadstderr)
3212 3224
3213 3225 elif path:
3214 3226 # We bypass hg.peer() so we can proxy the sockets.
3215 3227 # TODO consider not doing this because we skip
3216 3228 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3217 3229 u = util.url(path)
3218 3230 if u.scheme != 'http':
3219 3231 raise error.Abort(_('only http:// paths are currently supported'))
3220 3232
3221 3233 url, authinfo = u.authinfo()
3222 3234 openerargs = {
3223 3235 r'useragent': b'Mercurial debugwireproto',
3224 3236 }
3225 3237
3226 3238 # Turn pipes/sockets into observers so we can log I/O.
3227 3239 if ui.verbose:
3228 3240 openerargs.update({
3229 3241 r'loggingfh': ui,
3230 3242 r'loggingname': b's',
3231 3243 r'loggingopts': {
3232 3244 r'logdata': True,
3233 3245 r'logdataapis': False,
3234 3246 },
3235 3247 })
3236 3248
3237 3249 if ui.debugflag:
3238 3250 openerargs[r'loggingopts'][r'logdataapis'] = True
3239 3251
3240 3252 # Don't send default headers when in raw mode. This allows us to
3241 3253 # bypass most of the behavior of our URL handling code so we can
3242 3254 # have near complete control over what's sent on the wire.
3243 3255 if opts['peer'] == 'raw':
3244 3256 openerargs[r'sendaccept'] = False
3245 3257
3246 3258 opener = urlmod.opener(ui, authinfo, **openerargs)
3247 3259
3248 3260 if opts['peer'] == 'http2':
3249 3261 ui.write(_('creating http peer for wire protocol version 2\n'))
3250 3262 # We go through makepeer() because we need an API descriptor for
3251 3263 # the peer instance to be useful.
3252 3264 with ui.configoverride({
3253 3265 ('experimental', 'httppeer.advertise-v2'): True}):
3254 3266 if opts['nologhandshake']:
3255 3267 ui.pushbuffer()
3256 3268
3257 3269 peer = httppeer.makepeer(ui, path, opener=opener)
3258 3270
3259 3271 if opts['nologhandshake']:
3260 3272 ui.popbuffer()
3261 3273
3262 3274 if not isinstance(peer, httppeer.httpv2peer):
3263 3275 raise error.Abort(_('could not instantiate HTTP peer for '
3264 3276 'wire protocol version 2'),
3265 3277 hint=_('the server may not have the feature '
3266 3278 'enabled or is not allowing this '
3267 3279 'client version'))
3268 3280
3269 3281 elif opts['peer'] == 'raw':
3270 3282 ui.write(_('using raw connection to peer\n'))
3271 3283 peer = None
3272 3284 elif opts['peer']:
3273 3285 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3274 3286 opts['peer'])
3275 3287 else:
3276 3288 peer = httppeer.makepeer(ui, path, opener=opener)
3277 3289
3278 3290 # We /could/ populate stdin/stdout with sock.makefile()...
3279 3291 else:
3280 3292 raise error.Abort(_('unsupported connection configuration'))
3281 3293
3282 3294 batchedcommands = None
3283 3295
3284 3296 # Now perform actions based on the parsed wire language instructions.
3285 3297 for action, lines in blocks:
3286 3298 if action in ('raw', 'raw+'):
3287 3299 if not stdin:
3288 3300 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3289 3301
3290 3302 # Concatenate the data together.
3291 3303 data = ''.join(l.lstrip() for l in lines)
3292 3304 data = stringutil.unescapestr(data)
3293 3305 stdin.write(data)
3294 3306
3295 3307 if action == 'raw+':
3296 3308 stdin.flush()
3297 3309 elif action == 'flush':
3298 3310 if not stdin:
3299 3311 raise error.Abort(_('cannot call flush on this peer'))
3300 3312 stdin.flush()
3301 3313 elif action.startswith('command'):
3302 3314 if not peer:
3303 3315 raise error.Abort(_('cannot send commands unless peer instance '
3304 3316 'is available'))
3305 3317
3306 3318 command = action.split(' ', 1)[1]
3307 3319
3308 3320 args = {}
3309 3321 for line in lines:
3310 3322 # We need to allow empty values.
3311 3323 fields = line.lstrip().split(' ', 1)
3312 3324 if len(fields) == 1:
3313 3325 key = fields[0]
3314 3326 value = ''
3315 3327 else:
3316 3328 key, value = fields
3317 3329
3318 3330 if value.startswith('eval:'):
3319 3331 value = stringutil.evalpythonliteral(value[5:])
3320 3332 else:
3321 3333 value = stringutil.unescapestr(value)
3322 3334
3323 3335 args[key] = value
3324 3336
3325 3337 if batchedcommands is not None:
3326 3338 batchedcommands.append((command, args))
3327 3339 continue
3328 3340
3329 3341 ui.status(_('sending %s command\n') % command)
3330 3342
3331 3343 if 'PUSHFILE' in args:
3332 3344 with open(args['PUSHFILE'], r'rb') as fh:
3333 3345 del args['PUSHFILE']
3334 3346 res, output = peer._callpush(command, fh,
3335 3347 **pycompat.strkwargs(args))
3336 3348 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3337 3349 ui.status(_('remote output: %s\n') %
3338 3350 stringutil.escapestr(output))
3339 3351 else:
3340 3352 with peer.commandexecutor() as e:
3341 3353 res = e.callcommand(command, args).result()
3342 3354
3343 3355 if isinstance(res, wireprotov2peer.commandresponse):
3344 3356 val = res.objects()
3345 3357 ui.status(_('response: %s\n') %
3346 3358 stringutil.pprint(val, bprefix=True, indent=2))
3347 3359 else:
3348 3360 ui.status(_('response: %s\n') %
3349 3361 stringutil.pprint(res, bprefix=True, indent=2))
3350 3362
3351 3363 elif action == 'batchbegin':
3352 3364 if batchedcommands is not None:
3353 3365 raise error.Abort(_('nested batchbegin not allowed'))
3354 3366
3355 3367 batchedcommands = []
3356 3368 elif action == 'batchsubmit':
3357 3369 # There is a batching API we could go through. But it would be
3358 3370 # difficult to normalize requests into function calls. It is easier
3359 3371 # to bypass this layer and normalize to commands + args.
3360 3372 ui.status(_('sending batch with %d sub-commands\n') %
3361 3373 len(batchedcommands))
3362 3374 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3363 3375 ui.status(_('response #%d: %s\n') %
3364 3376 (i, stringutil.escapestr(chunk)))
3365 3377
3366 3378 batchedcommands = None
3367 3379
3368 3380 elif action.startswith('httprequest '):
3369 3381 if not opener:
3370 3382 raise error.Abort(_('cannot use httprequest without an HTTP '
3371 3383 'peer'))
3372 3384
3373 3385 request = action.split(' ', 2)
3374 3386 if len(request) != 3:
3375 3387 raise error.Abort(_('invalid httprequest: expected format is '
3376 3388 '"httprequest <method> <path>'))
3377 3389
3378 3390 method, httppath = request[1:]
3379 3391 headers = {}
3380 3392 body = None
3381 3393 frames = []
3382 3394 for line in lines:
3383 3395 line = line.lstrip()
3384 3396 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3385 3397 if m:
3386 3398 # Headers need to use native strings.
3387 3399 key = pycompat.strurl(m.group(1))
3388 3400 value = pycompat.strurl(m.group(2))
3389 3401 headers[key] = value
3390 3402 continue
3391 3403
3392 3404 if line.startswith(b'BODYFILE '):
3393 3405 with open(line.split(b' ', 1), 'rb') as fh:
3394 3406 body = fh.read()
3395 3407 elif line.startswith(b'frame '):
3396 3408 frame = wireprotoframing.makeframefromhumanstring(
3397 3409 line[len(b'frame '):])
3398 3410
3399 3411 frames.append(frame)
3400 3412 else:
3401 3413 raise error.Abort(_('unknown argument to httprequest: %s') %
3402 3414 line)
3403 3415
3404 3416 url = path + httppath
3405 3417
3406 3418 if frames:
3407 3419 body = b''.join(bytes(f) for f in frames)
3408 3420
3409 3421 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3410 3422
3411 3423 # urllib.Request insists on using has_data() as a proxy for
3412 3424 # determining the request method. Override that to use our
3413 3425 # explicitly requested method.
3414 3426 req.get_method = lambda: pycompat.sysstr(method)
3415 3427
3416 3428 try:
3417 3429 res = opener.open(req)
3418 3430 body = res.read()
3419 3431 except util.urlerr.urlerror as e:
3420 3432 # read() method must be called, but only exists in Python 2
3421 3433 getattr(e, 'read', lambda: None)()
3422 3434 continue
3423 3435
3424 3436 ct = res.headers.get(r'Content-Type')
3425 3437 if ct == r'application/mercurial-cbor':
3426 3438 ui.write(_('cbor> %s\n') %
3427 3439 stringutil.pprint(cborutil.decodeall(body),
3428 3440 bprefix=True,
3429 3441 indent=2))
3430 3442
3431 3443 elif action == 'close':
3432 3444 peer.close()
3433 3445 elif action == 'readavailable':
3434 3446 if not stdout or not stderr:
3435 3447 raise error.Abort(_('readavailable not available on this peer'))
3436 3448
3437 3449 stdin.close()
3438 3450 stdout.read()
3439 3451 stderr.read()
3440 3452
3441 3453 elif action == 'readline':
3442 3454 if not stdout:
3443 3455 raise error.Abort(_('readline not available on this peer'))
3444 3456 stdout.readline()
3445 3457 elif action == 'ereadline':
3446 3458 if not stderr:
3447 3459 raise error.Abort(_('ereadline not available on this peer'))
3448 3460 stderr.readline()
3449 3461 elif action.startswith('read '):
3450 3462 count = int(action.split(' ', 1)[1])
3451 3463 if not stdout:
3452 3464 raise error.Abort(_('read not available on this peer'))
3453 3465 stdout.read(count)
3454 3466 elif action.startswith('eread '):
3455 3467 count = int(action.split(' ', 1)[1])
3456 3468 if not stderr:
3457 3469 raise error.Abort(_('eread not available on this peer'))
3458 3470 stderr.read(count)
3459 3471 else:
3460 3472 raise error.Abort(_('unknown action: %s') % action)
3461 3473
3462 3474 if batchedcommands is not None:
3463 3475 raise error.Abort(_('unclosed "batchbegin" request'))
3464 3476
3465 3477 if peer:
3466 3478 peer.close()
3467 3479
3468 3480 if proc:
3469 3481 proc.kill()
@@ -1,109 +1,146 b''
1 1 # policy.py - module policy logic for Mercurial.
2 2 #
3 3 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11 import sys
12 12
13 13 # Rules for how modules can be loaded. Values are:
14 14 #
15 15 # c - require C extensions
16 # rust+c - require Rust and C extensions
17 # rust+c-allow - allow Rust and C extensions with fallback to pure Python
18 # for each
16 19 # allow - allow pure Python implementation when C loading fails
17 20 # cffi - required cffi versions (implemented within pure module)
18 21 # cffi-allow - allow pure Python implementation if cffi version is missing
19 22 # py - only load pure Python modules
20 23 #
21 24 # By default, fall back to the pure modules so the in-place build can
22 25 # run without recompiling the C extensions. This will be overridden by
23 26 # __modulepolicy__ generated by setup.py.
24 27 policy = b'allow'
25 28 _packageprefs = {
26 29 # policy: (versioned package, pure package)
27 30 b'c': (r'cext', None),
28 31 b'allow': (r'cext', r'pure'),
29 32 b'cffi': (r'cffi', None),
30 33 b'cffi-allow': (r'cffi', r'pure'),
31 34 b'py': (None, r'pure'),
35 # For now, rust policies impact importrust only
36 b'rust+c': (r'cext', None),
37 b'rust+c-allow': (r'cext', r'pure'),
32 38 }
33 39
34 40 try:
35 41 from . import __modulepolicy__
36 42 policy = __modulepolicy__.modulepolicy
37 43 except ImportError:
38 44 pass
39 45
40 46 # PyPy doesn't load C extensions.
41 47 #
42 48 # The canonical way to do this is to test platform.python_implementation().
43 49 # But we don't import platform and don't bloat for it here.
44 50 if r'__pypy__' in sys.builtin_module_names:
45 51 policy = b'cffi'
46 52
47 53 # Environment variable can always force settings.
48 54 if sys.version_info[0] >= 3:
49 55 if r'HGMODULEPOLICY' in os.environ:
50 56 policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8')
51 57 else:
52 58 policy = os.environ.get(r'HGMODULEPOLICY', policy)
53 59
54 60 def _importfrom(pkgname, modname):
55 61 # from .<pkgname> import <modname> (where . is looked through this module)
56 62 fakelocals = {}
57 63 pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1)
58 64 try:
59 65 fakelocals[modname] = mod = getattr(pkg, modname)
60 66 except AttributeError:
61 67 raise ImportError(r'cannot import name %s' % modname)
62 68 # force import; fakelocals[modname] may be replaced with the real module
63 69 getattr(mod, r'__doc__', None)
64 70 return fakelocals[modname]
65 71
66 72 # keep in sync with "version" in C modules
67 73 _cextversions = {
68 74 (r'cext', r'base85'): 1,
69 75 (r'cext', r'bdiff'): 3,
70 76 (r'cext', r'mpatch'): 1,
71 77 (r'cext', r'osutil'): 4,
72 78 (r'cext', r'parsers'): 13,
73 79 }
74 80
75 81 # map import request to other package or module
76 82 _modredirects = {
77 83 (r'cext', r'charencode'): (r'cext', r'parsers'),
78 84 (r'cffi', r'base85'): (r'pure', r'base85'),
79 85 (r'cffi', r'charencode'): (r'pure', r'charencode'),
80 86 (r'cffi', r'parsers'): (r'pure', r'parsers'),
81 87 }
82 88
83 89 def _checkmod(pkgname, modname, mod):
84 90 expected = _cextversions.get((pkgname, modname))
85 91 actual = getattr(mod, r'version', None)
86 92 if actual != expected:
87 93 raise ImportError(r'cannot import module %s.%s '
88 94 r'(expected version: %d, actual: %r)'
89 95 % (pkgname, modname, expected, actual))
90 96
91 97 def importmod(modname):
92 98 """Import module according to policy and check API version"""
93 99 try:
94 100 verpkg, purepkg = _packageprefs[policy]
95 101 except KeyError:
96 102 raise ImportError(r'invalid HGMODULEPOLICY %r' % policy)
97 103 assert verpkg or purepkg
98 104 if verpkg:
99 105 pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname))
100 106 try:
101 107 mod = _importfrom(pn, mn)
102 108 if pn == verpkg:
103 109 _checkmod(pn, mn, mod)
104 110 return mod
105 111 except ImportError:
106 112 if not purepkg:
107 113 raise
108 114 pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname))
109 115 return _importfrom(pn, mn)
116
117 def _isrustpermissive():
118 """Assuming the policy is a Rust one, tell if it's permissive."""
119 return policy.endswith(b'-allow')
120
121 def importrust(modname, member=None, default=None):
122 """Import Rust module according to policy and availability.
123
124 If policy isn't a Rust one, this returns `default`.
125
126 If either the module or its member is not available, this returns `default`
127 if policy is permissive and raises `ImportError` if not.
128 """
129 if not policy.startswith(b'rust'):
130 return default
131
132 try:
133 mod = _importfrom(r'rustext', modname)
134 except ImportError:
135 if _isrustpermissive():
136 return default
137 raise
138 if member is None:
139 return mod
140
141 try:
142 return getattr(mod, member)
143 except AttributeError:
144 if _isrustpermissive():
145 return default
146 raise ImportError(r"Cannot import name %s" % member)
@@ -1,1477 +1,1484 b''
1 1 #
2 2 # This is the mercurial setup script.
3 3 #
4 4 # 'python setup.py install', or
5 5 # 'python setup.py --help' for more options
6 6
7 7 import os
8 8
9 9 supportedpy = '~= 2.7'
10 10 if os.environ.get('HGALLOWPYTHON3', ''):
11 11 # Mercurial will never work on Python 3 before 3.5 due to a lack
12 12 # of % formatting on bytestrings, and can't work on 3.6.0 or 3.6.1
13 13 # due to a bug in % formatting in bytestrings.
14 14 # We cannot support Python 3.5.0, 3.5.1, 3.5.2 because of bug in
15 15 # codecs.escape_encode() where it raises SystemError on empty bytestring
16 16 # bug link: https://bugs.python.org/issue25270
17 17 #
18 18 # TODO: when we actually work on Python 3, use this string as the
19 19 # actual supportedpy string.
20 20 supportedpy = ','.join([
21 21 '>=2.7',
22 22 '!=3.0.*',
23 23 '!=3.1.*',
24 24 '!=3.2.*',
25 25 '!=3.3.*',
26 26 '!=3.4.*',
27 27 '!=3.5.0',
28 28 '!=3.5.1',
29 29 '!=3.5.2',
30 30 '!=3.6.0',
31 31 '!=3.6.1',
32 32 ])
33 33
34 34 import sys, platform
35 35 if sys.version_info[0] >= 3:
36 36 printf = eval('print')
37 37 libdir_escape = 'unicode_escape'
38 38 def sysstr(s):
39 39 return s.decode('latin-1')
40 40 else:
41 41 libdir_escape = 'string_escape'
42 42 def printf(*args, **kwargs):
43 43 f = kwargs.get('file', sys.stdout)
44 44 end = kwargs.get('end', '\n')
45 45 f.write(b' '.join(args) + end)
46 46 def sysstr(s):
47 47 return s
48 48
49 49 # Attempt to guide users to a modern pip - this means that 2.6 users
50 50 # should have a chance of getting a 4.2 release, and when we ratchet
51 51 # the version requirement forward again hopefully everyone will get
52 52 # something that works for them.
53 53 if sys.version_info < (2, 7, 0, 'final'):
54 54 pip_message = ('This may be due to an out of date pip. '
55 55 'Make sure you have pip >= 9.0.1.')
56 56 try:
57 57 import pip
58 58 pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]])
59 59 if pip_version < (9, 0, 1) :
60 60 pip_message = (
61 61 'Your pip version is out of date, please install '
62 62 'pip >= 9.0.1. pip {} detected.'.format(pip.__version__))
63 63 else:
64 64 # pip is new enough - it must be something else
65 65 pip_message = ''
66 66 except Exception:
67 67 pass
68 68 error = """
69 69 Mercurial does not support Python older than 2.7.
70 70 Python {py} detected.
71 71 {pip}
72 72 """.format(py=sys.version_info, pip=pip_message)
73 73 printf(error, file=sys.stderr)
74 74 sys.exit(1)
75 75
76 76 # We don't yet officially support Python 3. But we want to allow developers to
77 77 # hack on. Detect and disallow running on Python 3 by default. But provide a
78 78 # backdoor to enable working on Python 3.
79 79 if sys.version_info[0] != 2:
80 80 badpython = True
81 81
82 82 # Allow Python 3 from source checkouts.
83 83 if os.path.isdir('.hg') or 'HGPYTHON3' in os.environ:
84 84 badpython = False
85 85
86 86 if badpython:
87 87 error = """
88 88 Python {py} detected.
89 89
90 90 Mercurial currently has beta support for Python 3 and use of Python 2.7 is
91 91 recommended for the best experience.
92 92
93 93 Please re-run with Python 2.7 for a faster, less buggy experience.
94 94
95 95 If you would like to beta test Mercurial with Python 3, this error can
96 96 be suppressed by defining the HGPYTHON3 environment variable when invoking
97 97 this command. No special environment variables or configuration changes are
98 98 necessary to run `hg` with Python 3.
99 99
100 100 See https://www.mercurial-scm.org/wiki/Python3 for more on Mercurial's
101 101 Python 3 support.
102 102 """.format(py='.'.join('%d' % x for x in sys.version_info[0:2]))
103 103
104 104 printf(error, file=sys.stderr)
105 105 sys.exit(1)
106 106
107 107 # Solaris Python packaging brain damage
108 108 try:
109 109 import hashlib
110 110 sha = hashlib.sha1()
111 111 except ImportError:
112 112 try:
113 113 import sha
114 114 sha.sha # silence unused import warning
115 115 except ImportError:
116 116 raise SystemExit(
117 117 "Couldn't import standard hashlib (incomplete Python install).")
118 118
119 119 try:
120 120 import zlib
121 121 zlib.compressobj # silence unused import warning
122 122 except ImportError:
123 123 raise SystemExit(
124 124 "Couldn't import standard zlib (incomplete Python install).")
125 125
126 126 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
127 127 isironpython = False
128 128 try:
129 129 isironpython = (platform.python_implementation()
130 130 .lower().find("ironpython") != -1)
131 131 except AttributeError:
132 132 pass
133 133
134 134 if isironpython:
135 135 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
136 136 else:
137 137 try:
138 138 import bz2
139 139 bz2.BZ2Compressor # silence unused import warning
140 140 except ImportError:
141 141 raise SystemExit(
142 142 "Couldn't import standard bz2 (incomplete Python install).")
143 143
144 144 ispypy = "PyPy" in sys.version
145 145
146 146 hgrustext = os.environ.get('HGWITHRUSTEXT')
147 147 # TODO record it for proper rebuild upon changes
148 148 # (see mercurial/__modulepolicy__.py)
149 149 if hgrustext != 'cpython' and hgrustext is not None:
150 150 hgrustext = 'direct-ffi'
151 151
152 152 import ctypes
153 153 import errno
154 154 import stat, subprocess, time
155 155 import re
156 156 import shutil
157 157 import tempfile
158 158 from distutils import log
159 159 # We have issues with setuptools on some platforms and builders. Until
160 160 # those are resolved, setuptools is opt-in except for platforms where
161 161 # we don't have issues.
162 162 issetuptools = (os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ)
163 163 if issetuptools:
164 164 from setuptools import setup
165 165 else:
166 166 from distutils.core import setup
167 167 from distutils.ccompiler import new_compiler
168 168 from distutils.core import Command, Extension
169 169 from distutils.dist import Distribution
170 170 from distutils.command.build import build
171 171 from distutils.command.build_ext import build_ext
172 172 from distutils.command.build_py import build_py
173 173 from distutils.command.build_scripts import build_scripts
174 174 from distutils.command.install import install
175 175 from distutils.command.install_lib import install_lib
176 176 from distutils.command.install_scripts import install_scripts
177 177 from distutils.spawn import spawn, find_executable
178 178 from distutils import file_util
179 179 from distutils.errors import (
180 180 CCompilerError,
181 181 DistutilsError,
182 182 DistutilsExecError,
183 183 )
184 184 from distutils.sysconfig import get_python_inc, get_config_var
185 185 from distutils.version import StrictVersion
186 186
187 187 # Explain to distutils.StrictVersion how our release candidates are versionned
188 188 StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$')
189 189
190 190 def write_if_changed(path, content):
191 191 """Write content to a file iff the content hasn't changed."""
192 192 if os.path.exists(path):
193 193 with open(path, 'rb') as fh:
194 194 current = fh.read()
195 195 else:
196 196 current = b''
197 197
198 198 if current != content:
199 199 with open(path, 'wb') as fh:
200 200 fh.write(content)
201 201
202 202 scripts = ['hg']
203 203 if os.name == 'nt':
204 204 # We remove hg.bat if we are able to build hg.exe.
205 205 scripts.append('contrib/win32/hg.bat')
206 206
207 207 def cancompile(cc, code):
208 208 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
209 209 devnull = oldstderr = None
210 210 try:
211 211 fname = os.path.join(tmpdir, 'testcomp.c')
212 212 f = open(fname, 'w')
213 213 f.write(code)
214 214 f.close()
215 215 # Redirect stderr to /dev/null to hide any error messages
216 216 # from the compiler.
217 217 # This will have to be changed if we ever have to check
218 218 # for a function on Windows.
219 219 devnull = open('/dev/null', 'w')
220 220 oldstderr = os.dup(sys.stderr.fileno())
221 221 os.dup2(devnull.fileno(), sys.stderr.fileno())
222 222 objects = cc.compile([fname], output_dir=tmpdir)
223 223 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
224 224 return True
225 225 except Exception:
226 226 return False
227 227 finally:
228 228 if oldstderr is not None:
229 229 os.dup2(oldstderr, sys.stderr.fileno())
230 230 if devnull is not None:
231 231 devnull.close()
232 232 shutil.rmtree(tmpdir)
233 233
234 234 # simplified version of distutils.ccompiler.CCompiler.has_function
235 235 # that actually removes its temporary files.
236 236 def hasfunction(cc, funcname):
237 237 code = 'int main(void) { %s(); }\n' % funcname
238 238 return cancompile(cc, code)
239 239
240 240 def hasheader(cc, headername):
241 241 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
242 242 return cancompile(cc, code)
243 243
244 244 # py2exe needs to be installed to work
245 245 try:
246 246 import py2exe
247 247 py2exe.Distribution # silence unused import warning
248 248 py2exeloaded = True
249 249 # import py2exe's patched Distribution class
250 250 from distutils.core import Distribution
251 251 except ImportError:
252 252 py2exeloaded = False
253 253
254 254 def runcmd(cmd, env, cwd=None):
255 255 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
256 256 stderr=subprocess.PIPE, env=env, cwd=cwd)
257 257 out, err = p.communicate()
258 258 return p.returncode, out, err
259 259
260 260 class hgcommand(object):
261 261 def __init__(self, cmd, env):
262 262 self.cmd = cmd
263 263 self.env = env
264 264
265 265 def run(self, args):
266 266 cmd = self.cmd + args
267 267 returncode, out, err = runcmd(cmd, self.env)
268 268 err = filterhgerr(err)
269 269 if err or returncode != 0:
270 270 printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
271 271 printf(err, file=sys.stderr)
272 272 return ''
273 273 return out
274 274
275 275 def filterhgerr(err):
276 276 # If root is executing setup.py, but the repository is owned by
277 277 # another user (as in "sudo python setup.py install") we will get
278 278 # trust warnings since the .hg/hgrc file is untrusted. That is
279 279 # fine, we don't want to load it anyway. Python may warn about
280 280 # a missing __init__.py in mercurial/locale, we also ignore that.
281 281 err = [e for e in err.splitlines()
282 282 if (not e.startswith(b'not trusting file')
283 283 and not e.startswith(b'warning: Not importing')
284 284 and not e.startswith(b'obsolete feature not enabled')
285 285 and not e.startswith(b'*** failed to import extension')
286 286 and not e.startswith(b'devel-warn:')
287 287 and not (e.startswith(b'(third party extension')
288 288 and e.endswith(b'or newer of Mercurial; disabling)')))]
289 289 return b'\n'.join(b' ' + e for e in err)
290 290
291 291 def findhg():
292 292 """Try to figure out how we should invoke hg for examining the local
293 293 repository contents.
294 294
295 295 Returns an hgcommand object."""
296 296 # By default, prefer the "hg" command in the user's path. This was
297 297 # presumably the hg command that the user used to create this repository.
298 298 #
299 299 # This repository may require extensions or other settings that would not
300 300 # be enabled by running the hg script directly from this local repository.
301 301 hgenv = os.environ.copy()
302 302 # Use HGPLAIN to disable hgrc settings that would change output formatting,
303 303 # and disable localization for the same reasons.
304 304 hgenv['HGPLAIN'] = '1'
305 305 hgenv['LANGUAGE'] = 'C'
306 306 hgcmd = ['hg']
307 307 # Run a simple "hg log" command just to see if using hg from the user's
308 308 # path works and can successfully interact with this repository. Windows
309 309 # gives precedence to hg.exe in the current directory, so fall back to the
310 310 # python invocation of local hg, where pythonXY.dll can always be found.
311 311 check_cmd = ['log', '-r.', '-Ttest']
312 312 if os.name != 'nt':
313 313 try:
314 314 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
315 315 except EnvironmentError:
316 316 retcode = -1
317 317 if retcode == 0 and not filterhgerr(err):
318 318 return hgcommand(hgcmd, hgenv)
319 319
320 320 # Fall back to trying the local hg installation.
321 321 hgenv = localhgenv()
322 322 hgcmd = [sys.executable, 'hg']
323 323 try:
324 324 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
325 325 except EnvironmentError:
326 326 retcode = -1
327 327 if retcode == 0 and not filterhgerr(err):
328 328 return hgcommand(hgcmd, hgenv)
329 329
330 330 raise SystemExit('Unable to find a working hg binary to extract the '
331 331 'version from the repository tags')
332 332
333 333 def localhgenv():
334 334 """Get an environment dictionary to use for invoking or importing
335 335 mercurial from the local repository."""
336 336 # Execute hg out of this directory with a custom environment which takes
337 337 # care to not use any hgrc files and do no localization.
338 338 env = {'HGMODULEPOLICY': 'py',
339 339 'HGRCPATH': '',
340 340 'LANGUAGE': 'C',
341 341 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
342 342 if 'LD_LIBRARY_PATH' in os.environ:
343 343 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
344 344 if 'SystemRoot' in os.environ:
345 345 # SystemRoot is required by Windows to load various DLLs. See:
346 346 # https://bugs.python.org/issue13524#msg148850
347 347 env['SystemRoot'] = os.environ['SystemRoot']
348 348 return env
349 349
350 350 version = ''
351 351
352 352 if os.path.isdir('.hg'):
353 353 hg = findhg()
354 354 cmd = ['log', '-r', '.', '--template', '{tags}\n']
355 355 numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()]
356 356 hgid = sysstr(hg.run(['id', '-i'])).strip()
357 357 if not hgid:
358 358 # Bail out if hg is having problems interacting with this repository,
359 359 # rather than falling through and producing a bogus version number.
360 360 # Continuing with an invalid version number will break extensions
361 361 # that define minimumhgversion.
362 362 raise SystemExit('Unable to determine hg version from local repository')
363 363 if numerictags: # tag(s) found
364 364 version = numerictags[-1]
365 365 if hgid.endswith('+'): # propagate the dirty status to the tag
366 366 version += '+'
367 367 else: # no tag found
368 368 ltagcmd = ['parents', '--template', '{latesttag}']
369 369 ltag = sysstr(hg.run(ltagcmd))
370 370 changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
371 371 changessince = len(hg.run(changessincecmd).splitlines())
372 372 version = '%s+%s-%s' % (ltag, changessince, hgid)
373 373 if version.endswith('+'):
374 374 version += time.strftime('%Y%m%d')
375 375 elif os.path.exists('.hg_archival.txt'):
376 376 kw = dict([[t.strip() for t in l.split(':', 1)]
377 377 for l in open('.hg_archival.txt')])
378 378 if 'tag' in kw:
379 379 version = kw['tag']
380 380 elif 'latesttag' in kw:
381 381 if 'changessincelatesttag' in kw:
382 382 version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw
383 383 else:
384 384 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
385 385 else:
386 386 version = kw.get('node', '')[:12]
387 387
388 388 if version:
389 389 versionb = version
390 390 if not isinstance(versionb, bytes):
391 391 versionb = versionb.encode('ascii')
392 392
393 393 write_if_changed('mercurial/__version__.py', b''.join([
394 394 b'# this file is autogenerated by setup.py\n'
395 395 b'version = b"%s"\n' % versionb,
396 396 ]))
397 397
398 398 try:
399 399 oldpolicy = os.environ.get('HGMODULEPOLICY', None)
400 400 os.environ['HGMODULEPOLICY'] = 'py'
401 401 from mercurial import __version__
402 402 version = __version__.version
403 403 except ImportError:
404 404 version = b'unknown'
405 405 finally:
406 406 if oldpolicy is None:
407 407 del os.environ['HGMODULEPOLICY']
408 408 else:
409 409 os.environ['HGMODULEPOLICY'] = oldpolicy
410 410
411 411 class hgbuild(build):
412 412 # Insert hgbuildmo first so that files in mercurial/locale/ are found
413 413 # when build_py is run next.
414 414 sub_commands = [('build_mo', None)] + build.sub_commands
415 415
416 416 class hgbuildmo(build):
417 417
418 418 description = "build translations (.mo files)"
419 419
420 420 def run(self):
421 421 if not find_executable('msgfmt'):
422 422 self.warn("could not find msgfmt executable, no translations "
423 423 "will be built")
424 424 return
425 425
426 426 podir = 'i18n'
427 427 if not os.path.isdir(podir):
428 428 self.warn("could not find %s/ directory" % podir)
429 429 return
430 430
431 431 join = os.path.join
432 432 for po in os.listdir(podir):
433 433 if not po.endswith('.po'):
434 434 continue
435 435 pofile = join(podir, po)
436 436 modir = join('locale', po[:-3], 'LC_MESSAGES')
437 437 mofile = join(modir, 'hg.mo')
438 438 mobuildfile = join('mercurial', mofile)
439 439 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
440 440 if sys.platform != 'sunos5':
441 441 # msgfmt on Solaris does not know about -c
442 442 cmd.append('-c')
443 443 self.mkpath(join('mercurial', modir))
444 444 self.make_file([pofile], mobuildfile, spawn, (cmd,))
445 445
446 446
447 447 class hgdist(Distribution):
448 448 pure = False
449 449 cffi = ispypy
450 450
451 451 global_options = Distribution.global_options + [
452 452 ('pure', None, "use pure (slow) Python code instead of C extensions"),
453 453 ]
454 454
455 455 def has_ext_modules(self):
456 456 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
457 457 # too late for some cases
458 458 return not self.pure and Distribution.has_ext_modules(self)
459 459
460 460 # This is ugly as a one-liner. So use a variable.
461 461 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
462 462 buildextnegops['no-zstd'] = 'zstd'
463 463
464 464 class hgbuildext(build_ext):
465 465 user_options = build_ext.user_options + [
466 466 ('zstd', None, 'compile zstd bindings [default]'),
467 467 ('no-zstd', None, 'do not compile zstd bindings'),
468 468 ]
469 469
470 470 boolean_options = build_ext.boolean_options + ['zstd']
471 471 negative_opt = buildextnegops
472 472
473 473 def initialize_options(self):
474 474 self.zstd = True
475 475 return build_ext.initialize_options(self)
476 476
477 477 def build_extensions(self):
478 478 ruststandalones = [e for e in self.extensions
479 479 if isinstance(e, RustStandaloneExtension)]
480 480 self.extensions = [e for e in self.extensions
481 481 if e not in ruststandalones]
482 482 # Filter out zstd if disabled via argument.
483 483 if not self.zstd:
484 484 self.extensions = [e for e in self.extensions
485 485 if e.name != 'mercurial.zstd']
486 486
487 487 for rustext in ruststandalones:
488 488 rustext.build('' if self.inplace else self.build_lib)
489 489
490 490 return build_ext.build_extensions(self)
491 491
492 492 def build_extension(self, ext):
493 493 if isinstance(ext, RustExtension):
494 494 ext.rustbuild()
495 495 try:
496 496 build_ext.build_extension(self, ext)
497 497 except CCompilerError:
498 498 if not getattr(ext, 'optional', False):
499 499 raise
500 500 log.warn("Failed to build optional extension '%s' (skipping)",
501 501 ext.name)
502 502
503 503 class hgbuildscripts(build_scripts):
504 504 def run(self):
505 505 if os.name != 'nt' or self.distribution.pure:
506 506 return build_scripts.run(self)
507 507
508 508 exebuilt = False
509 509 try:
510 510 self.run_command('build_hgexe')
511 511 exebuilt = True
512 512 except (DistutilsError, CCompilerError):
513 513 log.warn('failed to build optional hg.exe')
514 514
515 515 if exebuilt:
516 516 # Copying hg.exe to the scripts build directory ensures it is
517 517 # installed by the install_scripts command.
518 518 hgexecommand = self.get_finalized_command('build_hgexe')
519 519 dest = os.path.join(self.build_dir, 'hg.exe')
520 520 self.mkpath(self.build_dir)
521 521 self.copy_file(hgexecommand.hgexepath, dest)
522 522
523 523 # Remove hg.bat because it is redundant with hg.exe.
524 524 self.scripts.remove('contrib/win32/hg.bat')
525 525
526 526 return build_scripts.run(self)
527 527
528 528 class hgbuildpy(build_py):
529 529 def finalize_options(self):
530 530 build_py.finalize_options(self)
531 531
532 532 if self.distribution.pure:
533 533 self.distribution.ext_modules = []
534 534 elif self.distribution.cffi:
535 535 from mercurial.cffi import (
536 536 bdiffbuild,
537 537 mpatchbuild,
538 538 )
539 539 exts = [mpatchbuild.ffi.distutils_extension(),
540 540 bdiffbuild.ffi.distutils_extension()]
541 541 # cffi modules go here
542 542 if sys.platform == 'darwin':
543 543 from mercurial.cffi import osutilbuild
544 544 exts.append(osutilbuild.ffi.distutils_extension())
545 545 self.distribution.ext_modules = exts
546 546 else:
547 547 h = os.path.join(get_python_inc(), 'Python.h')
548 548 if not os.path.exists(h):
549 549 raise SystemExit('Python headers are required to build '
550 550 'Mercurial but weren\'t found in %s' % h)
551 551
552 552 def run(self):
553 553 basepath = os.path.join(self.build_lib, 'mercurial')
554 554 self.mkpath(basepath)
555 555
556 556 if self.distribution.pure:
557 557 modulepolicy = 'py'
558 558 elif self.build_lib == '.':
559 # in-place build should run without rebuilding C extensions
560 modulepolicy = 'allow'
559 # in-place build should run without rebuilding C
560 # and Rust extensions
561 if hgrustext == 'cpython':
562 modulepolicy = 'rust+c-allow'
563 else:
564 modulepolicy = 'allow'
561 565 else:
562 modulepolicy = 'c'
566 if hgrustext == 'cpython':
567 modulepolicy = 'rust+c'
568 else:
569 modulepolicy = 'c'
563 570
564 571 content = b''.join([
565 572 b'# this file is autogenerated by setup.py\n',
566 573 b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
567 574 ])
568 575 write_if_changed(os.path.join(basepath, '__modulepolicy__.py'),
569 576 content)
570 577
571 578 build_py.run(self)
572 579
573 580 class buildhgextindex(Command):
574 581 description = 'generate prebuilt index of hgext (for frozen package)'
575 582 user_options = []
576 583 _indexfilename = 'hgext/__index__.py'
577 584
578 585 def initialize_options(self):
579 586 pass
580 587
581 588 def finalize_options(self):
582 589 pass
583 590
584 591 def run(self):
585 592 if os.path.exists(self._indexfilename):
586 593 with open(self._indexfilename, 'w') as f:
587 594 f.write('# empty\n')
588 595
589 596 # here no extension enabled, disabled() lists up everything
590 597 code = ('import pprint; from mercurial import extensions; '
591 598 'pprint.pprint(extensions.disabled())')
592 599 returncode, out, err = runcmd([sys.executable, '-c', code],
593 600 localhgenv())
594 601 if err or returncode != 0:
595 602 raise DistutilsExecError(err)
596 603
597 604 with open(self._indexfilename, 'wb') as f:
598 605 f.write(b'# this file is autogenerated by setup.py\n')
599 606 f.write(b'docs = ')
600 607 f.write(out)
601 608
602 609 class buildhgexe(build_ext):
603 610 description = 'compile hg.exe from mercurial/exewrapper.c'
604 611 user_options = build_ext.user_options + [
605 612 ('long-paths-support', None, 'enable support for long paths on '
606 613 'Windows (off by default and '
607 614 'experimental)'),
608 615 ]
609 616
610 617 LONG_PATHS_MANIFEST = """
611 618 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
612 619 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
613 620 <application>
614 621 <windowsSettings
615 622 xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
616 623 <ws2:longPathAware>true</ws2:longPathAware>
617 624 </windowsSettings>
618 625 </application>
619 626 </assembly>"""
620 627
621 628 def initialize_options(self):
622 629 build_ext.initialize_options(self)
623 630 self.long_paths_support = False
624 631
625 632 def build_extensions(self):
626 633 if os.name != 'nt':
627 634 return
628 635 if isinstance(self.compiler, HackedMingw32CCompiler):
629 636 self.compiler.compiler_so = self.compiler.compiler # no -mdll
630 637 self.compiler.dll_libraries = [] # no -lmsrvc90
631 638
632 639 # Different Python installs can have different Python library
633 640 # names. e.g. the official CPython distribution uses pythonXY.dll
634 641 # and MinGW uses libpythonX.Y.dll.
635 642 _kernel32 = ctypes.windll.kernel32
636 643 _kernel32.GetModuleFileNameA.argtypes = [ctypes.c_void_p,
637 644 ctypes.c_void_p,
638 645 ctypes.c_ulong]
639 646 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
640 647 size = 1000
641 648 buf = ctypes.create_string_buffer(size + 1)
642 649 filelen = _kernel32.GetModuleFileNameA(sys.dllhandle, ctypes.byref(buf),
643 650 size)
644 651
645 652 if filelen > 0 and filelen != size:
646 653 dllbasename = os.path.basename(buf.value)
647 654 if not dllbasename.lower().endswith(b'.dll'):
648 655 raise SystemExit('Python DLL does not end with .dll: %s' %
649 656 dllbasename)
650 657 pythonlib = dllbasename[:-4]
651 658 else:
652 659 log.warn('could not determine Python DLL filename; '
653 660 'assuming pythonXY')
654 661
655 662 hv = sys.hexversion
656 663 pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
657 664
658 665 log.info('using %s as Python library name' % pythonlib)
659 666 with open('mercurial/hgpythonlib.h', 'wb') as f:
660 667 f.write(b'/* this file is autogenerated by setup.py */\n')
661 668 f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
662 669
663 670 macros = None
664 671 if sys.version_info[0] >= 3:
665 672 macros = [('_UNICODE', None), ('UNICODE', None)]
666 673
667 674 objects = self.compiler.compile(['mercurial/exewrapper.c'],
668 675 output_dir=self.build_temp,
669 676 macros=macros)
670 677 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
671 678 self.hgtarget = os.path.join(dir, 'hg')
672 679 self.compiler.link_executable(objects, self.hgtarget,
673 680 libraries=[],
674 681 output_dir=self.build_temp)
675 682 if self.long_paths_support:
676 683 self.addlongpathsmanifest()
677 684
678 685 def addlongpathsmanifest(self):
679 686 r"""Add manifest pieces so that hg.exe understands long paths
680 687
681 688 This is an EXPERIMENTAL feature, use with care.
682 689 To enable long paths support, one needs to do two things:
683 690 - build Mercurial with --long-paths-support option
684 691 - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\
685 692 LongPathsEnabled to have value 1.
686 693
687 694 Please ignore 'warning 81010002: Unrecognized Element "longPathAware"';
688 695 it happens because Mercurial uses mt.exe circa 2008, which is not
689 696 yet aware of long paths support in the manifest (I think so at least).
690 697 This does not stop mt.exe from embedding/merging the XML properly.
691 698
692 699 Why resource #1 should be used for .exe manifests? I don't know and
693 700 wasn't able to find an explanation for mortals. But it seems to work.
694 701 """
695 702 exefname = self.compiler.executable_filename(self.hgtarget)
696 703 fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
697 704 os.close(fdauto)
698 705 with open(manfname, 'w') as f:
699 706 f.write(self.LONG_PATHS_MANIFEST)
700 707 log.info("long paths manifest is written to '%s'" % manfname)
701 708 inputresource = '-inputresource:%s;#1' % exefname
702 709 outputresource = '-outputresource:%s;#1' % exefname
703 710 log.info("running mt.exe to update hg.exe's manifest in-place")
704 711 # supplying both -manifest and -inputresource to mt.exe makes
705 712 # it merge the embedded and supplied manifests in the -outputresource
706 713 self.spawn(['mt.exe', '-nologo', '-manifest', manfname,
707 714 inputresource, outputresource])
708 715 log.info("done updating hg.exe's manifest")
709 716 os.remove(manfname)
710 717
711 718 @property
712 719 def hgexepath(self):
713 720 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
714 721 return os.path.join(self.build_temp, dir, 'hg.exe')
715 722
716 723 class hgbuilddoc(Command):
717 724 description = 'build documentation'
718 725 user_options = [
719 726 ('man', None, 'generate man pages'),
720 727 ('html', None, 'generate html pages'),
721 728 ]
722 729
723 730 def initialize_options(self):
724 731 self.man = None
725 732 self.html = None
726 733
727 734 def finalize_options(self):
728 735 # If --man or --html are set, only generate what we're told to.
729 736 # Otherwise generate everything.
730 737 have_subset = self.man is not None or self.html is not None
731 738
732 739 if have_subset:
733 740 self.man = True if self.man else False
734 741 self.html = True if self.html else False
735 742 else:
736 743 self.man = True
737 744 self.html = True
738 745
739 746 def run(self):
740 747 def normalizecrlf(p):
741 748 with open(p, 'rb') as fh:
742 749 orig = fh.read()
743 750
744 751 if b'\r\n' not in orig:
745 752 return
746 753
747 754 log.info('normalizing %s to LF line endings' % p)
748 755 with open(p, 'wb') as fh:
749 756 fh.write(orig.replace(b'\r\n', b'\n'))
750 757
751 758 def gentxt(root):
752 759 txt = 'doc/%s.txt' % root
753 760 log.info('generating %s' % txt)
754 761 res, out, err = runcmd(
755 762 [sys.executable, 'gendoc.py', root],
756 763 os.environ,
757 764 cwd='doc')
758 765 if res:
759 766 raise SystemExit('error running gendoc.py: %s' %
760 767 '\n'.join([out, err]))
761 768
762 769 with open(txt, 'wb') as fh:
763 770 fh.write(out)
764 771
765 772 def gengendoc(root):
766 773 gendoc = 'doc/%s.gendoc.txt' % root
767 774
768 775 log.info('generating %s' % gendoc)
769 776 res, out, err = runcmd(
770 777 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
771 778 os.environ,
772 779 cwd='doc')
773 780 if res:
774 781 raise SystemExit('error running gendoc: %s' %
775 782 '\n'.join([out, err]))
776 783
777 784 with open(gendoc, 'wb') as fh:
778 785 fh.write(out)
779 786
780 787 def genman(root):
781 788 log.info('generating doc/%s' % root)
782 789 res, out, err = runcmd(
783 790 [sys.executable, 'runrst', 'hgmanpage', '--halt', 'warning',
784 791 '--strip-elements-with-class', 'htmlonly',
785 792 '%s.txt' % root, root],
786 793 os.environ,
787 794 cwd='doc')
788 795 if res:
789 796 raise SystemExit('error running runrst: %s' %
790 797 '\n'.join([out, err]))
791 798
792 799 normalizecrlf('doc/%s' % root)
793 800
794 801 def genhtml(root):
795 802 log.info('generating doc/%s.html' % root)
796 803 res, out, err = runcmd(
797 804 [sys.executable, 'runrst', 'html', '--halt', 'warning',
798 805 '--link-stylesheet', '--stylesheet-path', 'style.css',
799 806 '%s.txt' % root, '%s.html' % root],
800 807 os.environ,
801 808 cwd='doc')
802 809 if res:
803 810 raise SystemExit('error running runrst: %s' %
804 811 '\n'.join([out, err]))
805 812
806 813 normalizecrlf('doc/%s.html' % root)
807 814
808 815 # This logic is duplicated in doc/Makefile.
809 816 sources = set(f for f in os.listdir('mercurial/help')
810 817 if re.search(r'[0-9]\.txt$', f))
811 818
812 819 # common.txt is a one-off.
813 820 gentxt('common')
814 821
815 822 for source in sorted(sources):
816 823 assert source[-4:] == '.txt'
817 824 root = source[:-4]
818 825
819 826 gentxt(root)
820 827 gengendoc(root)
821 828
822 829 if self.man:
823 830 genman(root)
824 831 if self.html:
825 832 genhtml(root)
826 833
827 834 class hginstall(install):
828 835
829 836 user_options = install.user_options + [
830 837 ('old-and-unmanageable', None,
831 838 'noop, present for eggless setuptools compat'),
832 839 ('single-version-externally-managed', None,
833 840 'noop, present for eggless setuptools compat'),
834 841 ]
835 842
836 843 # Also helps setuptools not be sad while we refuse to create eggs.
837 844 single_version_externally_managed = True
838 845
839 846 def get_sub_commands(self):
840 847 # Screen out egg related commands to prevent egg generation. But allow
841 848 # mercurial.egg-info generation, since that is part of modern
842 849 # packaging.
843 850 excl = set(['bdist_egg'])
844 851 return filter(lambda x: x not in excl, install.get_sub_commands(self))
845 852
846 853 class hginstalllib(install_lib):
847 854 '''
848 855 This is a specialization of install_lib that replaces the copy_file used
849 856 there so that it supports setting the mode of files after copying them,
850 857 instead of just preserving the mode that the files originally had. If your
851 858 system has a umask of something like 027, preserving the permissions when
852 859 copying will lead to a broken install.
853 860
854 861 Note that just passing keep_permissions=False to copy_file would be
855 862 insufficient, as it might still be applying a umask.
856 863 '''
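# Illustrative sketch of the problem described above (modes assumed): under a
# umask of 027 a script built as 0750 would be installed as 0750, leaving
# users outside the owner's group unable to run it. copyfileandsetmode()
# below therefore ignores the source mode and forces 0755 for files carrying
# the user-executable bit and 0644 for everything else.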
857 864
858 865 def run(self):
859 866 realcopyfile = file_util.copy_file
860 867 def copyfileandsetmode(*args, **kwargs):
861 868 src, dst = args[0], args[1]
862 869 dst, copied = realcopyfile(*args, **kwargs)
863 870 if copied:
864 871 st = os.stat(src)
865 872 # Persist executable bit (apply it to group and other if user
866 873 # has it)
867 874 if st[stat.ST_MODE] & stat.S_IXUSR:
868 875 setmode = int('0755', 8)
869 876 else:
870 877 setmode = int('0644', 8)
871 878 m = stat.S_IMODE(st[stat.ST_MODE])
872 879 m = (m & ~int('0777', 8)) | setmode
873 880 os.chmod(dst, m)
874 881 file_util.copy_file = copyfileandsetmode
875 882 try:
876 883 install_lib.run(self)
877 884 finally:
878 885 file_util.copy_file = realcopyfile
879 886
880 887 class hginstallscripts(install_scripts):
881 888 '''
882 889 This is a specialization of install_scripts that replaces @LIBDIR@ with
883 890 the configured directory for modules. If possible, the path is made relative
884 891 to the directory for scripts.
885 892 '''
886 893
887 894 def initialize_options(self):
888 895 install_scripts.initialize_options(self)
889 896
890 897 self.install_lib = None
891 898
892 899 def finalize_options(self):
893 900 install_scripts.finalize_options(self)
894 901 self.set_undefined_options('install',
895 902 ('install_lib', 'install_lib'))
896 903
897 904 def run(self):
898 905 install_scripts.run(self)
899 906
900 907 # It only makes sense to replace @LIBDIR@ with the install path if
901 908 # the install path is known. For wheels, the logic below calculates
902 909 # the libdir to be "../..". This is because the internal layout of a
903 910 # wheel archive looks like:
904 911 #
905 912 # mercurial-3.6.1.data/scripts/hg
906 913 # mercurial/__init__.py
907 914 #
908 915 # When installing wheels, the subdirectories of the "<pkg>.data"
909 916 # directory are translated to system local paths and files therein
910 917 # are copied in place. The mercurial/* files are installed into the
911 918 # site-packages directory. However, the site-packages directory
912 919 # isn't known until wheel install time. This means we have no clue
913 920 # at wheel generation time what the installed site-packages directory
914 921 # will be. And, wheels don't appear to provide the ability to register
915 922 # custom code to run during wheel installation. This all means that
916 923 # we can't reliably set the libdir in wheels: the default behavior
917 924 # of looking in sys.path must do.
918 925
919 926 if (os.path.splitdrive(self.install_dir)[0] !=
920 927 os.path.splitdrive(self.install_lib)[0]):
921 928 # can't make relative paths from one drive to another, so use an
922 929 # absolute path instead
923 930 libdir = self.install_lib
924 931 else:
925 932 common = os.path.commonprefix((self.install_dir, self.install_lib))
926 933 rest = self.install_dir[len(common):]
927 934 uplevel = len([n for n in os.path.split(rest) if n])
928 935
929 936 libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):]
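# Worked example with hypothetical paths: install_dir '/usr/local/bin' and
# install_lib '/usr/local/lib/python2.7/site-packages' share the prefix
# '/usr/local/', rest is 'bin', uplevel is 1, and libdir ends up as
# '../lib/python2.7/site-packages'.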
930 937
931 938 for outfile in self.outfiles:
932 939 with open(outfile, 'rb') as fp:
933 940 data = fp.read()
934 941
935 942 # skip binary files
936 943 if b'\0' in data:
937 944 continue
938 945
939 946 # During local installs, the shebang will be rewritten to the final
940 947 # install path. During wheel packaging, the shebang has a special
941 948 # value.
942 949 if data.startswith(b'#!python'):
943 950 log.info('not rewriting @LIBDIR@ in %s because install path '
944 951 'not known' % outfile)
945 952 continue
946 953
947 954 data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape))
948 955 with open(outfile, 'wb') as fp:
949 956 fp.write(data)
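# Hedged illustration of the rewrite above (placeholder line assumed): a
# generated script containing
#     libdir = '@LIBDIR@'
# is rewritten on a normal install to the relative path computed earlier,
# e.g. '../lib/python2.7/site-packages', while wheel builds are recognized
# by their '#!python' shebang and left untouched, so lookup stays on sys.path.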
950 957
951 958 # virtualenv installs custom distutils/__init__.py and
952 959 # distutils/distutils.cfg files which essentially proxy back to the
953 960 # "real" distutils in the main Python install. The presence of this
954 961 # directory causes py2exe to pick up the "hacked" distutils package
955 962 # from the virtualenv and "import distutils" will fail from the py2exe
956 963 # build because the "real" distutils files can't be located.
957 964 #
958 965 # We work around this by monkeypatching the py2exe routine that finds Python
959 966 # modules, replacing the virtualenv's distutils modules with the
960 967 # original versions via filesystem scanning. This is a bit hacky. But
961 968 # it allows us to use virtualenvs for py2exe packaging, which is more
962 969 # deterministic and reproducible.
963 970 #
964 971 # It's worth noting that the common StackOverflow suggestions for this
965 972 # problem involve copying the original distutils files into the
966 973 # virtualenv or into the staging directory after setup() is invoked.
967 974 # The former is very brittle and can easily break setup(). Our hacking
968 975 # of the found-modules routine achieves a similar result to copying the files
969 976 # manually. But it makes fewer assumptions about how py2exe works and
970 977 # is less brittle.
971 978
972 979 # This only catches virtualenvs made with virtualenv (as opposed to
973 980 # venv, which is likely what Python 3 uses).
974 981 py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None
975 982
976 983 if py2exehacked:
977 984 from distutils.command.py2exe import py2exe as buildpy2exe
978 985 from py2exe.mf import Module as py2exemodule
979 986
980 987 class hgbuildpy2exe(buildpy2exe):
981 988 def find_needed_modules(self, mf, files, modules):
982 989 res = buildpy2exe.find_needed_modules(self, mf, files, modules)
983 990
984 991 # Replace virtualenv's distutils modules with the real ones.
985 992 modules = {}
986 993 for k, v in res.modules.items():
987 994 if k != 'distutils' and not k.startswith('distutils.'):
988 995 modules[k] = v
989 996
990 997 res.modules = modules
991 998
992 999 import opcode
993 1000 distutilsreal = os.path.join(os.path.dirname(opcode.__file__),
994 1001 'distutils')
995 1002
996 1003 for root, dirs, files in os.walk(distutilsreal):
997 1004 for f in sorted(files):
998 1005 if not f.endswith('.py'):
999 1006 continue
1000 1007
1001 1008 full = os.path.join(root, f)
1002 1009
1003 1010 parents = ['distutils']
1004 1011
1005 1012 if root != distutilsreal:
1006 1013 rel = os.path.relpath(root, distutilsreal)
1007 1014 parents.extend(p for p in rel.split(os.sep))
1008 1015
1009 1016 modname = '%s.%s' % ('.'.join(parents), f[:-3])
1010 1017
1011 1018 if modname.startswith('distutils.tests.'):
1012 1019 continue
1013 1020
1014 1021 if modname.endswith('.__init__'):
1015 1022 modname = modname[:-len('.__init__')]
1016 1023 path = os.path.dirname(full)
1017 1024 else:
1018 1025 path = None
1019 1026
1020 1027 res.modules[modname] = py2exemodule(modname, full,
1021 1028 path=path)
1022 1029
1023 1030 if 'distutils' not in res.modules:
1024 1031 raise SystemExit('could not find distutils modules')
1025 1032
1026 1033 return res
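# Worked example of the module renaming above (paths assumed): for
# <distutilsreal>/command/build.py, rel is 'command', parents becomes
# ['distutils', 'command'] and modname is 'distutils.command.build'; for
# command/__init__.py the '.__init__' suffix is stripped so the package
# 'distutils.command' is registered with its directory as the module path.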
1027 1034
1028 1035 cmdclass = {'build': hgbuild,
1029 1036 'build_doc': hgbuilddoc,
1030 1037 'build_mo': hgbuildmo,
1031 1038 'build_ext': hgbuildext,
1032 1039 'build_py': hgbuildpy,
1033 1040 'build_scripts': hgbuildscripts,
1034 1041 'build_hgextindex': buildhgextindex,
1035 1042 'install': hginstall,
1036 1043 'install_lib': hginstalllib,
1037 1044 'install_scripts': hginstallscripts,
1038 1045 'build_hgexe': buildhgexe,
1039 1046 }
1040 1047
1041 1048 if py2exehacked:
1042 1049 cmdclass['py2exe'] = hgbuildpy2exe
1043 1050
1044 1051 packages = ['mercurial',
1045 1052 'mercurial.cext',
1046 1053 'mercurial.cffi',
1047 1054 'mercurial.hgweb',
1048 1055 'mercurial.pure',
1049 1056 'mercurial.thirdparty',
1050 1057 'mercurial.thirdparty.attr',
1051 1058 'mercurial.thirdparty.zope',
1052 1059 'mercurial.thirdparty.zope.interface',
1053 1060 'mercurial.utils',
1054 1061 'mercurial.revlogutils',
1055 1062 'mercurial.testing',
1056 1063 'hgext', 'hgext.convert', 'hgext.fsmonitor',
1057 1064 'hgext.fastannotate',
1058 1065 'hgext.fsmonitor.pywatchman',
1059 1066 'hgext.infinitepush',
1060 1067 'hgext.highlight',
1061 1068 'hgext.largefiles', 'hgext.lfs', 'hgext.narrow',
1062 1069 'hgext.remotefilelog',
1063 1070 'hgext.zeroconf', 'hgext3rd',
1064 1071 'hgdemandimport']
1065 1072 if sys.version_info[0] == 2:
1066 1073 packages.extend(['mercurial.thirdparty.concurrent',
1067 1074 'mercurial.thirdparty.concurrent.futures'])
1068 1075
1069 1076 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1070 1077 # py2exe can't cope with namespace packages very well, so we have to
1071 1078 # install any hgext3rd.* extensions that we want in the final py2exe
1072 1079 # image here. This is gross, but you gotta do what you gotta do.
1073 1080 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
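# Example invocation (extension name hypothetical), typically on the Windows
# machine doing the py2exe build:
#     set HG_PY2EXE_EXTRA_INSTALL_PACKAGES=hgext3rd.evolve
#     python setup.py py2exe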
1074 1081
1075 1082 common_depends = ['mercurial/bitmanipulation.h',
1076 1083 'mercurial/compat.h',
1077 1084 'mercurial/cext/util.h']
1078 1085 common_include_dirs = ['mercurial']
1079 1086
1080 1087 osutil_cflags = []
1081 1088 osutil_ldflags = []
1082 1089
1083 1090 # platform specific macros
1084 1091 for plat, func in [('bsd', 'setproctitle')]:
1085 1092 if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
1086 1093 osutil_cflags.append('-DHAVE_%s' % func.upper())
1087 1094
1088 1095 for plat, macro, code in [
1089 1096 ('bsd|darwin', 'BSD_STATFS', '''
1090 1097 #include <sys/param.h>
1091 1098 #include <sys/mount.h>
1092 1099 int main() { struct statfs s; return sizeof(s.f_fstypename); }
1093 1100 '''),
1094 1101 ('linux', 'LINUX_STATFS', '''
1095 1102 #include <linux/magic.h>
1096 1103 #include <sys/vfs.h>
1097 1104 int main() { struct statfs s; return sizeof(s.f_type); }
1098 1105 '''),
1099 1106 ]:
1100 1107 if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
1101 1108 osutil_cflags.append('-DHAVE_%s' % macro)
1102 1109
1103 1110 if sys.platform == 'darwin':
1104 1111 osutil_ldflags += ['-framework', 'ApplicationServices']
1105 1112
1106 1113 xdiff_srcs = [
1107 1114 'mercurial/thirdparty/xdiff/xdiffi.c',
1108 1115 'mercurial/thirdparty/xdiff/xprepare.c',
1109 1116 'mercurial/thirdparty/xdiff/xutils.c',
1110 1117 ]
1111 1118
1112 1119 xdiff_headers = [
1113 1120 'mercurial/thirdparty/xdiff/xdiff.h',
1114 1121 'mercurial/thirdparty/xdiff/xdiffi.h',
1115 1122 'mercurial/thirdparty/xdiff/xinclude.h',
1116 1123 'mercurial/thirdparty/xdiff/xmacros.h',
1117 1124 'mercurial/thirdparty/xdiff/xprepare.h',
1118 1125 'mercurial/thirdparty/xdiff/xtypes.h',
1119 1126 'mercurial/thirdparty/xdiff/xutils.h',
1120 1127 ]
1121 1128
1122 1129 class RustCompilationError(CCompilerError):
1123 1130 """Exception class for Rust compilation errors."""
1124 1131
1125 1132 class RustExtension(Extension):
1126 1133 """Base classes for concrete Rust Extension classes.
1127 1134 """
1128 1135
1129 1136 rusttargetdir = os.path.join('rust', 'target', 'release')
1130 1137
1131 1138 def __init__(self, mpath, sources, rustlibname, subcrate,
1132 1139 py3_features=None, **kw):
1133 1140 Extension.__init__(self, mpath, sources, **kw)
1134 1141 if hgrustext is None:
1135 1142 return
1136 1143 srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
1137 1144 self.py3_features = py3_features
1138 1145
1139 1146 # adding Rust source and control files to depends so that the extension
1140 1147 # gets rebuilt if they've changed
1141 1148 self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
1142 1149 cargo_lock = os.path.join(srcdir, 'Cargo.lock')
1143 1150 if os.path.exists(cargo_lock):
1144 1151 self.depends.append(cargo_lock)
1145 1152 for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
1146 1153 self.depends.extend(os.path.join(dirpath, fname)
1147 1154 for fname in fnames
1148 1155 if os.path.splitext(fname)[1] == '.rs')
1149 1156
1150 1157 def rustbuild(self):
1151 1158 if hgrustext is None:
1152 1159 return
1153 1160 env = os.environ.copy()
1154 1161 if 'HGTEST_RESTOREENV' in env:
1155 1162 # Mercurial tests change HOME to a temporary directory,
1156 1163 # but, if installed with rustup, the Rust toolchain needs
1157 1164 # HOME to be correct (otherwise the 'no default toolchain'
1158 1165 # error message is issued and the build fails).
1159 1166 # This happens currently with test-hghave.t, which does
1160 1167 # invoke this build.
1161 1168
1162 1169 # Unix only fix (os.path.expanduser not really reliable if
1163 1170 # HOME is shadowed like this)
1164 1171 import pwd
1165 1172 env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
1166 1173
1167 1174 cargocmd = ['cargo', 'build', '-vv', '--release']
1168 1175 if sys.version_info[0] == 3 and self.py3_features is not None:
1169 1176 cargocmd.extend(('--features', self.py3_features,
1170 1177 '--no-default-features'))
1171 1178 try:
1172 1179 subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
1173 1180 except OSError as exc:
1174 1181 if exc.errno == errno.ENOENT:
1175 1182 raise RustCompilationError("Cargo not found")
1176 1183 elif exc.errno == errno.EACCES:
1177 1184 raise RustCompilationError(
1178 1185 "Cargo found, but permisssion to execute it is denied")
1179 1186 else:
1180 1187 raise
1181 1188 except subprocess.CalledProcessError:
1182 1189 raise RustCompilationError(
1183 1190 "Cargo failed. Working directory: %r, "
1184 1191 "command: %r, environment: %r"
1185 1192 % (self.rustsrcdir, cargocmd, env))
1186 1193
1187 1194 class RustEnhancedExtension(RustExtension):
1188 1195 """A C Extension, conditionally enhanced with Rust code.
1189 1196
1190 1197 If the HGRUSTEXT environment variable is set to something other
1191 1198 than 'cpython', the Rust sources get compiled and linked within the
1192 1199 C target shared library object.
1193 1200 """
1194 1201
1195 1202 def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
1196 1203 RustExtension.__init__(self, mpath, sources, rustlibname, subcrate,
1197 1204 **kw)
1198 1205 if hgrustext != 'direct-ffi':
1199 1206 return
1200 1207 self.extra_compile_args.append('-DWITH_RUST')
1201 1208 self.libraries.append(rustlibname)
1202 1209 self.library_dirs.append(self.rusttargetdir)
1203 1210
1204 1211 class RustStandaloneExtension(RustExtension):
1205 1212
1206 1213 def __init__(self, pydottedname, rustcrate, dylibname, **kw):
1207 1214 RustExtension.__init__(self, pydottedname, [], dylibname, rustcrate,
1208 1215 **kw)
1209 1216 self.dylibname = dylibname
1210 1217
1211 1218 def build(self, target_dir):
1212 1219 self.rustbuild()
1213 1220 target = [target_dir]
1214 1221 target.extend(self.name.split('.'))
1215 1222 ext = '.so' # TODO Unix only
1216 1223 target[-1] += ext
1217 1224 shutil.copy2(os.path.join(self.rusttargetdir, self.dylibname + ext),
1218 1225 os.path.join(*target))
1219 1226
1220 1227
1221 1228 extmodules = [
1222 1229 Extension('mercurial.cext.base85', ['mercurial/cext/base85.c'],
1223 1230 include_dirs=common_include_dirs,
1224 1231 depends=common_depends),
1225 1232 Extension('mercurial.cext.bdiff', ['mercurial/bdiff.c',
1226 1233 'mercurial/cext/bdiff.c'] + xdiff_srcs,
1227 1234 include_dirs=common_include_dirs,
1228 1235 depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers),
1229 1236 Extension('mercurial.cext.mpatch', ['mercurial/mpatch.c',
1230 1237 'mercurial/cext/mpatch.c'],
1231 1238 include_dirs=common_include_dirs,
1232 1239 depends=common_depends),
1233 1240 RustEnhancedExtension(
1234 1241 'mercurial.cext.parsers', ['mercurial/cext/charencode.c',
1235 1242 'mercurial/cext/dirs.c',
1236 1243 'mercurial/cext/manifest.c',
1237 1244 'mercurial/cext/parsers.c',
1238 1245 'mercurial/cext/pathencode.c',
1239 1246 'mercurial/cext/revlog.c'],
1240 1247 'hgdirectffi',
1241 1248 'hg-direct-ffi',
1242 1249 include_dirs=common_include_dirs,
1243 1250 depends=common_depends + ['mercurial/cext/charencode.h',
1244 1251 'mercurial/cext/revlog.h',
1245 1252 'rust/hg-core/src/ancestors.rs',
1246 1253 'rust/hg-core/src/lib.rs']),
1247 1254 Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
1248 1255 include_dirs=common_include_dirs,
1249 1256 extra_compile_args=osutil_cflags,
1250 1257 extra_link_args=osutil_ldflags,
1251 1258 depends=common_depends),
1252 1259 Extension(
1253 1260 'mercurial.thirdparty.zope.interface._zope_interface_coptimizations', [
1254 1261 'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
1255 1262 ]),
1256 1263 Extension('hgext.fsmonitor.pywatchman.bser',
1257 1264 ['hgext/fsmonitor/pywatchman/bser.c']),
1258 1265 ]
1259 1266
1260 1267 if hgrustext == 'cpython':
1261 1268 extmodules.append(
1262 1269 RustStandaloneExtension('mercurial.rustext', 'hg-cpython', 'librusthg',
1263 1270 py3_features='python3')
1264 1271 )
1265 1272
1266 1273
1267 1274 sys.path.insert(0, 'contrib/python-zstandard')
1268 1275 import setup_zstd
1269 1276 extmodules.append(setup_zstd.get_c_extension(
1270 1277 name='mercurial.zstd',
1271 1278 root=os.path.abspath(os.path.dirname(__file__))))
1272 1279
1273 1280 try:
1274 1281 from distutils import cygwinccompiler
1275 1282
1276 1283 # the -mno-cygwin option has been deprecated for years
1277 1284 mingw32compilerclass = cygwinccompiler.Mingw32CCompiler
1278 1285
1279 1286 class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
1280 1287 def __init__(self, *args, **kwargs):
1281 1288 mingw32compilerclass.__init__(self, *args, **kwargs)
1282 1289 for i in 'compiler compiler_so linker_exe linker_so'.split():
1283 1290 try:
1284 1291 getattr(self, i).remove('-mno-cygwin')
1285 1292 except ValueError:
1286 1293 pass
1287 1294
1288 1295 cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
1289 1296 except ImportError:
1290 1297 # the cygwinccompiler package is not available on some Python
1291 1298 # distributions like the ones from the optware project for Synology
1292 1299 # DiskStation boxes
1293 1300 class HackedMingw32CCompiler(object):
1294 1301 pass
1295 1302
1296 1303 if os.name == 'nt':
1297 1304 # Allow compiler/linker flags to be added to Visual Studio builds. Passing
1298 1305 # extra_link_args to distutils.extensions.Extension() doesn't have any
1299 1306 # effect.
1300 1307 from distutils import msvccompiler
1301 1308
1302 1309 msvccompilerclass = msvccompiler.MSVCCompiler
1303 1310
1304 1311 class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
1305 1312 def initialize(self):
1306 1313 msvccompilerclass.initialize(self)
1307 1314 # "warning LNK4197: export 'func' specified multiple times"
1308 1315 self.ldflags_shared.append('/ignore:4197')
1309 1316 self.ldflags_shared_debug.append('/ignore:4197')
1310 1317
1311 1318 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1312 1319
1313 1320 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
1314 1321 'help/*.txt',
1315 1322 'help/internals/*.txt',
1316 1323 'default.d/*.rc',
1317 1324 'dummycert.pem']}
1318 1325
1319 1326 def ordinarypath(p):
1320 1327 return p and p[0] != '.' and p[-1] != '~'
1321 1328
1322 1329 for root in ('templates',):
1323 1330 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
1324 1331 curdir = curdir.split(os.sep, 1)[1]
1325 1332 dirs[:] = filter(ordinarypath, dirs)
1326 1333 for f in filter(ordinarypath, files):
1327 1334 f = os.path.join(curdir, f)
1328 1335 packagedata['mercurial'].append(f)
1329 1336
1330 1337 datafiles = []
1331 1338
1332 1339 # distutils expects version to be str/unicode. Converting it to
1333 1340 # unicode on Python 2 still works because it won't contain any
1334 1341 # non-ascii bytes and will be implicitly converted back to bytes
1335 1342 # when operated on.
1336 1343 assert isinstance(version, bytes)
1337 1344 setupversion = version.decode('ascii')
1338 1345
1339 1346 extra = {}
1340 1347
1341 1348 py2exepackages = [
1342 1349 'hgdemandimport',
1343 1350 'hgext3rd',
1344 1351 'hgext',
1345 1352 'email',
1346 1353 # implicitly imported per module policy
1347 1354 # (cffi wouldn't be used as a frozen exe)
1348 1355 'mercurial.cext',
1349 1356 #'mercurial.cffi',
1350 1357 'mercurial.pure',
1351 1358 ]
1352 1359
1353 1360 py2exeexcludes = []
1354 1361 py2exedllexcludes = ['crypt32.dll']
1355 1362
1356 1363 if issetuptools:
1357 1364 extra['python_requires'] = supportedpy
1358 1365
1359 1366 if py2exeloaded:
1360 1367 extra['console'] = [
1361 1368 {'script':'hg',
1362 1369 'copyright':'Copyright (C) 2005-2019 Matt Mackall and others',
1363 1370 'product_version':version}]
1364 1371 # Sub command of 'build' because 'py2exe' does not handle sub_commands.
1365 1372 # Need to override hgbuild because it has a private copy of
1366 1373 # build.sub_commands.
1367 1374 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1368 1375 # put dlls in sub directory so that they won't pollute PATH
1369 1376 extra['zipfile'] = 'lib/library.zip'
1370 1377
1371 1378 # We allow some configuration to be supplemented via environment
1372 1379 # variables. This is better than setup.cfg files because it allows
1373 1380 # supplementing configs instead of replacing them.
1374 1381 extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
1375 1382 if extrapackages:
1376 1383 py2exepackages.extend(extrapackages.split(' '))
1377 1384
1378 1385 excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
1379 1386 if excludes:
1380 1387 py2exeexcludes.extend(excludes.split(' '))
1381 1388
1382 1389 dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
1383 1390 if dllexcludes:
1384 1391 py2exedllexcludes.extend(dllexcludes.split(' '))
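# Example (values assumed) of supplementing the py2exe configuration via the
# environment before running "python setup.py py2exe":
#     set HG_PY2EXE_EXTRA_PACKAGES=keyring pygments
#     set HG_PY2EXE_EXTRA_DLL_EXCLUDES=msvcp90.dll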
1385 1392
1386 1393 if os.name == 'nt':
1387 1394 # Windows binary file versions for exe/dll files must have the
1388 1395 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
1389 1396 setupversion = setupversion.split(r'+', 1)[0]
1390 1397
1391 1398 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
1392 1399 version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
1393 1400 if version:
1394 1401 version = version[0]
1395 1402 if sys.version_info[0] == 3:
1396 1403 version = version.decode('utf-8')
1397 1404 xcode4 = (version.startswith('Xcode') and
1398 1405 StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
1399 1406 xcode51 = re.match(r'^Xcode\s+5\.1', version) is not None
1400 1407 else:
1401 1408 # xcodebuild returns empty output on OS X Lion when Xcode 4.3 is not
1402 1409 # installed and only the command-line tools are present. Assume
1403 1410 # that only happens on >= Lion, thus no PPC support.
1404 1411 xcode4 = True
1405 1412 xcode51 = False
1406 1413
1407 1414 # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
1408 1415 # distutils.sysconfig
1409 1416 if xcode4:
1410 1417 os.environ['ARCHFLAGS'] = ''
1411 1418
1412 1419 # XCode 5.1 changes clang such that it now fails to compile if the
1413 1420 # -mno-fused-madd flag is passed, but the version of Python shipped with
1414 1421 # OS X 10.9 Mavericks includes this flag. This causes problems in all
1415 1422 # C extension modules, and a bug has been filed upstream at
1416 1423 # http://bugs.python.org/issue21244. We also need to patch this here
1417 1424 # so Mercurial can continue to compile in the meantime.
1418 1425 if xcode51:
1419 1426 cflags = get_config_var('CFLAGS')
1420 1427 if cflags and re.search(r'-mno-fused-madd\b', cflags) is not None:
1421 1428 os.environ['CFLAGS'] = (
1422 1429 os.environ.get('CFLAGS', '') + ' -Qunused-arguments')
1423 1430
1424 1431 setup(name='mercurial',
1425 1432 version=setupversion,
1426 1433 author='Matt Mackall and many others',
1427 1434 author_email='mercurial@mercurial-scm.org',
1428 1435 url='https://mercurial-scm.org/',
1429 1436 download_url='https://mercurial-scm.org/release/',
1430 1437 description=('Fast scalable distributed SCM (revision control, version '
1431 1438 'control) system'),
1432 1439 long_description=('Mercurial is a distributed SCM tool written in Python.'
1433 1440 ' It is used by a number of large projects that require'
1434 1441 ' fast, reliable distributed revision control, such as '
1435 1442 'Mozilla.'),
1436 1443 license='GNU GPLv2 or any later version',
1437 1444 classifiers=[
1438 1445 'Development Status :: 6 - Mature',
1439 1446 'Environment :: Console',
1440 1447 'Intended Audience :: Developers',
1441 1448 'Intended Audience :: System Administrators',
1442 1449 'License :: OSI Approved :: GNU General Public License (GPL)',
1443 1450 'Natural Language :: Danish',
1444 1451 'Natural Language :: English',
1445 1452 'Natural Language :: German',
1446 1453 'Natural Language :: Italian',
1447 1454 'Natural Language :: Japanese',
1448 1455 'Natural Language :: Portuguese (Brazilian)',
1449 1456 'Operating System :: Microsoft :: Windows',
1450 1457 'Operating System :: OS Independent',
1451 1458 'Operating System :: POSIX',
1452 1459 'Programming Language :: C',
1453 1460 'Programming Language :: Python',
1454 1461 'Topic :: Software Development :: Version Control',
1455 1462 ],
1456 1463 scripts=scripts,
1457 1464 packages=packages,
1458 1465 ext_modules=extmodules,
1459 1466 data_files=datafiles,
1460 1467 package_data=packagedata,
1461 1468 cmdclass=cmdclass,
1462 1469 distclass=hgdist,
1463 1470 options={
1464 1471 'py2exe': {
1465 1472 'bundle_files': 3,
1466 1473 'dll_excludes': py2exedllexcludes,
1467 1474 'excludes': py2exeexcludes,
1468 1475 'packages': py2exepackages,
1469 1476 },
1470 1477 'bdist_mpkg': {
1471 1478 'zipdist': False,
1472 1479 'license': 'COPYING',
1473 1480 'readme': 'contrib/packaging/macosx/Readme.html',
1474 1481 'welcome': 'contrib/packaging/macosx/Welcome.html',
1475 1482 },
1476 1483 },
1477 1484 **extra)