py3: wrap tempfile.NamedTemporaryFile() to return bytes fp.name...
Yuya Nishihara
r38184:cc9aa887 default
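The change wraps tempfile.NamedTemporaryFile() behind a compatibility helper so that fp.name comes back as bytes rather than str on Python 3. As a rough sketch only (the actual mercurial.pycompat.namedtempfile() implementation may differ), such a wrapper could look like this:

    import os
    import sys
    import tempfile

    class _bytesnameproxy(object):
        """Proxy a file object, re-exposing .name as a bytes path."""
        def __init__(self, fp):
            self._fp = fp
            self.name = os.fsencode(fp.name)

        def __getattr__(self, attr):
            return getattr(self._fp, attr)

        def __enter__(self):
            self._fp.__enter__()
            return self

        def __exit__(self, *exc):
            return self._fp.__exit__(*exc)

    def namedtempfile(*args, **kwargs):
        # illustrative sketch, not the real pycompat implementation
        fp = tempfile.NamedTemporaryFile(*args, **kwargs)
        if sys.version_info[0] >= 3 and isinstance(fp.name, str):
            # Mercurial handles file names as bytes throughout, so expose
            # the temporary file's path in that convention on Python 3
            return _bytesnameproxy(fp)
        return fp

The first hunk below keeps the bundle store calling the stdlib class directly and only adds TODO notes; the second hunk switches debugfsinfo() over to the wrapper.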
@@ -1,154 +1,158 b''
1 1 # This software may be used and distributed according to the terms of the
2 2 # GNU General Public License version 2 or any later version.
3 3
4 4 # based on bundleheads extension by Gregory Szorc <gps@mozilla.com>
5 5
6 6 from __future__ import absolute_import
7 7
8 8 import abc
9 9 import hashlib
10 10 import os
11 11 import subprocess
12 12 import tempfile
13 13
14 14 NamedTemporaryFile = tempfile.NamedTemporaryFile
15 15
16 16 class BundleWriteException(Exception):
17 17 pass
18 18
19 19 class BundleReadException(Exception):
20 20 pass
21 21
22 22 class abstractbundlestore(object):
23 23 """Defines the interface for bundle stores.
24 24
25 25 A bundle store is an entity that stores raw bundle data. It is a simple
26 26 key-value store. However, the keys are chosen by the store. The keys can
27 27 be any Python object understood by the corresponding bundle index (see
28 28 ``abstractbundleindex`` below).
29 29 """
30 30 __metaclass__ = abc.ABCMeta
31 31
32 32 @abc.abstractmethod
33 33 def write(self, data):
34 34 """Write bundle data to the store.
35 35
36 36 This function receives the raw data to be written as a str.
37 37 Throws BundleWriteException
38 38 The key of the written data MUST be returned.
39 39 """
40 40
41 41 @abc.abstractmethod
42 42 def read(self, key):
43 43 """Obtain bundle data for a key.
44 44
45 45 Returns None if the bundle isn't known.
46 46 Throws BundleReadException
47 47 The returned object should be a file object supporting read()
48 48 and close().
49 49 """
50 50
51 51 class filebundlestore(object):
52 52 """bundle store in filesystem
53 53
54 54 meant for storing bundles somewhere on disk and on network filesystems
55 55 """
56 56 def __init__(self, ui, repo):
57 57 self.ui = ui
58 58 self.repo = repo
59 59 self.storepath = ui.configpath('scratchbranch', 'storepath')
60 60 if not self.storepath:
61 61 self.storepath = self.repo.vfs.join("scratchbranches",
62 62 "filebundlestore")
63 63 if not os.path.exists(self.storepath):
64 64 os.makedirs(self.storepath)
65 65
66 66 def _dirpath(self, hashvalue):
67 67 """First two bytes of the hash are the name of the upper
68 68 level directory, next two bytes are the name of the
69 69 next level directory"""
70 70 return os.path.join(self.storepath, hashvalue[0:2], hashvalue[2:4])
71 71
72 72 def _filepath(self, filename):
73 73 return os.path.join(self._dirpath(filename), filename)
74 74
75 75 def write(self, data):
76 76 filename = hashlib.sha1(data).hexdigest()
77 77 dirpath = self._dirpath(filename)
78 78
79 79 if not os.path.exists(dirpath):
80 80 os.makedirs(dirpath)
81 81
82 82 with open(self._filepath(filename), 'wb') as f:
83 83 f.write(data)
84 84
85 85 return filename
86 86
87 87 def read(self, key):
88 88 try:
89 89 with open(self._filepath(key), 'rb') as f:
90 90 return f.read()
91 91 except IOError:
92 92 return None
93 93
94 94 class externalbundlestore(abstractbundlestore):
95 95 def __init__(self, put_binary, put_args, get_binary, get_args):
96 96 """
97 97 `put_binary` - path to binary file which uploads bundle to external
98 98 storage and prints key to stdout
99 99 `put_args` - format string with additional args to `put_binary`
100 100 {filename} replacement field can be used.
101 101 `get_binary` - path to binary file which accepts filename and key
102 102 (in that order), downloads bundle from store and saves it to file
103 103 `get_args` - format string with additional args to `get_binary`.
104 104 {filename} and {handle} replacement field can be used.
105 105 """
106 106
107 107 self.put_args = put_args
108 108 self.get_args = get_args
109 109 self.put_binary = put_binary
110 110 self.get_binary = get_binary
111 111
112 112 def _call_binary(self, args):
113 113 p = subprocess.Popen(
114 114 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
115 115 close_fds=True)
116 116 stdout, stderr = p.communicate()
117 117 returncode = p.returncode
118 118 return returncode, stdout, stderr
119 119
120 120 def write(self, data):
121 121 # Won't work on windows because you can't open file second time without
122 122 # closing it
123 # TODO: rewrite without str.format() and replace NamedTemporaryFile()
124 # with pycompat.namedtempfile()
123 125 with NamedTemporaryFile() as temp:
124 126 temp.write(data)
125 127 temp.flush()
126 128 temp.seek(0)
127 129 formatted_args = [arg.format(filename=temp.name)
128 130 for arg in self.put_args]
129 131 returncode, stdout, stderr = self._call_binary(
130 132 [self.put_binary] + formatted_args)
131 133
132 134 if returncode != 0:
133 135 raise BundleWriteException(
134 136 'Failed to upload to external store: %s' % stderr)
135 137 stdout_lines = stdout.splitlines()
136 138 if len(stdout_lines) == 1:
137 139 return stdout_lines[0]
138 140 else:
139 141 raise BundleWriteException(
140 142 'Bad output from %s: %s' % (self.put_binary, stdout))
141 143
142 144 def read(self, handle):
143 145 # Won't work on windows because you can't open file second time without
144 146 # closing it
147 # TODO: rewrite without str.format() and replace NamedTemporaryFile()
148 # with pycompat.namedtempfile()
145 149 with NamedTemporaryFile() as temp:
146 150 formatted_args = [arg.format(filename=temp.name, handle=handle)
147 151 for arg in self.get_args]
148 152 returncode, stdout, stderr = self._call_binary(
149 153 [self.get_binary] + formatted_args)
150 154
151 155 if returncode != 0:
152 156 raise BundleReadException(
153 157 'Failed to download from external store: %s' % stderr)
154 158 return temp.read()
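Two mechanisms in the store module above may be easier to see in isolation: filebundlestore shards bundles on disk under a two-level directory derived from the SHA-1 hex digest, and externalbundlestore expands {filename}/{handle} placeholders in its argument templates before shelling out. A small self-contained sketch (store root and argument lists are hypothetical):

    import hashlib
    import os

    storepath = '/tmp/filebundlestore'            # hypothetical store root
    data = b'raw bundle bytes'
    key = hashlib.sha1(data).hexdigest()
    # the first two hex digits name the top-level directory and the next two
    # the second level, mirroring _dirpath()/_filepath() above
    bundlepath = os.path.join(storepath, key[0:2], key[2:4], key)

    # externalbundlestore formats its helper's arguments the same way before
    # handing them to subprocess via _call_binary()
    get_args = ['--output', '{filename}', '--key', '{handle}']  # hypothetical
    formatted = [arg.format(filename='/tmp/bundle.tmp', handle=key)
                 for arg in get_args]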
@@ -1,3138 +1,3137 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 import tempfile
25 24 import time
26 25
27 26 from .i18n import _
28 27 from .node import (
29 28 bin,
30 29 hex,
31 30 nullhex,
32 31 nullid,
33 32 nullrev,
34 33 short,
35 34 )
36 35 from .thirdparty import (
37 36 cbor,
38 37 )
39 38 from . import (
40 39 bundle2,
41 40 changegroup,
42 41 cmdutil,
43 42 color,
44 43 context,
45 44 dagparser,
46 45 dagutil,
47 46 encoding,
48 47 error,
49 48 exchange,
50 49 extensions,
51 50 filemerge,
52 51 fileset,
53 52 formatter,
54 53 hg,
55 54 httppeer,
56 55 localrepo,
57 56 lock as lockmod,
58 57 logcmdutil,
59 58 merge as mergemod,
60 59 obsolete,
61 60 obsutil,
62 61 phases,
63 62 policy,
64 63 pvec,
65 64 pycompat,
66 65 registrar,
67 66 repair,
68 67 revlog,
69 68 revset,
70 69 revsetlang,
71 70 scmutil,
72 71 setdiscovery,
73 72 simplemerge,
74 73 smartset,
75 74 sshpeer,
76 75 sslutil,
77 76 streamclone,
78 77 templater,
79 78 treediscovery,
80 79 upgrade,
81 80 url as urlmod,
82 81 util,
83 82 vfs as vfsmod,
84 83 wireprotoframing,
85 84 wireprotoserver,
86 85 wireprotov2peer,
87 86 )
88 87 from .utils import (
89 88 dateutil,
90 89 procutil,
91 90 stringutil,
92 91 )
93 92
94 93 release = lockmod.release
95 94
96 95 command = registrar.command()
97 96
98 97 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 98 def debugancestor(ui, repo, *args):
100 99 """find the ancestor revision of two revisions in a given index"""
101 100 if len(args) == 3:
102 101 index, rev1, rev2 = args
103 102 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
104 103 lookup = r.lookup
105 104 elif len(args) == 2:
106 105 if not repo:
107 106 raise error.Abort(_('there is no Mercurial repository here '
108 107 '(.hg not found)'))
109 108 rev1, rev2 = args
110 109 r = repo.changelog
111 110 lookup = repo.lookup
112 111 else:
113 112 raise error.Abort(_('either two or three arguments required'))
114 113 a = r.ancestor(lookup(rev1), lookup(rev2))
115 114 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 115
117 116 @command('debugapplystreamclonebundle', [], 'FILE')
118 117 def debugapplystreamclonebundle(ui, repo, fname):
119 118 """apply a stream clone bundle file"""
120 119 f = hg.openpath(ui, fname)
121 120 gen = exchange.readbundle(ui, f, fname)
122 121 gen.apply(repo)
123 122
124 123 @command('debugbuilddag',
125 124 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 125 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 126 ('n', 'new-file', None, _('add new file at each rev'))],
128 127 _('[OPTION]... [TEXT]'))
129 128 def debugbuilddag(ui, repo, text=None,
130 129 mergeable_file=False,
131 130 overwritten_file=False,
132 131 new_file=False):
133 132 """builds a repo with a given DAG from scratch in the current empty repo
134 133
135 134 The description of the DAG is read from stdin if not given on the
136 135 command line.
137 136
138 137 Elements:
139 138
140 139 - "+n" is a linear run of n nodes based on the current default parent
141 140 - "." is a single node based on the current default parent
142 141 - "$" resets the default parent to null (implied at the start);
143 142 otherwise the default parent is always the last node created
144 143 - "<p" sets the default parent to the backref p
145 144 - "*p" is a fork at parent p, which is a backref
146 145 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 146 - "/p2" is a merge of the preceding node and p2
148 147 - ":tag" defines a local tag for the preceding node
149 148 - "@branch" sets the named branch for subsequent nodes
150 149 - "#...\\n" is a comment up to the end of the line
151 150
152 151 Whitespace between the above elements is ignored.
153 152
154 153 A backref is either
155 154
156 155 - a number n, which references the node curr-n, where curr is the current
157 156 node, or
158 157 - the name of a local tag you placed earlier using ":tag", or
159 158 - empty to denote the default parent.
160 159
161 160 All string valued-elements are either strictly alphanumeric, or must
162 161 be enclosed in double quotes ("..."), with "\\" as escape character.
163 162 """
164 163
165 164 if text is None:
166 165 ui.status(_("reading DAG from stdin\n"))
167 166 text = ui.fin.read()
168 167
169 168 cl = repo.changelog
170 169 if len(cl) > 0:
171 170 raise error.Abort(_('repository is not empty'))
172 171
173 172 # determine number of revs in DAG
174 173 total = 0
175 174 for type, data in dagparser.parsedag(text):
176 175 if type == 'n':
177 176 total += 1
178 177
179 178 if mergeable_file:
180 179 linesperrev = 2
181 180 # make a file with k lines per rev
182 181 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
183 182 initialmergedlines.append("")
184 183
185 184 tags = []
186 185
187 186 wlock = lock = tr = None
188 187 try:
189 188 wlock = repo.wlock()
190 189 lock = repo.lock()
191 190 tr = repo.transaction("builddag")
192 191
193 192 at = -1
194 193 atbranch = 'default'
195 194 nodeids = []
196 195 id = 0
197 196 ui.progress(_('building'), id, unit=_('revisions'), total=total)
198 197 for type, data in dagparser.parsedag(text):
199 198 if type == 'n':
200 199 ui.note(('node %s\n' % pycompat.bytestr(data)))
201 200 id, ps = data
202 201
203 202 files = []
204 203 filecontent = {}
205 204
206 205 p2 = None
207 206 if mergeable_file:
208 207 fn = "mf"
209 208 p1 = repo[ps[0]]
210 209 if len(ps) > 1:
211 210 p2 = repo[ps[1]]
212 211 pa = p1.ancestor(p2)
213 212 base, local, other = [x[fn].data() for x in (pa, p1,
214 213 p2)]
215 214 m3 = simplemerge.Merge3Text(base, local, other)
216 215 ml = [l.strip() for l in m3.merge_lines()]
217 216 ml.append("")
218 217 elif at > 0:
219 218 ml = p1[fn].data().split("\n")
220 219 else:
221 220 ml = initialmergedlines
222 221 ml[id * linesperrev] += " r%i" % id
223 222 mergedtext = "\n".join(ml)
224 223 files.append(fn)
225 224 filecontent[fn] = mergedtext
226 225
227 226 if overwritten_file:
228 227 fn = "of"
229 228 files.append(fn)
230 229 filecontent[fn] = "r%i\n" % id
231 230
232 231 if new_file:
233 232 fn = "nf%i" % id
234 233 files.append(fn)
235 234 filecontent[fn] = "r%i\n" % id
236 235 if len(ps) > 1:
237 236 if not p2:
238 237 p2 = repo[ps[1]]
239 238 for fn in p2:
240 239 if fn.startswith("nf"):
241 240 files.append(fn)
242 241 filecontent[fn] = p2[fn].data()
243 242
244 243 def fctxfn(repo, cx, path):
245 244 if path in filecontent:
246 245 return context.memfilectx(repo, cx, path,
247 246 filecontent[path])
248 247 return None
249 248
250 249 if len(ps) == 0 or ps[0] < 0:
251 250 pars = [None, None]
252 251 elif len(ps) == 1:
253 252 pars = [nodeids[ps[0]], None]
254 253 else:
255 254 pars = [nodeids[p] for p in ps]
256 255 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
257 256 date=(id, 0),
258 257 user="debugbuilddag",
259 258 extra={'branch': atbranch})
260 259 nodeid = repo.commitctx(cx)
261 260 nodeids.append(nodeid)
262 261 at = id
263 262 elif type == 'l':
264 263 id, name = data
265 264 ui.note(('tag %s\n' % name))
266 265 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
267 266 elif type == 'a':
268 267 ui.note(('branch %s\n' % data))
269 268 atbranch = data
270 269 ui.progress(_('building'), id, unit=_('revisions'), total=total)
271 270 tr.close()
272 271
273 272 if tags:
274 273 repo.vfs.write("localtags", "".join(tags))
275 274 finally:
276 275 ui.progress(_('building'), None)
277 276 release(tr, lock, wlock)
278 277
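The DAG text format spelled out in the docstring above can also be fed straight to the parser the command uses. A minimal sketch with a made-up DAG text (the b'n'/b'l'/b'a' event kinds mirror the cases handled in the loop above):

    from __future__ import print_function
    from mercurial import dagparser

    # "+2" is a linear run of two nodes, "*2" forks from the node two back,
    # and "/2" merges the preceding node with the node two back
    text = b'+2 *2 /2'
    for kind, data in dagparser.parsedag(text):
        if kind == b'n':        # node: (id, [parent ids])
            nodeid, parents = data
            print(nodeid, parents)
        elif kind == b'l':      # local tag attached to the preceding node
            print('tag', data)
        elif kind == b'a':      # named branch for subsequent nodes
            print('branch', data)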
279 278 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
280 279 indent_string = ' ' * indent
281 280 if all:
282 281 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
283 282 % indent_string)
284 283
285 284 def showchunks(named):
286 285 ui.write("\n%s%s\n" % (indent_string, named))
287 286 for deltadata in gen.deltaiter():
288 287 node, p1, p2, cs, deltabase, delta, flags = deltadata
289 288 ui.write("%s%s %s %s %s %s %d\n" %
290 289 (indent_string, hex(node), hex(p1), hex(p2),
291 290 hex(cs), hex(deltabase), len(delta)))
292 291
293 292 chunkdata = gen.changelogheader()
294 293 showchunks("changelog")
295 294 chunkdata = gen.manifestheader()
296 295 showchunks("manifest")
297 296 for chunkdata in iter(gen.filelogheader, {}):
298 297 fname = chunkdata['filename']
299 298 showchunks(fname)
300 299 else:
301 300 if isinstance(gen, bundle2.unbundle20):
302 301 raise error.Abort(_('use debugbundle2 for this file'))
303 302 chunkdata = gen.changelogheader()
304 303 for deltadata in gen.deltaiter():
305 304 node, p1, p2, cs, deltabase, delta, flags = deltadata
306 305 ui.write("%s%s\n" % (indent_string, hex(node)))
307 306
308 307 def _debugobsmarkers(ui, part, indent=0, **opts):
309 308 """display version and markers contained in 'data'"""
310 309 opts = pycompat.byteskwargs(opts)
311 310 data = part.read()
312 311 indent_string = ' ' * indent
313 312 try:
314 313 version, markers = obsolete._readmarkers(data)
315 314 except error.UnknownVersion as exc:
316 315 msg = "%sunsupported version: %s (%d bytes)\n"
317 316 msg %= indent_string, exc.version, len(data)
318 317 ui.write(msg)
319 318 else:
320 319 msg = "%sversion: %d (%d bytes)\n"
321 320 msg %= indent_string, version, len(data)
322 321 ui.write(msg)
323 322 fm = ui.formatter('debugobsolete', opts)
324 323 for rawmarker in sorted(markers):
325 324 m = obsutil.marker(None, rawmarker)
326 325 fm.startitem()
327 326 fm.plain(indent_string)
328 327 cmdutil.showmarker(fm, m)
329 328 fm.end()
330 329
331 330 def _debugphaseheads(ui, data, indent=0):
332 331 """display phase heads contained in 'data'"""
333 332 indent_string = ' ' * indent
334 333 headsbyphase = phases.binarydecode(data)
335 334 for phase in phases.allphases:
336 335 for head in headsbyphase[phase]:
337 336 ui.write(indent_string)
338 337 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
339 338
340 339 def _quasirepr(thing):
341 340 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
342 341 return '{%s}' % (
343 342 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
344 343 return pycompat.bytestr(repr(thing))
345 344
346 345 def _debugbundle2(ui, gen, all=None, **opts):
347 346 """lists the contents of a bundle2"""
348 347 if not isinstance(gen, bundle2.unbundle20):
349 348 raise error.Abort(_('not a bundle2 file'))
350 349 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
351 350 parttypes = opts.get(r'part_type', [])
352 351 for part in gen.iterparts():
353 352 if parttypes and part.type not in parttypes:
354 353 continue
355 354 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
356 355 if part.type == 'changegroup':
357 356 version = part.params.get('version', '01')
358 357 cg = changegroup.getunbundler(version, part, 'UN')
359 358 if not ui.quiet:
360 359 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
361 360 if part.type == 'obsmarkers':
362 361 if not ui.quiet:
363 362 _debugobsmarkers(ui, part, indent=4, **opts)
364 363 if part.type == 'phase-heads':
365 364 if not ui.quiet:
366 365 _debugphaseheads(ui, part, indent=4)
367 366
368 367 @command('debugbundle',
369 368 [('a', 'all', None, _('show all details')),
370 369 ('', 'part-type', [], _('show only the named part type')),
371 370 ('', 'spec', None, _('print the bundlespec of the bundle'))],
372 371 _('FILE'),
373 372 norepo=True)
374 373 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
375 374 """lists the contents of a bundle"""
376 375 with hg.openpath(ui, bundlepath) as f:
377 376 if spec:
378 377 spec = exchange.getbundlespec(ui, f)
379 378 ui.write('%s\n' % spec)
380 379 return
381 380
382 381 gen = exchange.readbundle(ui, f, bundlepath)
383 382 if isinstance(gen, bundle2.unbundle20):
384 383 return _debugbundle2(ui, gen, all=all, **opts)
385 384 _debugchangegroup(ui, gen, all=all, **opts)
386 385
387 386 @command('debugcapabilities',
388 387 [], _('PATH'),
389 388 norepo=True)
390 389 def debugcapabilities(ui, path, **opts):
391 390 """lists the capabilities of a remote peer"""
392 391 opts = pycompat.byteskwargs(opts)
393 392 peer = hg.peer(ui, opts, path)
394 393 caps = peer.capabilities()
395 394 ui.write(('Main capabilities:\n'))
396 395 for c in sorted(caps):
397 396 ui.write((' %s\n') % c)
398 397 b2caps = bundle2.bundle2caps(peer)
399 398 if b2caps:
400 399 ui.write(('Bundle2 capabilities:\n'))
401 400 for key, values in sorted(b2caps.iteritems()):
402 401 ui.write((' %s\n') % key)
403 402 for v in values:
404 403 ui.write((' %s\n') % v)
405 404
406 405 @command('debugcheckstate', [], '')
407 406 def debugcheckstate(ui, repo):
408 407 """validate the correctness of the current dirstate"""
409 408 parent1, parent2 = repo.dirstate.parents()
410 409 m1 = repo[parent1].manifest()
411 410 m2 = repo[parent2].manifest()
412 411 errors = 0
413 412 for f in repo.dirstate:
414 413 state = repo.dirstate[f]
415 414 if state in "nr" and f not in m1:
416 415 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
417 416 errors += 1
418 417 if state in "a" and f in m1:
419 418 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
420 419 errors += 1
421 420 if state in "m" and f not in m1 and f not in m2:
422 421 ui.warn(_("%s in state %s, but not in either manifest\n") %
423 422 (f, state))
424 423 errors += 1
425 424 for f in m1:
426 425 state = repo.dirstate[f]
427 426 if state not in "nrm":
428 427 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
429 428 errors += 1
430 429 if errors:
431 430 error = _(".hg/dirstate inconsistent with current parent's manifest")
432 431 raise error.Abort(error)
433 432
434 433 @command('debugcolor',
435 434 [('', 'style', None, _('show all configured styles'))],
436 435 'hg debugcolor')
437 436 def debugcolor(ui, repo, **opts):
438 437 """show available color, effects or style"""
439 438 ui.write(('color mode: %s\n') % ui._colormode)
440 439 if opts.get(r'style'):
441 440 return _debugdisplaystyle(ui)
442 441 else:
443 442 return _debugdisplaycolor(ui)
444 443
445 444 def _debugdisplaycolor(ui):
446 445 ui = ui.copy()
447 446 ui._styles.clear()
448 447 for effect in color._activeeffects(ui).keys():
449 448 ui._styles[effect] = effect
450 449 if ui._terminfoparams:
451 450 for k, v in ui.configitems('color'):
452 451 if k.startswith('color.'):
453 452 ui._styles[k] = k[6:]
454 453 elif k.startswith('terminfo.'):
455 454 ui._styles[k] = k[9:]
456 455 ui.write(_('available colors:\n'))
457 456 # sort label with a '_' after the other to group '_background' entry.
458 457 items = sorted(ui._styles.items(),
459 458 key=lambda i: ('_' in i[0], i[0], i[1]))
460 459 for colorname, label in items:
461 460 ui.write(('%s\n') % colorname, label=label)
462 461
463 462 def _debugdisplaystyle(ui):
464 463 ui.write(_('available style:\n'))
465 464 if not ui._styles:
466 465 return
467 466 width = max(len(s) for s in ui._styles)
468 467 for label, effects in sorted(ui._styles.items()):
469 468 ui.write('%s' % label, label=label)
470 469 if effects:
471 470 # 50
472 471 ui.write(': ')
473 472 ui.write(' ' * (max(0, width - len(label))))
474 473 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
475 474 ui.write('\n')
476 475
477 476 @command('debugcreatestreamclonebundle', [], 'FILE')
478 477 def debugcreatestreamclonebundle(ui, repo, fname):
479 478 """create a stream clone bundle file
480 479
481 480 Stream bundles are special bundles that are essentially archives of
482 481 revlog files. They are commonly used for cloning very quickly.
483 482 """
484 483 # TODO we may want to turn this into an abort when this functionality
485 484 # is moved into `hg bundle`.
486 485 if phases.hassecret(repo):
487 486 ui.warn(_('(warning: stream clone bundle will contain secret '
488 487 'revisions)\n'))
489 488
490 489 requirements, gen = streamclone.generatebundlev1(repo)
491 490 changegroup.writechunks(ui, gen, fname)
492 491
493 492 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
494 493
495 494 @command('debugdag',
496 495 [('t', 'tags', None, _('use tags as labels')),
497 496 ('b', 'branches', None, _('annotate with branch names')),
498 497 ('', 'dots', None, _('use dots for runs')),
499 498 ('s', 'spaces', None, _('separate elements by spaces'))],
500 499 _('[OPTION]... [FILE [REV]...]'),
501 500 optionalrepo=True)
502 501 def debugdag(ui, repo, file_=None, *revs, **opts):
503 502 """format the changelog or an index DAG as a concise textual description
504 503
505 504 If you pass a revlog index, the revlog's DAG is emitted. If you list
506 505 revision numbers, they get labeled in the output as rN.
507 506
508 507 Otherwise, the changelog DAG of the current repo is emitted.
509 508 """
510 509 spaces = opts.get(r'spaces')
511 510 dots = opts.get(r'dots')
512 511 if file_:
513 512 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
514 513 file_)
515 514 revs = set((int(r) for r in revs))
516 515 def events():
517 516 for r in rlog:
518 517 yield 'n', (r, list(p for p in rlog.parentrevs(r)
519 518 if p != -1))
520 519 if r in revs:
521 520 yield 'l', (r, "r%i" % r)
522 521 elif repo:
523 522 cl = repo.changelog
524 523 tags = opts.get(r'tags')
525 524 branches = opts.get(r'branches')
526 525 if tags:
527 526 labels = {}
528 527 for l, n in repo.tags().items():
529 528 labels.setdefault(cl.rev(n), []).append(l)
530 529 def events():
531 530 b = "default"
532 531 for r in cl:
533 532 if branches:
534 533 newb = cl.read(cl.node(r))[5]['branch']
535 534 if newb != b:
536 535 yield 'a', newb
537 536 b = newb
538 537 yield 'n', (r, list(p for p in cl.parentrevs(r)
539 538 if p != -1))
540 539 if tags:
541 540 ls = labels.get(r)
542 541 if ls:
543 542 for l in ls:
544 543 yield 'l', (r, l)
545 544 else:
546 545 raise error.Abort(_('need repo for changelog dag'))
547 546
548 547 for line in dagparser.dagtextlines(events(),
549 548 addspaces=spaces,
550 549 wraplabels=True,
551 550 wrapannotations=True,
552 551 wrapnonlinear=dots,
553 552 usedots=dots,
554 553 maxlinewidth=70):
555 554 ui.write(line)
556 555 ui.write("\n")
557 556
558 557 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
559 558 def debugdata(ui, repo, file_, rev=None, **opts):
560 559 """dump the contents of a data file revision"""
561 560 opts = pycompat.byteskwargs(opts)
562 561 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
563 562 if rev is not None:
564 563 raise error.CommandError('debugdata', _('invalid arguments'))
565 564 file_, rev = None, file_
566 565 elif rev is None:
567 566 raise error.CommandError('debugdata', _('invalid arguments'))
568 567 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
569 568 try:
570 569 ui.write(r.revision(r.lookup(rev), raw=True))
571 570 except KeyError:
572 571 raise error.Abort(_('invalid revision identifier %s') % rev)
573 572
574 573 @command('debugdate',
575 574 [('e', 'extended', None, _('try extended date formats'))],
576 575 _('[-e] DATE [RANGE]'),
577 576 norepo=True, optionalrepo=True)
578 577 def debugdate(ui, date, range=None, **opts):
579 578 """parse and display a date"""
580 579 if opts[r"extended"]:
581 580 d = dateutil.parsedate(date, util.extendeddateformats)
582 581 else:
583 582 d = dateutil.parsedate(date)
584 583 ui.write(("internal: %d %d\n") % d)
585 584 ui.write(("standard: %s\n") % dateutil.datestr(d))
586 585 if range:
587 586 m = dateutil.matchdate(range)
588 587 ui.write(("match: %s\n") % m(d[0]))
589 588
590 589 @command('debugdeltachain',
591 590 cmdutil.debugrevlogopts + cmdutil.formatteropts,
592 591 _('-c|-m|FILE'),
593 592 optionalrepo=True)
594 593 def debugdeltachain(ui, repo, file_=None, **opts):
595 594 """dump information about delta chains in a revlog
596 595
597 596 Output can be templatized. Available template keywords are:
598 597
599 598 :``rev``: revision number
600 599 :``chainid``: delta chain identifier (numbered by unique base)
601 600 :``chainlen``: delta chain length to this revision
602 601 :``prevrev``: previous revision in delta chain
603 602 :``deltatype``: role of delta / how it was computed
604 603 :``compsize``: compressed size of revision
605 604 :``uncompsize``: uncompressed size of revision
606 605 :``chainsize``: total size of compressed revisions in chain
607 606 :``chainratio``: total chain size divided by uncompressed revision size
608 607 (new delta chains typically start at ratio 2.00)
609 608 :``lindist``: linear distance from base revision in delta chain to end
610 609 of this revision
611 610 :``extradist``: total size of revisions not part of this delta chain from
612 611 base of delta chain to end of this revision; a measurement
613 612 of how much extra data we need to read/seek across to read
614 613 the delta chain for this revision
615 614 :``extraratio``: extradist divided by chainsize; another representation of
616 615 how much unrelated data is needed to load this delta chain
617 616
618 617 If the repository is configured to use the sparse read, additional keywords
619 618 are available:
620 619
621 620 :``readsize``: total size of data read from the disk for a revision
622 621 (sum of the sizes of all the blocks)
623 622 :``largestblock``: size of the largest block of data read from the disk
624 623 :``readdensity``: density of useful bytes in the data read from the disk
625 624 :``srchunks``: in how many data hunks the whole revision would be read
626 625
627 626 The sparse read can be enabled with experimental.sparse-read = True
628 627 """
629 628 opts = pycompat.byteskwargs(opts)
630 629 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
631 630 index = r.index
632 631 start = r.start
633 632 length = r.length
634 633 generaldelta = r.version & revlog.FLAG_GENERALDELTA
635 634 withsparseread = getattr(r, '_withsparseread', False)
636 635
637 636 def revinfo(rev):
638 637 e = index[rev]
639 638 compsize = e[1]
640 639 uncompsize = e[2]
641 640 chainsize = 0
642 641
643 642 if generaldelta:
644 643 if e[3] == e[5]:
645 644 deltatype = 'p1'
646 645 elif e[3] == e[6]:
647 646 deltatype = 'p2'
648 647 elif e[3] == rev - 1:
649 648 deltatype = 'prev'
650 649 elif e[3] == rev:
651 650 deltatype = 'base'
652 651 else:
653 652 deltatype = 'other'
654 653 else:
655 654 if e[3] == rev:
656 655 deltatype = 'base'
657 656 else:
658 657 deltatype = 'prev'
659 658
660 659 chain = r._deltachain(rev)[0]
661 660 for iterrev in chain:
662 661 e = index[iterrev]
663 662 chainsize += e[1]
664 663
665 664 return compsize, uncompsize, deltatype, chain, chainsize
666 665
667 666 fm = ui.formatter('debugdeltachain', opts)
668 667
669 668 fm.plain(' rev chain# chainlen prev delta '
670 669 'size rawsize chainsize ratio lindist extradist '
671 670 'extraratio')
672 671 if withsparseread:
673 672 fm.plain(' readsize largestblk rddensity srchunks')
674 673 fm.plain('\n')
675 674
676 675 chainbases = {}
677 676 for rev in r:
678 677 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
679 678 chainbase = chain[0]
680 679 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
681 680 basestart = start(chainbase)
682 681 revstart = start(rev)
683 682 lineardist = revstart + comp - basestart
684 683 extradist = lineardist - chainsize
685 684 try:
686 685 prevrev = chain[-2]
687 686 except IndexError:
688 687 prevrev = -1
689 688
690 689 chainratio = float(chainsize) / float(uncomp)
691 690 extraratio = float(extradist) / float(chainsize)
692 691
693 692 fm.startitem()
694 693 fm.write('rev chainid chainlen prevrev deltatype compsize '
695 694 'uncompsize chainsize chainratio lindist extradist '
696 695 'extraratio',
697 696 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
698 697 rev, chainid, len(chain), prevrev, deltatype, comp,
699 698 uncomp, chainsize, chainratio, lineardist, extradist,
700 699 extraratio,
701 700 rev=rev, chainid=chainid, chainlen=len(chain),
702 701 prevrev=prevrev, deltatype=deltatype, compsize=comp,
703 702 uncompsize=uncomp, chainsize=chainsize,
704 703 chainratio=chainratio, lindist=lineardist,
705 704 extradist=extradist, extraratio=extraratio)
706 705 if withsparseread:
707 706 readsize = 0
708 707 largestblock = 0
709 708 srchunks = 0
710 709
711 710 for revschunk in revlog._slicechunk(r, chain):
712 711 srchunks += 1
713 712 blkend = start(revschunk[-1]) + length(revschunk[-1])
714 713 blksize = blkend - start(revschunk[0])
715 714
716 715 readsize += blksize
717 716 if largestblock < blksize:
718 717 largestblock = blksize
719 718
720 719 readdensity = float(chainsize) / float(readsize)
721 720
722 721 fm.write('readsize largestblock readdensity srchunks',
723 722 ' %10d %10d %9.5f %8d',
724 723 readsize, largestblock, readdensity, srchunks,
725 724 readsize=readsize, largestblock=largestblock,
726 725 readdensity=readdensity, srchunks=srchunks)
727 726
728 727 fm.plain('\n')
729 728
730 729 fm.end()
731 730
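As an illustration, the keywords listed in the docstring can be combined into a plain-text report through the standard formatter options; a hypothetical invocation:

    hg debugdeltachain -m --template '{rev} {chainid} {chainlen} {deltatype} {chainratio}\n'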
732 731 @command('debugdirstate|debugstate',
733 732 [('', 'nodates', None, _('do not display the saved mtime')),
734 733 ('', 'datesort', None, _('sort by saved mtime'))],
735 734 _('[OPTION]...'))
736 735 def debugstate(ui, repo, **opts):
737 736 """show the contents of the current dirstate"""
738 737
739 738 nodates = opts.get(r'nodates')
740 739 datesort = opts.get(r'datesort')
741 740
742 741 timestr = ""
743 742 if datesort:
744 743 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
745 744 else:
746 745 keyfunc = None # sort by filename
747 746 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
748 747 if ent[3] == -1:
749 748 timestr = 'unset '
750 749 elif nodates:
751 750 timestr = 'set '
752 751 else:
753 752 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
754 753 time.localtime(ent[3]))
755 754 timestr = encoding.strtolocal(timestr)
756 755 if ent[1] & 0o20000:
757 756 mode = 'lnk'
758 757 else:
759 758 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
760 759 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
761 760 for f in repo.dirstate.copies():
762 761 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
763 762
764 763 @command('debugdiscovery',
765 764 [('', 'old', None, _('use old-style discovery')),
766 765 ('', 'nonheads', None,
767 766 _('use old-style discovery with non-heads included')),
768 767 ('', 'rev', [], 'restrict discovery to this set of revs'),
769 768 ] + cmdutil.remoteopts,
770 769 _('[--rev REV] [OTHER]'))
771 770 def debugdiscovery(ui, repo, remoteurl="default", **opts):
772 771 """runs the changeset discovery protocol in isolation"""
773 772 opts = pycompat.byteskwargs(opts)
774 773 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
775 774 remote = hg.peer(repo, opts, remoteurl)
776 775 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
777 776
778 777 # make sure tests are repeatable
779 778 random.seed(12323)
780 779
781 780 def doit(pushedrevs, remoteheads, remote=remote):
782 781 if opts.get('old'):
783 782 if not util.safehasattr(remote, 'branches'):
784 783 # enable in-client legacy support
785 784 remote = localrepo.locallegacypeer(remote.local())
786 785 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
787 786 force=True)
788 787 common = set(common)
789 788 if not opts.get('nonheads'):
790 789 ui.write(("unpruned common: %s\n") %
791 790 " ".join(sorted(short(n) for n in common)))
792 791 dag = dagutil.revlogdag(repo.changelog)
793 792 all = dag.ancestorset(dag.internalizeall(common))
794 793 common = dag.externalizeall(dag.headsetofconnecteds(all))
795 794 else:
796 795 nodes = None
797 796 if pushedrevs:
798 797 revs = scmutil.revrange(repo, pushedrevs)
799 798 nodes = [repo[r].node() for r in revs]
800 799 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
801 800 ancestorsof=nodes)
802 801 common = set(common)
803 802 rheads = set(hds)
804 803 lheads = set(repo.heads())
805 804 ui.write(("common heads: %s\n") %
806 805 " ".join(sorted(short(n) for n in common)))
807 806 if lheads <= common:
808 807 ui.write(("local is subset\n"))
809 808 elif rheads <= common:
810 809 ui.write(("remote is subset\n"))
811 810
812 811 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
813 812 localrevs = opts['rev']
814 813 doit(localrevs, remoterevs)
815 814
816 815 _chunksize = 4 << 10
817 816
818 817 @command('debugdownload',
819 818 [
820 819 ('o', 'output', '', _('path')),
821 820 ],
822 821 optionalrepo=True)
823 822 def debugdownload(ui, repo, url, output=None, **opts):
824 823 """download a resource using Mercurial logic and config
825 824 """
826 825 fh = urlmod.open(ui, url, output)
827 826
828 827 dest = ui
829 828 if output:
830 829 dest = open(output, "wb", _chunksize)
831 830 try:
832 831 data = fh.read(_chunksize)
833 832 while data:
834 833 dest.write(data)
835 834 data = fh.read(_chunksize)
836 835 finally:
837 836 if output:
838 837 dest.close()
839 838
840 839 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
841 840 def debugextensions(ui, repo, **opts):
842 841 '''show information about active extensions'''
843 842 opts = pycompat.byteskwargs(opts)
844 843 exts = extensions.extensions(ui)
845 844 hgver = util.version()
846 845 fm = ui.formatter('debugextensions', opts)
847 846 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
848 847 isinternal = extensions.ismoduleinternal(extmod)
849 848 extsource = pycompat.fsencode(extmod.__file__)
850 849 if isinternal:
851 850 exttestedwith = [] # never expose magic string to users
852 851 else:
853 852 exttestedwith = getattr(extmod, 'testedwith', '').split()
854 853 extbuglink = getattr(extmod, 'buglink', None)
855 854
856 855 fm.startitem()
857 856
858 857 if ui.quiet or ui.verbose:
859 858 fm.write('name', '%s\n', extname)
860 859 else:
861 860 fm.write('name', '%s', extname)
862 861 if isinternal or hgver in exttestedwith:
863 862 fm.plain('\n')
864 863 elif not exttestedwith:
865 864 fm.plain(_(' (untested!)\n'))
866 865 else:
867 866 lasttestedversion = exttestedwith[-1]
868 867 fm.plain(' (%s!)\n' % lasttestedversion)
869 868
870 869 fm.condwrite(ui.verbose and extsource, 'source',
871 870 _(' location: %s\n'), extsource or "")
872 871
873 872 if ui.verbose:
874 873 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
875 874 fm.data(bundled=isinternal)
876 875
877 876 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
878 877 _(' tested with: %s\n'),
879 878 fm.formatlist(exttestedwith, name='ver'))
880 879
881 880 fm.condwrite(ui.verbose and extbuglink, 'buglink',
882 881 _(' bug reporting: %s\n'), extbuglink or "")
883 882
884 883 fm.end()
885 884
886 885 @command('debugfileset',
887 886 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
888 887 _('[-r REV] FILESPEC'))
889 888 def debugfileset(ui, repo, expr, **opts):
890 889 '''parse and apply a fileset specification'''
891 890 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
892 891 if ui.verbose:
893 892 tree = fileset.parse(expr)
894 893 ui.note(fileset.prettyformat(tree), "\n")
895 894
896 895 for f in ctx.getfileset(expr):
897 896 ui.write("%s\n" % f)
898 897
899 898 @command('debugformat',
900 899 [] + cmdutil.formatteropts,
901 900 _(''))
902 901 def debugformat(ui, repo, **opts):
903 902 """display format information about the current repository
904 903
905 904 Use --verbose to get extra information about current config value and
906 905 Mercurial default."""
907 906 opts = pycompat.byteskwargs(opts)
908 907 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
909 908 maxvariantlength = max(len('format-variant'), maxvariantlength)
910 909
911 910 def makeformatname(name):
912 911 return '%s:' + (' ' * (maxvariantlength - len(name)))
913 912
914 913 fm = ui.formatter('debugformat', opts)
915 914 if fm.isplain():
916 915 def formatvalue(value):
917 916 if util.safehasattr(value, 'startswith'):
918 917 return value
919 918 if value:
920 919 return 'yes'
921 920 else:
922 921 return 'no'
923 922 else:
924 923 formatvalue = pycompat.identity
925 924
926 925 fm.plain('format-variant')
927 926 fm.plain(' ' * (maxvariantlength - len('format-variant')))
928 927 fm.plain(' repo')
929 928 if ui.verbose:
930 929 fm.plain(' config default')
931 930 fm.plain('\n')
932 931 for fv in upgrade.allformatvariant:
933 932 fm.startitem()
934 933 repovalue = fv.fromrepo(repo)
935 934 configvalue = fv.fromconfig(repo)
936 935
937 936 if repovalue != configvalue:
938 937 namelabel = 'formatvariant.name.mismatchconfig'
939 938 repolabel = 'formatvariant.repo.mismatchconfig'
940 939 elif repovalue != fv.default:
941 940 namelabel = 'formatvariant.name.mismatchdefault'
942 941 repolabel = 'formatvariant.repo.mismatchdefault'
943 942 else:
944 943 namelabel = 'formatvariant.name.uptodate'
945 944 repolabel = 'formatvariant.repo.uptodate'
946 945
947 946 fm.write('name', makeformatname(fv.name), fv.name,
948 947 label=namelabel)
949 948 fm.write('repo', ' %3s', formatvalue(repovalue),
950 949 label=repolabel)
951 950 if fv.default != configvalue:
952 951 configlabel = 'formatvariant.config.special'
953 952 else:
954 953 configlabel = 'formatvariant.config.default'
955 954 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
956 955 label=configlabel)
957 956 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
958 957 label='formatvariant.default')
959 958 fm.plain('\n')
960 959 fm.end()
961 960
962 961 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
963 962 def debugfsinfo(ui, path="."):
964 963 """show information detected about current filesystem"""
965 964 ui.write(('path: %s\n') % path)
966 965 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
967 966 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
968 967 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
969 968 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
970 969 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
971 970 casesensitive = '(unknown)'
972 971 try:
973 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
972 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
974 973 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
975 974 except OSError:
976 975 pass
977 976 ui.write(('case-sensitive: %s\n') % casesensitive)
978 977
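The swap above is the only functional change in this file: on Python 3 the stdlib class hands back a str name, while the rest of the function (and Mercurial's path handling generally) works with bytes. A quick illustration of the stdlib behaviour being papered over:

    import tempfile

    # Python 3: .name is a str, not bytes
    with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir='.') as f:
        print(type(f.name))     # <class 'str'>
    # pycompat.namedtempfile() instead yields an object whose .name is bytes,
    # so util.fscasesensitive(f.name) keeps receiving a bytes path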
979 978 @command('debuggetbundle',
980 979 [('H', 'head', [], _('id of head node'), _('ID')),
981 980 ('C', 'common', [], _('id of common node'), _('ID')),
982 981 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
983 982 _('REPO FILE [-H|-C ID]...'),
984 983 norepo=True)
985 984 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
986 985 """retrieves a bundle from a repo
987 986
988 987 Every ID must be a full-length hex node id string. Saves the bundle to the
989 988 given file.
990 989 """
991 990 opts = pycompat.byteskwargs(opts)
992 991 repo = hg.peer(ui, opts, repopath)
993 992 if not repo.capable('getbundle'):
994 993 raise error.Abort("getbundle() not supported by target repository")
995 994 args = {}
996 995 if common:
997 996 args[r'common'] = [bin(s) for s in common]
998 997 if head:
999 998 args[r'heads'] = [bin(s) for s in head]
1000 999 # TODO: get desired bundlecaps from command line.
1001 1000 args[r'bundlecaps'] = None
1002 1001 bundle = repo.getbundle('debug', **args)
1003 1002
1004 1003 bundletype = opts.get('type', 'bzip2').lower()
1005 1004 btypes = {'none': 'HG10UN',
1006 1005 'bzip2': 'HG10BZ',
1007 1006 'gzip': 'HG10GZ',
1008 1007 'bundle2': 'HG20'}
1009 1008 bundletype = btypes.get(bundletype)
1010 1009 if bundletype not in bundle2.bundletypes:
1011 1010 raise error.Abort(_('unknown bundle type specified with --type'))
1012 1011 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1013 1012
1014 1013 @command('debugignore', [], '[FILE]')
1015 1014 def debugignore(ui, repo, *files, **opts):
1016 1015 """display the combined ignore pattern and information about ignored files
1017 1016
1018 1017 With no argument display the combined ignore pattern.
1019 1018
1020 1019 Given space separated file names, shows if the given file is ignored and
1021 1020 if so, show the ignore rule (file and line number) that matched it.
1022 1021 """
1023 1022 ignore = repo.dirstate._ignore
1024 1023 if not files:
1025 1024 # Show all the patterns
1026 1025 ui.write("%s\n" % pycompat.byterepr(ignore))
1027 1026 else:
1028 1027 m = scmutil.match(repo[None], pats=files)
1029 1028 for f in m.files():
1030 1029 nf = util.normpath(f)
1031 1030 ignored = None
1032 1031 ignoredata = None
1033 1032 if nf != '.':
1034 1033 if ignore(nf):
1035 1034 ignored = nf
1036 1035 ignoredata = repo.dirstate._ignorefileandline(nf)
1037 1036 else:
1038 1037 for p in util.finddirs(nf):
1039 1038 if ignore(p):
1040 1039 ignored = p
1041 1040 ignoredata = repo.dirstate._ignorefileandline(p)
1042 1041 break
1043 1042 if ignored:
1044 1043 if ignored == nf:
1045 1044 ui.write(_("%s is ignored\n") % m.uipath(f))
1046 1045 else:
1047 1046 ui.write(_("%s is ignored because of "
1048 1047 "containing folder %s\n")
1049 1048 % (m.uipath(f), ignored))
1050 1049 ignorefile, lineno, line = ignoredata
1051 1050 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1052 1051 % (ignorefile, lineno, line))
1053 1052 else:
1054 1053 ui.write(_("%s is not ignored\n") % m.uipath(f))
1055 1054
1056 1055 @command('debugindex', cmdutil.debugrevlogopts +
1057 1056 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1058 1057 _('[-f FORMAT] -c|-m|FILE'),
1059 1058 optionalrepo=True)
1060 1059 def debugindex(ui, repo, file_=None, **opts):
1061 1060 """dump the contents of an index file"""
1062 1061 opts = pycompat.byteskwargs(opts)
1063 1062 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1064 1063 format = opts.get('format', 0)
1065 1064 if format not in (0, 1):
1066 1065 raise error.Abort(_("unknown format %d") % format)
1067 1066
1068 1067 if ui.debugflag:
1069 1068 shortfn = hex
1070 1069 else:
1071 1070 shortfn = short
1072 1071
1073 1072 # There might not be anything in r, so have a sane default
1074 1073 idlen = 12
1075 1074 for i in r:
1076 1075 idlen = len(shortfn(r.node(i)))
1077 1076 break
1078 1077
1079 1078 if format == 0:
1080 1079 if ui.verbose:
1081 1080 ui.write((" rev offset length linkrev"
1082 1081 " %s %s p2\n") % ("nodeid".ljust(idlen),
1083 1082 "p1".ljust(idlen)))
1084 1083 else:
1085 1084 ui.write((" rev linkrev %s %s p2\n") % (
1086 1085 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1087 1086 elif format == 1:
1088 1087 if ui.verbose:
1089 1088 ui.write((" rev flag offset length size link p1"
1090 1089 " p2 %s\n") % "nodeid".rjust(idlen))
1091 1090 else:
1092 1091 ui.write((" rev flag size link p1 p2 %s\n") %
1093 1092 "nodeid".rjust(idlen))
1094 1093
1095 1094 for i in r:
1096 1095 node = r.node(i)
1097 1096 if format == 0:
1098 1097 try:
1099 1098 pp = r.parents(node)
1100 1099 except Exception:
1101 1100 pp = [nullid, nullid]
1102 1101 if ui.verbose:
1103 1102 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1104 1103 i, r.start(i), r.length(i), r.linkrev(i),
1105 1104 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1106 1105 else:
1107 1106 ui.write("% 6d % 7d %s %s %s\n" % (
1108 1107 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1109 1108 shortfn(pp[1])))
1110 1109 elif format == 1:
1111 1110 pr = r.parentrevs(i)
1112 1111 if ui.verbose:
1113 1112 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1114 1113 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1115 1114 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1116 1115 else:
1117 1116 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1118 1117 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1119 1118 shortfn(node)))
1120 1119
1121 1120 @command('debugindexdot', cmdutil.debugrevlogopts,
1122 1121 _('-c|-m|FILE'), optionalrepo=True)
1123 1122 def debugindexdot(ui, repo, file_=None, **opts):
1124 1123 """dump an index DAG as a graphviz dot file"""
1125 1124 opts = pycompat.byteskwargs(opts)
1126 1125 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1127 1126 ui.write(("digraph G {\n"))
1128 1127 for i in r:
1129 1128 node = r.node(i)
1130 1129 pp = r.parents(node)
1131 1130 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1132 1131 if pp[1] != nullid:
1133 1132 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1134 1133 ui.write("}\n")
1135 1134
1136 1135 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1137 1136 def debuginstall(ui, **opts):
1138 1137 '''test Mercurial installation
1139 1138
1140 1139 Returns 0 on success.
1141 1140 '''
1142 1141 opts = pycompat.byteskwargs(opts)
1143 1142
1144 1143 def writetemp(contents):
1145 1144 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1146 1145 f = os.fdopen(fd, r"wb")
1147 1146 f.write(contents)
1148 1147 f.close()
1149 1148 return name
1150 1149
1151 1150 problems = 0
1152 1151
1153 1152 fm = ui.formatter('debuginstall', opts)
1154 1153 fm.startitem()
1155 1154
1156 1155 # encoding
1157 1156 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1158 1157 err = None
1159 1158 try:
1160 1159 codecs.lookup(pycompat.sysstr(encoding.encoding))
1161 1160 except LookupError as inst:
1162 1161 err = stringutil.forcebytestr(inst)
1163 1162 problems += 1
1164 1163 fm.condwrite(err, 'encodingerror', _(" %s\n"
1165 1164 " (check that your locale is properly set)\n"), err)
1166 1165
1167 1166 # Python
1168 1167 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1169 1168 pycompat.sysexecutable)
1170 1169 fm.write('pythonver', _("checking Python version (%s)\n"),
1171 1170 ("%d.%d.%d" % sys.version_info[:3]))
1172 1171 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1173 1172 os.path.dirname(pycompat.fsencode(os.__file__)))
1174 1173
1175 1174 security = set(sslutil.supportedprotocols)
1176 1175 if sslutil.hassni:
1177 1176 security.add('sni')
1178 1177
1179 1178 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1180 1179 fm.formatlist(sorted(security), name='protocol',
1181 1180 fmt='%s', sep=','))
1182 1181
1183 1182 # These are warnings, not errors. So don't increment problem count. This
1184 1183 # may change in the future.
1185 1184 if 'tls1.2' not in security:
1186 1185 fm.plain(_(' TLS 1.2 not supported by Python install; '
1187 1186 'network connections lack modern security\n'))
1188 1187 if 'sni' not in security:
1189 1188 fm.plain(_(' SNI not supported by Python install; may have '
1190 1189 'connectivity issues with some servers\n'))
1191 1190
1192 1191 # TODO print CA cert info
1193 1192
1194 1193 # hg version
1195 1194 hgver = util.version()
1196 1195 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1197 1196 hgver.split('+')[0])
1198 1197 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1199 1198 '+'.join(hgver.split('+')[1:]))
1200 1199
1201 1200 # compiled modules
1202 1201 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1203 1202 policy.policy)
1204 1203 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1205 1204 os.path.dirname(pycompat.fsencode(__file__)))
1206 1205
1207 1206 if policy.policy in ('c', 'allow'):
1208 1207 err = None
1209 1208 try:
1210 1209 from .cext import (
1211 1210 base85,
1212 1211 bdiff,
1213 1212 mpatch,
1214 1213 osutil,
1215 1214 )
1216 1215 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1217 1216 except Exception as inst:
1218 1217 err = stringutil.forcebytestr(inst)
1219 1218 problems += 1
1220 1219 fm.condwrite(err, 'extensionserror', " %s\n", err)
1221 1220
1222 1221 compengines = util.compengines._engines.values()
1223 1222 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1224 1223 fm.formatlist(sorted(e.name() for e in compengines),
1225 1224 name='compengine', fmt='%s', sep=', '))
1226 1225 fm.write('compenginesavail', _('checking available compression engines '
1227 1226 '(%s)\n'),
1228 1227 fm.formatlist(sorted(e.name() for e in compengines
1229 1228 if e.available()),
1230 1229 name='compengine', fmt='%s', sep=', '))
1231 1230 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1232 1231 fm.write('compenginesserver', _('checking available compression engines '
1233 1232 'for wire protocol (%s)\n'),
1234 1233 fm.formatlist([e.name() for e in wirecompengines
1235 1234 if e.wireprotosupport()],
1236 1235 name='compengine', fmt='%s', sep=', '))
1237 1236 re2 = 'missing'
1238 1237 if util._re2:
1239 1238 re2 = 'available'
1240 1239 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1241 1240 fm.data(re2=bool(util._re2))
1242 1241
1243 1242 # templates
1244 1243 p = templater.templatepaths()
1245 1244 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1246 1245 fm.condwrite(not p, '', _(" no template directories found\n"))
1247 1246 if p:
1248 1247 m = templater.templatepath("map-cmdline.default")
1249 1248 if m:
1250 1249 # template found, check if it is working
1251 1250 err = None
1252 1251 try:
1253 1252 templater.templater.frommapfile(m)
1254 1253 except Exception as inst:
1255 1254 err = stringutil.forcebytestr(inst)
1256 1255 p = None
1257 1256 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1258 1257 else:
1259 1258 p = None
1260 1259 fm.condwrite(p, 'defaulttemplate',
1261 1260 _("checking default template (%s)\n"), m)
1262 1261 fm.condwrite(not m, 'defaulttemplatenotfound',
1263 1262 _(" template '%s' not found\n"), "default")
1264 1263 if not p:
1265 1264 problems += 1
1266 1265 fm.condwrite(not p, '',
1267 1266 _(" (templates seem to have been installed incorrectly)\n"))
1268 1267
1269 1268 # editor
1270 1269 editor = ui.geteditor()
1271 1270 editor = util.expandpath(editor)
1272 1271 editorbin = procutil.shellsplit(editor)[0]
1273 1272 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1274 1273 cmdpath = procutil.findexe(editorbin)
1275 1274 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1276 1275 _(" No commit editor set and can't find %s in PATH\n"
1277 1276 " (specify a commit editor in your configuration"
1278 1277 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1279 1278 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1280 1279 _(" Can't find editor '%s' in PATH\n"
1281 1280 " (specify a commit editor in your configuration"
1282 1281 " file)\n"), not cmdpath and editorbin)
1283 1282 if not cmdpath and editor != 'vi':
1284 1283 problems += 1
1285 1284
1286 1285 # check username
1287 1286 username = None
1288 1287 err = None
1289 1288 try:
1290 1289 username = ui.username()
1291 1290 except error.Abort as e:
1292 1291 err = stringutil.forcebytestr(e)
1293 1292 problems += 1
1294 1293
1295 1294 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1296 1295 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1297 1296 " (specify a username in your configuration file)\n"), err)
1298 1297
1299 1298 fm.condwrite(not problems, '',
1300 1299 _("no problems detected\n"))
1301 1300 if not problems:
1302 1301 fm.data(problems=problems)
1303 1302 fm.condwrite(problems, 'problems',
1304 1303 _("%d problems detected,"
1305 1304 " please check your install!\n"), problems)
1306 1305 fm.end()
1307 1306
1308 1307 return problems
1309 1308
1310 1309 @command('debugknown', [], _('REPO ID...'), norepo=True)
1311 1310 def debugknown(ui, repopath, *ids, **opts):
1312 1311 """test whether node ids are known to a repo
1313 1312
1314 1313 Every ID must be a full-length hex node id string. Returns a list of 0s
1315 1314 and 1s indicating unknown/known.
1316 1315 """
1317 1316 opts = pycompat.byteskwargs(opts)
1318 1317 repo = hg.peer(ui, opts, repopath)
1319 1318 if not repo.capable('known'):
1320 1319 raise error.Abort("known() not supported by target repository")
1321 1320 flags = repo.known([bin(s) for s in ids])
1322 1321 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1323 1322
1324 1323 @command('debuglabelcomplete', [], _('LABEL...'))
1325 1324 def debuglabelcomplete(ui, repo, *args):
1326 1325 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1327 1326 debugnamecomplete(ui, repo, *args)
1328 1327
1329 1328 @command('debuglocks',
1330 1329 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1331 1330 ('W', 'force-wlock', None,
1332 1331 _('free the working state lock (DANGEROUS)')),
1333 1332 ('s', 'set-lock', None, _('set the store lock until stopped')),
1334 1333 ('S', 'set-wlock', None,
1335 1334 _('set the working state lock until stopped'))],
1336 1335 _('[OPTION]...'))
1337 1336 def debuglocks(ui, repo, **opts):
1338 1337 """show or modify state of locks
1339 1338
1340 1339 By default, this command will show which locks are held. This
1341 1340 includes the user and process holding the lock, the amount of time
1342 1341 the lock has been held, and the machine name where the process is
1343 1342 running if it's not local.
1344 1343
1345 1344 Locks protect the integrity of Mercurial's data, so should be
1346 1345 treated with care. System crashes or other interruptions may cause
1347 1346 locks to not be properly released, though Mercurial will usually
1348 1347 detect and remove such stale locks automatically.
1349 1348
1350 1349 However, detecting stale locks may not always be possible (for
1351 1350 instance, on a shared filesystem). Removing locks may also be
1352 1351 blocked by filesystem permissions.
1353 1352
1354 1353 Setting a lock will prevent other commands from changing the data.
1355 1354 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1356 1355 The set locks are removed when the command exits.
1357 1356
1358 1357 Returns 0 if no locks are held.
1359 1358
1360 1359 """
1361 1360
1362 1361 if opts.get(r'force_lock'):
1363 1362 repo.svfs.unlink('lock')
1364 1363 if opts.get(r'force_wlock'):
1365 1364 repo.vfs.unlink('wlock')
1366 1365 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1367 1366 return 0
1368 1367
1369 1368 locks = []
1370 1369 try:
1371 1370 if opts.get(r'set_wlock'):
1372 1371 try:
1373 1372 locks.append(repo.wlock(False))
1374 1373 except error.LockHeld:
1375 1374 raise error.Abort(_('wlock is already held'))
1376 1375 if opts.get(r'set_lock'):
1377 1376 try:
1378 1377 locks.append(repo.lock(False))
1379 1378 except error.LockHeld:
1380 1379 raise error.Abort(_('lock is already held'))
1381 1380 if len(locks):
1382 1381 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1383 1382 return 0
1384 1383 finally:
1385 1384 release(*locks)
1386 1385
1387 1386 now = time.time()
1388 1387 held = 0
1389 1388
1390 1389 def report(vfs, name, method):
1391 1390 # this causes stale locks to get reaped for more accurate reporting
1392 1391 try:
1393 1392 l = method(False)
1394 1393 except error.LockHeld:
1395 1394 l = None
1396 1395
1397 1396 if l:
1398 1397 l.release()
1399 1398 else:
1400 1399 try:
1401 1400 st = vfs.lstat(name)
1402 1401 age = now - st[stat.ST_MTIME]
1403 1402 user = util.username(st.st_uid)
1404 1403 locker = vfs.readlock(name)
1405 1404 if ":" in locker:
1406 1405 host, pid = locker.split(':')
1407 1406 if host == socket.gethostname():
1408 1407 locker = 'user %s, process %s' % (user, pid)
1409 1408 else:
1410 1409 locker = 'user %s, process %s, host %s' \
1411 1410 % (user, pid, host)
1412 1411 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1413 1412 return 1
1414 1413 except OSError as e:
1415 1414 if e.errno != errno.ENOENT:
1416 1415 raise
1417 1416
1418 1417 ui.write(("%-6s free\n") % (name + ":"))
1419 1418 return 0
1420 1419
1421 1420 held += report(repo.svfs, "lock", repo.lock)
1422 1421 held += report(repo.vfs, "wlock", repo.wlock)
1423 1422
1424 1423 return held
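# Example of the report() output format produced above (the user, pid and age
# are hypothetical):
#
#   $ hg debuglocks
#   lock:  free
#   wlock: user alice, process 12345 (7s)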
1425 1424
1426 1425 @command('debugmergestate', [], '')
1427 1426 def debugmergestate(ui, repo, *args):
1428 1427 """print merge state
1429 1428
1430 1429 Use --verbose to print out information about whether v1 or v2 merge state
1431 1430 was chosen."""
1432 1431 def _hashornull(h):
1433 1432 if h == nullhex:
1434 1433 return 'null'
1435 1434 else:
1436 1435 return h
1437 1436
1438 1437 def printrecords(version):
1439 1438 ui.write(('* version %d records\n') % version)
1440 1439 if version == 1:
1441 1440 records = v1records
1442 1441 else:
1443 1442 records = v2records
1444 1443
1445 1444 for rtype, record in records:
1446 1445 # pretty print some record types
1447 1446 if rtype == 'L':
1448 1447 ui.write(('local: %s\n') % record)
1449 1448 elif rtype == 'O':
1450 1449 ui.write(('other: %s\n') % record)
1451 1450 elif rtype == 'm':
1452 1451 driver, mdstate = record.split('\0', 1)
1453 1452 ui.write(('merge driver: %s (state "%s")\n')
1454 1453 % (driver, mdstate))
1455 1454 elif rtype in 'FDC':
1456 1455 r = record.split('\0')
1457 1456 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1458 1457 if version == 1:
1459 1458 onode = 'not stored in v1 format'
1460 1459 flags = r[7]
1461 1460 else:
1462 1461 onode, flags = r[7:9]
1463 1462 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1464 1463 % (f, rtype, state, _hashornull(hash)))
1465 1464 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1466 1465 ui.write((' ancestor path: %s (node %s)\n')
1467 1466 % (afile, _hashornull(anode)))
1468 1467 ui.write((' other path: %s (node %s)\n')
1469 1468 % (ofile, _hashornull(onode)))
1470 1469 elif rtype == 'f':
1471 1470 filename, rawextras = record.split('\0', 1)
1472 1471 extras = rawextras.split('\0')
1473 1472 i = 0
1474 1473 extrastrings = []
1475 1474 while i < len(extras):
1476 1475 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1477 1476 i += 2
1478 1477
1479 1478 ui.write(('file extras: %s (%s)\n')
1480 1479 % (filename, ', '.join(extrastrings)))
1481 1480 elif rtype == 'l':
1482 1481 labels = record.split('\0', 2)
1483 1482 labels = [l for l in labels if len(l) > 0]
1484 1483 ui.write(('labels:\n'))
1485 1484 ui.write((' local: %s\n' % labels[0]))
1486 1485 ui.write((' other: %s\n' % labels[1]))
1487 1486 if len(labels) > 2:
1488 1487 ui.write((' base: %s\n' % labels[2]))
1489 1488 else:
1490 1489 ui.write(('unrecognized entry: %s\t%s\n')
1491 1490 % (rtype, record.replace('\0', '\t')))
1492 1491
1493 1492 # Avoid mergestate.read() since it may raise an exception for unsupported
1494 1493 # merge state records. We shouldn't be doing this, but this is OK since this
1495 1494 # command is pretty low-level.
1496 1495 ms = mergemod.mergestate(repo)
1497 1496
1498 1497 # sort so that reasonable information is on top
1499 1498 v1records = ms._readrecordsv1()
1500 1499 v2records = ms._readrecordsv2()
1501 1500 order = 'LOml'
1502 1501 def key(r):
1503 1502 idx = order.find(r[0])
1504 1503 if idx == -1:
1505 1504 return (1, r[1])
1506 1505 else:
1507 1506 return (0, idx)
1508 1507 v1records.sort(key=key)
1509 1508 v2records.sort(key=key)
1510 1509
1511 1510 if not v1records and not v2records:
1512 1511 ui.write(('no merge state found\n'))
1513 1512 elif not v2records:
1514 1513 ui.note(('no version 2 merge state\n'))
1515 1514 printrecords(1)
1516 1515 elif ms._v1v2match(v1records, v2records):
1517 1516 ui.note(('v1 and v2 states match: using v2\n'))
1518 1517 printrecords(2)
1519 1518 else:
1520 1519 ui.note(('v1 and v2 states mismatch: using v1\n'))
1521 1520 printrecords(1)
1522 1521 if ui.verbose:
1523 1522 printrecords(2)
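# Sketch of printrecords() output for an unresolved file entry; the file name
# and hashes are hypothetical and abbreviated here for readability:
#
#   * version 2 records
#   local: 57653b9f834a...
#   other: 95c24699272e...
#   file: foo (record type "F", state "u", hash 60b27f004e45...)
#    local path: foo (flags "")
#    ancestor path: foo (node 2ed2a3912a0b...)
#    other path: foo (node 6f4310b00b9a...)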
1524 1523
1525 1524 @command('debugnamecomplete', [], _('NAME...'))
1526 1525 def debugnamecomplete(ui, repo, *args):
1527 1526 '''complete "names" - tags, open branch names, bookmark names'''
1528 1527
1529 1528 names = set()
1530 1529 # since we previously only listed open branches, we will handle that
1531 1530 # specially (after this for loop)
1532 1531 for name, ns in repo.names.iteritems():
1533 1532 if name != 'branches':
1534 1533 names.update(ns.listnames(repo))
1535 1534 names.update(tag for (tag, heads, tip, closed)
1536 1535 in repo.branchmap().iterbranches() if not closed)
1537 1536 completions = set()
1538 1537 if not args:
1539 1538 args = ['']
1540 1539 for a in args:
1541 1540 completions.update(n for n in names if n.startswith(a))
1542 1541 ui.write('\n'.join(sorted(completions)))
1543 1542 ui.write('\n')
1544 1543
1545 1544 @command('debugobsolete',
1546 1545 [('', 'flags', 0, _('markers flag')),
1547 1546 ('', 'record-parents', False,
1548 1547 _('record parent information for the precursor')),
1549 1548 ('r', 'rev', [], _('display markers relevant to REV')),
1550 1549 ('', 'exclusive', False, _('restrict display to markers only '
1551 1550 'relevant to REV')),
1552 1551 ('', 'index', False, _('display index of the marker')),
1553 1552 ('', 'delete', [], _('delete markers specified by indices')),
1554 1553 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1555 1554 _('[OBSOLETED [REPLACEMENT ...]]'))
1556 1555 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1557 1556 """create arbitrary obsolete marker
1558 1557
1559 1558 With no arguments, displays the list of obsolescence markers."""
1560 1559
1561 1560 opts = pycompat.byteskwargs(opts)
1562 1561
1563 1562 def parsenodeid(s):
1564 1563 try:
1565 1564 # We do not use revsingle/revrange functions here to accept
1566 1565 # arbitrary node identifiers, possibly not present in the
1567 1566 # local repository.
1568 1567 n = bin(s)
1569 1568 if len(n) != len(nullid):
1570 1569 raise TypeError()
1571 1570 return n
1572 1571 except TypeError:
1573 1572 raise error.Abort('changeset references must be full hexadecimal '
1574 1573 'node identifiers')
1575 1574
1576 1575 if opts.get('delete'):
1577 1576 indices = []
1578 1577 for v in opts.get('delete'):
1579 1578 try:
1580 1579 indices.append(int(v))
1581 1580 except ValueError:
1582 1581 raise error.Abort(_('invalid index value: %r') % v,
1583 1582 hint=_('use integers for indices'))
1584 1583
1585 1584 if repo.currenttransaction():
1586 1585 raise error.Abort(_('cannot delete obsmarkers in the middle '
1587 1586 'of transaction.'))
1588 1587
1589 1588 with repo.lock():
1590 1589 n = repair.deleteobsmarkers(repo.obsstore, indices)
1591 1590 ui.write(_('deleted %i obsolescence markers\n') % n)
1592 1591
1593 1592 return
1594 1593
1595 1594 if precursor is not None:
1596 1595 if opts['rev']:
1597 1596 raise error.Abort('cannot select revision when creating marker')
1598 1597 metadata = {}
1599 1598 metadata['user'] = opts['user'] or ui.username()
1600 1599 succs = tuple(parsenodeid(succ) for succ in successors)
1601 1600 l = repo.lock()
1602 1601 try:
1603 1602 tr = repo.transaction('debugobsolete')
1604 1603 try:
1605 1604 date = opts.get('date')
1606 1605 if date:
1607 1606 date = dateutil.parsedate(date)
1608 1607 else:
1609 1608 date = None
1610 1609 prec = parsenodeid(precursor)
1611 1610 parents = None
1612 1611 if opts['record_parents']:
1613 1612 if prec not in repo.unfiltered():
1614 1613 raise error.Abort('cannot use --record-parents on '
1615 1614 'unknown changesets')
1616 1615 parents = repo.unfiltered()[prec].parents()
1617 1616 parents = tuple(p.node() for p in parents)
1618 1617 repo.obsstore.create(tr, prec, succs, opts['flags'],
1619 1618 parents=parents, date=date,
1620 1619 metadata=metadata, ui=ui)
1621 1620 tr.close()
1622 1621 except ValueError as exc:
1623 1622 raise error.Abort(_('bad obsmarker input: %s') %
1624 1623 pycompat.bytestr(exc))
1625 1624 finally:
1626 1625 tr.release()
1627 1626 finally:
1628 1627 l.release()
1629 1628 else:
1630 1629 if opts['rev']:
1631 1630 revs = scmutil.revrange(repo, opts['rev'])
1632 1631 nodes = [repo[r].node() for r in revs]
1633 1632 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1634 1633 exclusive=opts['exclusive']))
1635 1634 markers.sort(key=lambda x: x._data)
1636 1635 else:
1637 1636 markers = obsutil.getmarkers(repo)
1638 1637
1639 1638 markerstoiter = markers
1640 1639 isrelevant = lambda m: True
1641 1640 if opts.get('rev') and opts.get('index'):
1642 1641 markerstoiter = obsutil.getmarkers(repo)
1643 1642 markerset = set(markers)
1644 1643 isrelevant = lambda m: m in markerset
1645 1644
1646 1645 fm = ui.formatter('debugobsolete', opts)
1647 1646 for i, m in enumerate(markerstoiter):
1648 1647 if not isrelevant(m):
1649 1648 # marker can be irrelevant when we're iterating over a set
1650 1649 # of markers (markerstoiter) which is bigger than the set
1651 1650 # of markers we want to display (markers)
1652 1651 # this can happen if both --index and --rev options are
1653 1652 # provided and thus we need to iterate over all of the markers
1654 1653 # to get the correct indices, but only display the ones that
1655 1654 # are relevant to --rev value
1656 1655 continue
1657 1656 fm.startitem()
1658 1657 ind = i if opts.get('index') else None
1659 1658 cmdutil.showmarker(fm, m, index=ind)
1660 1659 fm.end()
1661 1660
1662 1661 @command('debugpathcomplete',
1663 1662 [('f', 'full', None, _('complete an entire path')),
1664 1663 ('n', 'normal', None, _('show only normal files')),
1665 1664 ('a', 'added', None, _('show only added files')),
1666 1665 ('r', 'removed', None, _('show only removed files'))],
1667 1666 _('FILESPEC...'))
1668 1667 def debugpathcomplete(ui, repo, *specs, **opts):
1669 1668 '''complete part or all of a tracked path
1670 1669
1671 1670 This command supports shells that offer path name completion. It
1672 1671 currently completes only files already known to the dirstate.
1673 1672
1674 1673 Completion extends only to the next path segment unless
1675 1674 --full is specified, in which case entire paths are used.'''
1676 1675
1677 1676 def complete(path, acceptable):
1678 1677 dirstate = repo.dirstate
1679 1678 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1680 1679 rootdir = repo.root + pycompat.ossep
1681 1680 if spec != repo.root and not spec.startswith(rootdir):
1682 1681 return [], []
1683 1682 if os.path.isdir(spec):
1684 1683 spec += '/'
1685 1684 spec = spec[len(rootdir):]
1686 1685 fixpaths = pycompat.ossep != '/'
1687 1686 if fixpaths:
1688 1687 spec = spec.replace(pycompat.ossep, '/')
1689 1688 speclen = len(spec)
1690 1689 fullpaths = opts[r'full']
1691 1690 files, dirs = set(), set()
1692 1691 adddir, addfile = dirs.add, files.add
1693 1692 for f, st in dirstate.iteritems():
1694 1693 if f.startswith(spec) and st[0] in acceptable:
1695 1694 if fixpaths:
1696 1695 f = f.replace('/', pycompat.ossep)
1697 1696 if fullpaths:
1698 1697 addfile(f)
1699 1698 continue
1700 1699 s = f.find(pycompat.ossep, speclen)
1701 1700 if s >= 0:
1702 1701 adddir(f[:s])
1703 1702 else:
1704 1703 addfile(f)
1705 1704 return files, dirs
1706 1705
1707 1706 acceptable = ''
1708 1707 if opts[r'normal']:
1709 1708 acceptable += 'nm'
1710 1709 if opts[r'added']:
1711 1710 acceptable += 'a'
1712 1711 if opts[r'removed']:
1713 1712 acceptable += 'r'
1714 1713 cwd = repo.getcwd()
1715 1714 if not specs:
1716 1715 specs = ['.']
1717 1716
1718 1717 files, dirs = set(), set()
1719 1718 for spec in specs:
1720 1719 f, d = complete(spec, acceptable or 'nmar')
1721 1720 files.update(f)
1722 1721 dirs.update(d)
1723 1722 files.update(dirs)
1724 1723 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1725 1724 ui.write('\n')
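# Sketch of the completion behaviour implemented above, assuming a dirstate
# that tracks "src/a.py" and "src/util/b.py" and an existing "src" directory
# (all hypothetical):
#
#   $ hg debugpathcomplete src         # completes to the next path segment
#   src/a.py
#   src/util
#   $ hg debugpathcomplete --full src  # completes entire paths
#   src/a.py
#   src/util/b.py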
1726 1725
1727 1726 @command('debugpeer', [], _('PATH'), norepo=True)
1728 1727 def debugpeer(ui, path):
1729 1728 """establish a connection to a peer repository"""
1730 1729 # Always enable peer request logging. Requires --debug to display
1731 1730 # though.
1732 1731 overrides = {
1733 1732 ('devel', 'debug.peer-request'): True,
1734 1733 }
1735 1734
1736 1735 with ui.configoverride(overrides):
1737 1736 peer = hg.peer(ui, {}, path)
1738 1737
1739 1738 local = peer.local() is not None
1740 1739 canpush = peer.canpush()
1741 1740
1742 1741 ui.write(_('url: %s\n') % peer.url())
1743 1742 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1744 1743 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1745 1744
1746 1745 @command('debugpickmergetool',
1747 1746 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1748 1747 ('', 'changedelete', None, _('emulate merging change and delete')),
1749 1748 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1750 1749 _('[PATTERN]...'),
1751 1750 inferrepo=True)
1752 1751 def debugpickmergetool(ui, repo, *pats, **opts):
1753 1752 """examine which merge tool is chosen for specified file
1754 1753
1755 1754 As described in :hg:`help merge-tools`, Mercurial examines
1756 1755 configurations below in this order to decide which merge tool is
1757 1756 chosen for specified file.
1758 1757
1759 1758 1. ``--tool`` option
1760 1759 2. ``HGMERGE`` environment variable
1761 1760 3. configurations in ``merge-patterns`` section
1762 1761 4. configuration of ``ui.merge``
1763 1762 5. configurations in ``merge-tools`` section
1764 1763 6. ``hgmerge`` tool (for historical reasons only)
1765 1764 7. default tool for fallback (``:merge`` or ``:prompt``)
1766 1765
1767 1766 This command writes out the examination result in the style below::
1768 1767
1769 1768 FILE = MERGETOOL
1770 1769
1771 1770 By default, all files known in the first parent context of the
1772 1771 working directory are examined. Use file patterns and/or -I/-X
1773 1772 options to limit target files. -r/--rev is also useful to examine
1774 1773 files in another context without actually updating to it.
1775 1774
1776 1775 With --debug, this command also shows warning messages emitted while
1777 1776 matching against ``merge-patterns`` and so on. It is recommended to
1778 1777 use this option with explicit file patterns and/or -I/-X options,
1779 1778 because this option increases the amount of output per file according
1780 1779 to the configurations in hgrc.
1781 1780
1782 1781 With -v/--verbose, this command first shows the configurations below
1783 1782 (only those that are actually specified).
1784 1783
1785 1784 - ``--tool`` option
1786 1785 - ``HGMERGE`` environment variable
1787 1786 - configuration of ``ui.merge``
1788 1787
1789 1788 If a merge tool is chosen before matching against
1790 1789 ``merge-patterns``, this command can't show any helpful
1791 1790 information, even with --debug. In such cases, the information above
1792 1791 is useful for understanding why a merge tool was chosen.
1793 1792 """
1794 1793 opts = pycompat.byteskwargs(opts)
1795 1794 overrides = {}
1796 1795 if opts['tool']:
1797 1796 overrides[('ui', 'forcemerge')] = opts['tool']
1798 1797 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1799 1798
1800 1799 with ui.configoverride(overrides, 'debugmergepatterns'):
1801 1800 hgmerge = encoding.environ.get("HGMERGE")
1802 1801 if hgmerge is not None:
1803 1802 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1804 1803 uimerge = ui.config("ui", "merge")
1805 1804 if uimerge:
1806 1805 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1807 1806
1808 1807 ctx = scmutil.revsingle(repo, opts.get('rev'))
1809 1808 m = scmutil.match(ctx, pats, opts)
1810 1809 changedelete = opts['changedelete']
1811 1810 for path in ctx.walk(m):
1812 1811 fctx = ctx[path]
1813 1812 try:
1814 1813 if not ui.debugflag:
1815 1814 ui.pushbuffer(error=True)
1816 1815 tool, toolpath = filemerge._picktool(repo, ui, path,
1817 1816 fctx.isbinary(),
1818 1817 'l' in fctx.flags(),
1819 1818 changedelete)
1820 1819 finally:
1821 1820 if not ui.debugflag:
1822 1821 ui.popbuffer()
1823 1822 ui.write(('%s = %s\n') % (path, tool))
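# Illustrative run of the examination loop above; the file name is
# hypothetical, and the tool is forced via --tool so the answer is known:
#
#   $ hg debugpickmergetool --tool :merge3 foo.c
#   foo.c = :merge3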
1824 1823
1825 1824 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1826 1825 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1827 1826 '''access the pushkey key/value protocol
1828 1827
1829 1828 With two args, list the keys in the given namespace.
1830 1829
1831 1830 With five args, set a key to new if it currently is set to old.
1832 1831 Reports success or failure.
1833 1832 '''
1834 1833
1835 1834 target = hg.peer(ui, {}, repopath)
1836 1835 if keyinfo:
1837 1836 key, old, new = keyinfo
1838 1837 with target.commandexecutor() as e:
1839 1838 r = e.callcommand('pushkey', {
1840 1839 'namespace': namespace,
1841 1840 'key': key,
1842 1841 'old': old,
1843 1842 'new': new,
1844 1843 }).result()
1845 1844
1846 1845 ui.status(pycompat.bytestr(r) + '\n')
1847 1846 return not r
1848 1847 else:
1849 1848 for k, v in sorted(target.listkeys(namespace).iteritems()):
1850 1849 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1851 1850 stringutil.escapestr(v)))
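# Illustrative invocations (repository path, bookmark name and node are
# hypothetical; the node is abbreviated here):
#
#   $ hg debugpushkey ../remote bookmarks                     # two args: list keys
#   $ hg debugpushkey ../remote bookmarks mybook '' c3f1ca29...
#                                                             # five args: set if old matches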
1852 1851
1853 1852 @command('debugpvec', [], _('A B'))
1854 1853 def debugpvec(ui, repo, a, b=None):
1855 1854 ca = scmutil.revsingle(repo, a)
1856 1855 cb = scmutil.revsingle(repo, b)
1857 1856 pa = pvec.ctxpvec(ca)
1858 1857 pb = pvec.ctxpvec(cb)
1859 1858 if pa == pb:
1860 1859 rel = "="
1861 1860 elif pa > pb:
1862 1861 rel = ">"
1863 1862 elif pa < pb:
1864 1863 rel = "<"
1865 1864 elif pa | pb:
1866 1865 rel = "|"
1867 1866 ui.write(_("a: %s\n") % pa)
1868 1867 ui.write(_("b: %s\n") % pb)
1869 1868 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1870 1869 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1871 1870 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1872 1871 pa.distance(pb), rel))
1873 1872
1874 1873 @command('debugrebuilddirstate|debugrebuildstate',
1875 1874 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1876 1875 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1877 1876 'the working copy parent')),
1878 1877 ],
1879 1878 _('[-r REV]'))
1880 1879 def debugrebuilddirstate(ui, repo, rev, **opts):
1881 1880 """rebuild the dirstate as it would look like for the given revision
1882 1881
1883 1882 If no revision is specified, the first current parent will be used.
1884 1883
1885 1884 The dirstate will be set to the files of the given revision.
1886 1885 The actual working directory content or existing dirstate
1887 1886 information such as adds or removes is not considered.
1888 1887
1889 1888 ``minimal`` will only rebuild the dirstate status for files that claim to be
1890 1889 tracked but are not in the parent manifest, or that exist in the parent
1891 1890 manifest but are not in the dirstate. It will not change adds, removes, or
1892 1891 modified files that are in the working copy parent.
1893 1892
1894 1893 One use of this command is to make the next :hg:`status` invocation
1895 1894 check the actual file content.
1896 1895 """
1897 1896 ctx = scmutil.revsingle(repo, rev)
1898 1897 with repo.wlock():
1899 1898 dirstate = repo.dirstate
1900 1899 changedfiles = None
1901 1900 # See command doc for what minimal does.
1902 1901 if opts.get(r'minimal'):
1903 1902 manifestfiles = set(ctx.manifest().keys())
1904 1903 dirstatefiles = set(dirstate)
1905 1904 manifestonly = manifestfiles - dirstatefiles
1906 1905 dsonly = dirstatefiles - manifestfiles
1907 1906 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1908 1907 changedfiles = manifestonly | dsnotadded
1909 1908
1910 1909 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1911 1910
1912 1911 @command('debugrebuildfncache', [], '')
1913 1912 def debugrebuildfncache(ui, repo):
1914 1913 """rebuild the fncache file"""
1915 1914 repair.rebuildfncache(ui, repo)
1916 1915
1917 1916 @command('debugrename',
1918 1917 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1919 1918 _('[-r REV] FILE'))
1920 1919 def debugrename(ui, repo, file1, *pats, **opts):
1921 1920 """dump rename information"""
1922 1921
1923 1922 opts = pycompat.byteskwargs(opts)
1924 1923 ctx = scmutil.revsingle(repo, opts.get('rev'))
1925 1924 m = scmutil.match(ctx, (file1,) + pats, opts)
1926 1925 for abs in ctx.walk(m):
1927 1926 fctx = ctx[abs]
1928 1927 o = fctx.filelog().renamed(fctx.filenode())
1929 1928 rel = m.rel(abs)
1930 1929 if o:
1931 1930 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1932 1931 else:
1933 1932 ui.write(_("%s not renamed\n") % rel)
1934 1933
1935 1934 @command('debugrevlog', cmdutil.debugrevlogopts +
1936 1935 [('d', 'dump', False, _('dump index data'))],
1937 1936 _('-c|-m|FILE'),
1938 1937 optionalrepo=True)
1939 1938 def debugrevlog(ui, repo, file_=None, **opts):
1940 1939 """show data and statistics about a revlog"""
1941 1940 opts = pycompat.byteskwargs(opts)
1942 1941 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1943 1942
1944 1943 if opts.get("dump"):
1945 1944 numrevs = len(r)
1946 1945 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1947 1946 " rawsize totalsize compression heads chainlen\n"))
1948 1947 ts = 0
1949 1948 heads = set()
1950 1949
1951 1950 for rev in xrange(numrevs):
1952 1951 dbase = r.deltaparent(rev)
1953 1952 if dbase == -1:
1954 1953 dbase = rev
1955 1954 cbase = r.chainbase(rev)
1956 1955 clen = r.chainlen(rev)
1957 1956 p1, p2 = r.parentrevs(rev)
1958 1957 rs = r.rawsize(rev)
1959 1958 ts = ts + rs
1960 1959 heads -= set(r.parentrevs(rev))
1961 1960 heads.add(rev)
1962 1961 try:
1963 1962 compression = ts / r.end(rev)
1964 1963 except ZeroDivisionError:
1965 1964 compression = 0
1966 1965 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1967 1966 "%11d %5d %8d\n" %
1968 1967 (rev, p1, p2, r.start(rev), r.end(rev),
1969 1968 r.start(dbase), r.start(cbase),
1970 1969 r.start(p1), r.start(p2),
1971 1970 rs, ts, compression, len(heads), clen))
1972 1971 return 0
1973 1972
1974 1973 v = r.version
1975 1974 format = v & 0xFFFF
1976 1975 flags = []
1977 1976 gdelta = False
1978 1977 if v & revlog.FLAG_INLINE_DATA:
1979 1978 flags.append('inline')
1980 1979 if v & revlog.FLAG_GENERALDELTA:
1981 1980 gdelta = True
1982 1981 flags.append('generaldelta')
1983 1982 if not flags:
1984 1983 flags = ['(none)']
1985 1984
1986 1985 nummerges = 0
1987 1986 numfull = 0
1988 1987 numprev = 0
1989 1988 nump1 = 0
1990 1989 nump2 = 0
1991 1990 numother = 0
1992 1991 nump1prev = 0
1993 1992 nump2prev = 0
1994 1993 chainlengths = []
1995 1994 chainbases = []
1996 1995 chainspans = []
1997 1996
1998 1997 datasize = [None, 0, 0]
1999 1998 fullsize = [None, 0, 0]
2000 1999 deltasize = [None, 0, 0]
2001 2000 chunktypecounts = {}
2002 2001 chunktypesizes = {}
2003 2002
2004 2003 def addsize(size, l):
2005 2004 if l[0] is None or size < l[0]:
2006 2005 l[0] = size
2007 2006 if size > l[1]:
2008 2007 l[1] = size
2009 2008 l[2] += size
2010 2009
2011 2010 numrevs = len(r)
2012 2011 for rev in xrange(numrevs):
2013 2012 p1, p2 = r.parentrevs(rev)
2014 2013 delta = r.deltaparent(rev)
2015 2014 if format > 0:
2016 2015 addsize(r.rawsize(rev), datasize)
2017 2016 if p2 != nullrev:
2018 2017 nummerges += 1
2019 2018 size = r.length(rev)
2020 2019 if delta == nullrev:
2021 2020 chainlengths.append(0)
2022 2021 chainbases.append(r.start(rev))
2023 2022 chainspans.append(size)
2024 2023 numfull += 1
2025 2024 addsize(size, fullsize)
2026 2025 else:
2027 2026 chainlengths.append(chainlengths[delta] + 1)
2028 2027 baseaddr = chainbases[delta]
2029 2028 revaddr = r.start(rev)
2030 2029 chainbases.append(baseaddr)
2031 2030 chainspans.append((revaddr - baseaddr) + size)
2032 2031 addsize(size, deltasize)
2033 2032 if delta == rev - 1:
2034 2033 numprev += 1
2035 2034 if delta == p1:
2036 2035 nump1prev += 1
2037 2036 elif delta == p2:
2038 2037 nump2prev += 1
2039 2038 elif delta == p1:
2040 2039 nump1 += 1
2041 2040 elif delta == p2:
2042 2041 nump2 += 1
2043 2042 elif delta != nullrev:
2044 2043 numother += 1
2045 2044
2046 2045 # Obtain data on the raw chunks in the revlog.
2047 2046 segment = r._getsegmentforrevs(rev, rev)[1]
2048 2047 if segment:
2049 2048 chunktype = bytes(segment[0:1])
2050 2049 else:
2051 2050 chunktype = 'empty'
2052 2051
2053 2052 if chunktype not in chunktypecounts:
2054 2053 chunktypecounts[chunktype] = 0
2055 2054 chunktypesizes[chunktype] = 0
2056 2055
2057 2056 chunktypecounts[chunktype] += 1
2058 2057 chunktypesizes[chunktype] += size
2059 2058
2060 2059 # Adjust size min value for empty cases
2061 2060 for size in (datasize, fullsize, deltasize):
2062 2061 if size[0] is None:
2063 2062 size[0] = 0
2064 2063
2065 2064 numdeltas = numrevs - numfull
2066 2065 numoprev = numprev - nump1prev - nump2prev
2067 2066 totalrawsize = datasize[2]
2068 2067 datasize[2] /= numrevs
2069 2068 fulltotal = fullsize[2]
2070 2069 fullsize[2] /= numfull
2071 2070 deltatotal = deltasize[2]
2072 2071 if numrevs - numfull > 0:
2073 2072 deltasize[2] /= numrevs - numfull
2074 2073 totalsize = fulltotal + deltatotal
2075 2074 avgchainlen = sum(chainlengths) / numrevs
2076 2075 maxchainlen = max(chainlengths)
2077 2076 maxchainspan = max(chainspans)
2078 2077 compratio = 1
2079 2078 if totalsize:
2080 2079 compratio = totalrawsize / totalsize
2081 2080
2082 2081 basedfmtstr = '%%%dd\n'
2083 2082 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2084 2083
2085 2084 def dfmtstr(max):
2086 2085 return basedfmtstr % len(str(max))
2087 2086 def pcfmtstr(max, padding=0):
2088 2087 return basepcfmtstr % (len(str(max)), ' ' * padding)
2089 2088
2090 2089 def pcfmt(value, total):
2091 2090 if total:
2092 2091 return (value, 100 * float(value) / total)
2093 2092 else:
2094 2093 return value, 100.0
2095 2094
2096 2095 ui.write(('format : %d\n') % format)
2097 2096 ui.write(('flags : %s\n') % ', '.join(flags))
2098 2097
2099 2098 ui.write('\n')
2100 2099 fmt = pcfmtstr(totalsize)
2101 2100 fmt2 = dfmtstr(totalsize)
2102 2101 ui.write(('revisions : ') + fmt2 % numrevs)
2103 2102 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2104 2103 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2105 2104 ui.write(('revisions : ') + fmt2 % numrevs)
2106 2105 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2107 2106 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2108 2107 ui.write(('revision size : ') + fmt2 % totalsize)
2109 2108 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2110 2109 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2111 2110
2112 2111 def fmtchunktype(chunktype):
2113 2112 if chunktype == 'empty':
2114 2113 return ' %s : ' % chunktype
2115 2114 elif chunktype in pycompat.bytestr(string.ascii_letters):
2116 2115 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2117 2116 else:
2118 2117 return ' 0x%s : ' % hex(chunktype)
2119 2118
2120 2119 ui.write('\n')
2121 2120 ui.write(('chunks : ') + fmt2 % numrevs)
2122 2121 for chunktype in sorted(chunktypecounts):
2123 2122 ui.write(fmtchunktype(chunktype))
2124 2123 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2125 2124 ui.write(('chunks size : ') + fmt2 % totalsize)
2126 2125 for chunktype in sorted(chunktypecounts):
2127 2126 ui.write(fmtchunktype(chunktype))
2128 2127 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2129 2128
2130 2129 ui.write('\n')
2131 2130 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2132 2131 ui.write(('avg chain length : ') + fmt % avgchainlen)
2133 2132 ui.write(('max chain length : ') + fmt % maxchainlen)
2134 2133 ui.write(('max chain reach : ') + fmt % maxchainspan)
2135 2134 ui.write(('compression ratio : ') + fmt % compratio)
2136 2135
2137 2136 if format > 0:
2138 2137 ui.write('\n')
2139 2138 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2140 2139 % tuple(datasize))
2141 2140 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2142 2141 % tuple(fullsize))
2143 2142 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2144 2143 % tuple(deltasize))
2145 2144
2146 2145 if numdeltas > 0:
2147 2146 ui.write('\n')
2148 2147 fmt = pcfmtstr(numdeltas)
2149 2148 fmt2 = pcfmtstr(numdeltas, 4)
2150 2149 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2151 2150 if numprev > 0:
2152 2151 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2153 2152 numprev))
2154 2153 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2155 2154 numprev))
2156 2155 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2157 2156 numprev))
2158 2157 if gdelta:
2159 2158 ui.write(('deltas against p1 : ')
2160 2159 + fmt % pcfmt(nump1, numdeltas))
2161 2160 ui.write(('deltas against p2 : ')
2162 2161 + fmt % pcfmt(nump2, numdeltas))
2163 2162 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2164 2163 numdeltas))
2165 2164
2166 2165 @command('debugrevspec',
2167 2166 [('', 'optimize', None,
2168 2167 _('print parsed tree after optimizing (DEPRECATED)')),
2169 2168 ('', 'show-revs', True, _('print list of result revisions (default)')),
2170 2169 ('s', 'show-set', None, _('print internal representation of result set')),
2171 2170 ('p', 'show-stage', [],
2172 2171 _('print parsed tree at the given stage'), _('NAME')),
2173 2172 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2174 2173 ('', 'verify-optimized', False, _('verify optimized result')),
2175 2174 ],
2176 2175 ('REVSPEC'))
2177 2176 def debugrevspec(ui, repo, expr, **opts):
2178 2177 """parse and apply a revision specification
2179 2178
2180 2179 Use -p/--show-stage option to print the parsed tree at the given stages.
2181 2180 Use -p all to print tree at every stage.
2182 2181
2183 2182 Use --no-show-revs option with -s or -p to print only the set
2184 2183 representation or the parsed tree respectively.
2185 2184
2186 2185 Use --verify-optimized to compare the optimized result with the unoptimized
2187 2186 one. Returns 1 if the optimized result differs.
2188 2187 """
2189 2188 opts = pycompat.byteskwargs(opts)
2190 2189 aliases = ui.configitems('revsetalias')
2191 2190 stages = [
2192 2191 ('parsed', lambda tree: tree),
2193 2192 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2194 2193 ui.warn)),
2195 2194 ('concatenated', revsetlang.foldconcat),
2196 2195 ('analyzed', revsetlang.analyze),
2197 2196 ('optimized', revsetlang.optimize),
2198 2197 ]
2199 2198 if opts['no_optimized']:
2200 2199 stages = stages[:-1]
2201 2200 if opts['verify_optimized'] and opts['no_optimized']:
2202 2201 raise error.Abort(_('cannot use --verify-optimized with '
2203 2202 '--no-optimized'))
2204 2203 stagenames = set(n for n, f in stages)
2205 2204
2206 2205 showalways = set()
2207 2206 showchanged = set()
2208 2207 if ui.verbose and not opts['show_stage']:
2209 2208 # show parsed tree by --verbose (deprecated)
2210 2209 showalways.add('parsed')
2211 2210 showchanged.update(['expanded', 'concatenated'])
2212 2211 if opts['optimize']:
2213 2212 showalways.add('optimized')
2214 2213 if opts['show_stage'] and opts['optimize']:
2215 2214 raise error.Abort(_('cannot use --optimize with --show-stage'))
2216 2215 if opts['show_stage'] == ['all']:
2217 2216 showalways.update(stagenames)
2218 2217 else:
2219 2218 for n in opts['show_stage']:
2220 2219 if n not in stagenames:
2221 2220 raise error.Abort(_('invalid stage name: %s') % n)
2222 2221 showalways.update(opts['show_stage'])
2223 2222
2224 2223 treebystage = {}
2225 2224 printedtree = None
2226 2225 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2227 2226 for n, f in stages:
2228 2227 treebystage[n] = tree = f(tree)
2229 2228 if n in showalways or (n in showchanged and tree != printedtree):
2230 2229 if opts['show_stage'] or n != 'parsed':
2231 2230 ui.write(("* %s:\n") % n)
2232 2231 ui.write(revsetlang.prettyformat(tree), "\n")
2233 2232 printedtree = tree
2234 2233
2235 2234 if opts['verify_optimized']:
2236 2235 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2237 2236 brevs = revset.makematcher(treebystage['optimized'])(repo)
2238 2237 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2239 2238 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2240 2239 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2241 2240 arevs = list(arevs)
2242 2241 brevs = list(brevs)
2243 2242 if arevs == brevs:
2244 2243 return 0
2245 2244 ui.write(('--- analyzed\n'), label='diff.file_a')
2246 2245 ui.write(('+++ optimized\n'), label='diff.file_b')
2247 2246 sm = difflib.SequenceMatcher(None, arevs, brevs)
2248 2247 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2249 2248 if tag in ('delete', 'replace'):
2250 2249 for c in arevs[alo:ahi]:
2251 2250 ui.write('-%s\n' % c, label='diff.deleted')
2252 2251 if tag in ('insert', 'replace'):
2253 2252 for c in brevs[blo:bhi]:
2254 2253 ui.write('+%s\n' % c, label='diff.inserted')
2255 2254 if tag == 'equal':
2256 2255 for c in arevs[alo:ahi]:
2257 2256 ui.write(' %s\n' % c)
2258 2257 return 1
2259 2258
2260 2259 func = revset.makematcher(tree)
2261 2260 revs = func(repo)
2262 2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2263 2262 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2264 2263 if not opts['show_revs']:
2265 2264 return
2266 2265 for c in revs:
2267 2266 ui.write("%d\n" % c)
2268 2267
2269 2268 @command('debugserve', [
2270 2269 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2271 2270 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2272 2271 ('', 'logiofile', '', _('file to log server I/O to')),
2273 2272 ], '')
2274 2273 def debugserve(ui, repo, **opts):
2275 2274 """run a server with advanced settings
2276 2275
2277 2276 This command is similar to :hg:`serve`. It exists partially as a
2278 2277 workaround for the fact that ``hg serve --stdio`` must have specific
2279 2278 arguments for security reasons.
2280 2279 """
2281 2280 opts = pycompat.byteskwargs(opts)
2282 2281
2283 2282 if not opts['sshstdio']:
2284 2283 raise error.Abort(_('only --sshstdio is currently supported'))
2285 2284
2286 2285 logfh = None
2287 2286
2288 2287 if opts['logiofd'] and opts['logiofile']:
2289 2288 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2290 2289
2291 2290 if opts['logiofd']:
2292 2291 # Line buffered because output is line based.
2293 2292 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2294 2293 elif opts['logiofile']:
2295 2294 logfh = open(opts['logiofile'], 'ab', 1)
2296 2295
2297 2296 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2298 2297 s.serve_forever()
2299 2298
2300 2299 @command('debugsetparents', [], _('REV1 [REV2]'))
2301 2300 def debugsetparents(ui, repo, rev1, rev2=None):
2302 2301 """manually set the parents of the current working directory
2303 2302
2304 2303 This is useful for writing repository conversion tools, but should
2305 2304 be used with care. For example, neither the working directory nor the
2306 2305 dirstate is updated, so file status may be incorrect after running this
2307 2306 command.
2308 2307
2309 2308 Returns 0 on success.
2310 2309 """
2311 2310
2312 2311 node1 = scmutil.revsingle(repo, rev1).node()
2313 2312 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2314 2313
2315 2314 with repo.wlock():
2316 2315 repo.setparents(node1, node2)
2317 2316
2318 2317 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2319 2318 def debugssl(ui, repo, source=None, **opts):
2320 2319 '''test a secure connection to a server
2321 2320
2322 2321 This builds the certificate chain for the server on Windows, installing the
2323 2322 missing intermediates and trusted root via Windows Update if necessary. It
2324 2323 does nothing on other platforms.
2325 2324
2326 2325 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2327 2326 that server is used. See :hg:`help urls` for more information.
2328 2327
2329 2328 If the update succeeds, retry the original operation. Otherwise, the cause
2330 2329 of the SSL error is likely another issue.
2331 2330 '''
2332 2331 if not pycompat.iswindows:
2333 2332 raise error.Abort(_('certificate chain building is only possible on '
2334 2333 'Windows'))
2335 2334
2336 2335 if not source:
2337 2336 if not repo:
2338 2337 raise error.Abort(_("there is no Mercurial repository here, and no "
2339 2338 "server specified"))
2340 2339 source = "default"
2341 2340
2342 2341 source, branches = hg.parseurl(ui.expandpath(source))
2343 2342 url = util.url(source)
2344 2343 addr = None
2345 2344
2346 2345 defaultport = {'https': 443, 'ssh': 22}
2347 2346 if url.scheme in defaultport:
2348 2347 try:
2349 2348 addr = (url.host, int(url.port or defaultport[url.scheme]))
2350 2349 except ValueError:
2351 2350 raise error.Abort(_("malformed port number in URL"))
2352 2351 else:
2353 2352 raise error.Abort(_("only https and ssh connections are supported"))
2354 2353
2355 2354 from . import win32
2356 2355
2357 2356 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2358 2357 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2359 2358
2360 2359 try:
2361 2360 s.connect(addr)
2362 2361 cert = s.getpeercert(True)
2363 2362
2364 2363 ui.status(_('checking the certificate chain for %s\n') % url.host)
2365 2364
2366 2365 complete = win32.checkcertificatechain(cert, build=False)
2367 2366
2368 2367 if not complete:
2369 2368 ui.status(_('certificate chain is incomplete, updating... '))
2370 2369
2371 2370 if not win32.checkcertificatechain(cert):
2372 2371 ui.status(_('failed.\n'))
2373 2372 else:
2374 2373 ui.status(_('done.\n'))
2375 2374 else:
2376 2375 ui.status(_('full certificate chain is available\n'))
2377 2376 finally:
2378 2377 s.close()
2379 2378
2380 2379 @command('debugsub',
2381 2380 [('r', 'rev', '',
2382 2381 _('revision to check'), _('REV'))],
2383 2382 _('[-r REV] [REV]'))
2384 2383 def debugsub(ui, repo, rev=None):
2385 2384 ctx = scmutil.revsingle(repo, rev, None)
2386 2385 for k, v in sorted(ctx.substate.items()):
2387 2386 ui.write(('path %s\n') % k)
2388 2387 ui.write((' source %s\n') % v[0])
2389 2388 ui.write((' revision %s\n') % v[1])
2390 2389
2391 2390 @command('debugsuccessorssets',
2392 2391 [('', 'closest', False, _('return closest successors sets only'))],
2393 2392 _('[REV]'))
2394 2393 def debugsuccessorssets(ui, repo, *revs, **opts):
2395 2394 """show set of successors for revision
2396 2395
2397 2396 A successors set of changeset A is a consistent group of revisions that
2398 2397 succeed A. It contains non-obsolete changesets only unless closests
2399 2398 successors set is set.
2400 2399
2401 2400 In most cases a changeset A has a single successors set containing a single
2402 2401 successor (changeset A replaced by A').
2403 2402
2404 2403 A changeset that is made obsolete with no successors is called "pruned".
2405 2404 Such changesets have no successors sets at all.
2406 2405
2407 2406 A changeset that has been "split" will have a successors set containing
2408 2407 more than one successor.
2409 2408
2410 2409 A changeset that has been rewritten in multiple different ways is called
2411 2410 "divergent". Such changesets have multiple successor sets (each of which
2412 2411 may also be split, i.e. have multiple successors).
2413 2412
2414 2413 Results are displayed as follows::
2415 2414
2416 2415 <rev1>
2417 2416 <successors-1A>
2418 2417 <rev2>
2419 2418 <successors-2A>
2420 2419 <successors-2B1> <successors-2B2> <successors-2B3>
2421 2420
2422 2421 Here rev2 has two possible (i.e. divergent) successors sets. The first
2423 2422 holds one element, whereas the second holds three (i.e. the changeset has
2424 2423 been split).
2425 2424 """
2426 2425 # passed to successorssets caching computation from one call to another
2427 2426 cache = {}
2428 2427 ctx2str = bytes
2429 2428 node2str = short
2430 2429 for rev in scmutil.revrange(repo, revs):
2431 2430 ctx = repo[rev]
2432 2431 ui.write('%s\n' % ctx2str(ctx))
2433 2432 for succsset in obsutil.successorssets(repo, ctx.node(),
2434 2433 closest=opts[r'closest'],
2435 2434 cache=cache):
2436 2435 if succsset:
2437 2436 ui.write(' ')
2438 2437 ui.write(node2str(succsset[0]))
2439 2438 for node in succsset[1:]:
2440 2439 ui.write(' ')
2441 2440 ui.write(node2str(node))
2442 2441 ui.write('\n')
2443 2442
2444 2443 @command('debugtemplate',
2445 2444 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2446 2445 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2447 2446 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2448 2447 optionalrepo=True)
2449 2448 def debugtemplate(ui, repo, tmpl, **opts):
2450 2449 """parse and apply a template
2451 2450
2452 2451 If -r/--rev is given, the template is processed as a log template and
2453 2452 applied to the given changesets. Otherwise, it is processed as a generic
2454 2453 template.
2455 2454
2456 2455 Use --verbose to print the parsed tree.
2457 2456 """
2458 2457 revs = None
2459 2458 if opts[r'rev']:
2460 2459 if repo is None:
2461 2460 raise error.RepoError(_('there is no Mercurial repository here '
2462 2461 '(.hg not found)'))
2463 2462 revs = scmutil.revrange(repo, opts[r'rev'])
2464 2463
2465 2464 props = {}
2466 2465 for d in opts[r'define']:
2467 2466 try:
2468 2467 k, v = (e.strip() for e in d.split('=', 1))
2469 2468 if not k or k == 'ui':
2470 2469 raise ValueError
2471 2470 props[k] = v
2472 2471 except ValueError:
2473 2472 raise error.Abort(_('malformed keyword definition: %s') % d)
2474 2473
2475 2474 if ui.verbose:
2476 2475 aliases = ui.configitems('templatealias')
2477 2476 tree = templater.parse(tmpl)
2478 2477 ui.note(templater.prettyformat(tree), '\n')
2479 2478 newtree = templater.expandaliases(tree, aliases)
2480 2479 if newtree != tree:
2481 2480 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2482 2481
2483 2482 if revs is None:
2484 2483 tres = formatter.templateresources(ui, repo)
2485 2484 t = formatter.maketemplater(ui, tmpl, resources=tres)
2486 2485 ui.write(t.renderdefault(props))
2487 2486 else:
2488 2487 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2489 2488 for r in revs:
2490 2489 displayer.show(repo[r], **pycompat.strkwargs(props))
2491 2490 displayer.close()
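# Illustrative invocations (keyword name and templates are arbitrary examples):
#
#   $ hg debugtemplate -D foo=bar '{foo}\n'          # generic template
#   bar
#   $ hg debugtemplate -r . '{rev}:{node|short}\n'   # log template for a changeset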
2492 2491
2493 2492 @command('debuguigetpass', [
2494 2493 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2495 2494 ], _('[-p TEXT]'), norepo=True)
2496 2495 def debuguigetpass(ui, prompt=''):
2497 2496 """show prompt to type password"""
2498 2497 r = ui.getpass(prompt)
2499 2498 ui.write(('response: %s\n') % r)
2500 2499
2501 2500 @command('debuguiprompt', [
2502 2501 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2503 2502 ], _('[-p TEXT]'), norepo=True)
2504 2503 def debuguiprompt(ui, prompt=''):
2505 2504 """show plain prompt"""
2506 2505 r = ui.prompt(prompt)
2507 2506 ui.write(('response: %s\n') % r)
2508 2507
2509 2508 @command('debugupdatecaches', [])
2510 2509 def debugupdatecaches(ui, repo, *pats, **opts):
2511 2510 """warm all known caches in the repository"""
2512 2511 with repo.wlock(), repo.lock():
2513 2512 repo.updatecaches(full=True)
2514 2513
2515 2514 @command('debugupgraderepo', [
2516 2515 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2517 2516 ('', 'run', False, _('performs an upgrade')),
2518 2517 ])
2519 2518 def debugupgraderepo(ui, repo, run=False, optimize=None):
2520 2519 """upgrade a repository to use different features
2521 2520
2522 2521 If no arguments are specified, the repository is evaluated for upgrade
2523 2522 and a list of problems and potential optimizations is printed.
2524 2523
2525 2524 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2526 2525 can be influenced via additional arguments. More details will be provided
2527 2526 by the command output when run without ``--run``.
2528 2527
2529 2528 During the upgrade, the repository will be locked and no writes will be
2530 2529 allowed.
2531 2530
2532 2531 At the end of the upgrade, the repository may not be readable while new
2533 2532 repository data is swapped in. This window will be as long as it takes to
2534 2533 rename some directories inside the ``.hg`` directory. On most machines, this
2535 2534 should complete almost instantaneously and the chances of a consumer being
2536 2535 unable to access the repository should be low.
2537 2536 """
2538 2537 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2539 2538
2540 2539 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2541 2540 inferrepo=True)
2542 2541 def debugwalk(ui, repo, *pats, **opts):
2543 2542 """show how files match on given patterns"""
2544 2543 opts = pycompat.byteskwargs(opts)
2545 2544 m = scmutil.match(repo[None], pats, opts)
2546 2545 ui.write(('matcher: %r\n' % m))
2547 2546 items = list(repo[None].walk(m))
2548 2547 if not items:
2549 2548 return
2550 2549 f = lambda fn: fn
2551 2550 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2552 2551 f = lambda fn: util.normpath(fn)
2553 2552 fmt = 'f %%-%ds %%-%ds %%s' % (
2554 2553 max([len(abs) for abs in items]),
2555 2554 max([len(m.rel(abs)) for abs in items]))
2556 2555 for abs in items:
2557 2556 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2558 2557 ui.write("%s\n" % line.rstrip())
2559 2558
2560 2559 @command('debugwhyunstable', [], _('REV'))
2561 2560 def debugwhyunstable(ui, repo, rev):
2562 2561 """explain instabilities of a changeset"""
2563 2562 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2564 2563 dnodes = ''
2565 2564 if entry.get('divergentnodes'):
2566 2565 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2567 2566 for ctx in entry['divergentnodes']) + ' '
2568 2567 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2569 2568 entry['reason'], entry['node']))
2570 2569
2571 2570 @command('debugwireargs',
2572 2571 [('', 'three', '', 'three'),
2573 2572 ('', 'four', '', 'four'),
2574 2573 ('', 'five', '', 'five'),
2575 2574 ] + cmdutil.remoteopts,
2576 2575 _('REPO [OPTIONS]... [ONE [TWO]]'),
2577 2576 norepo=True)
2578 2577 def debugwireargs(ui, repopath, *vals, **opts):
2579 2578 opts = pycompat.byteskwargs(opts)
2580 2579 repo = hg.peer(ui, opts, repopath)
2581 2580 for opt in cmdutil.remoteopts:
2582 2581 del opts[opt[1]]
2583 2582 args = {}
2584 2583 for k, v in opts.iteritems():
2585 2584 if v:
2586 2585 args[k] = v
2587 2586 args = pycompat.strkwargs(args)
2588 2587 # run twice to check that we don't mess up the stream for the next command
2589 2588 res1 = repo.debugwireargs(*vals, **args)
2590 2589 res2 = repo.debugwireargs(*vals, **args)
2591 2590 ui.write("%s\n" % res1)
2592 2591 if res1 != res2:
2593 2592 ui.warn("%s\n" % res2)
2594 2593
2595 2594 def _parsewirelangblocks(fh):
2596 2595 activeaction = None
2597 2596 blocklines = []
2598 2597
2599 2598 for line in fh:
2600 2599 line = line.rstrip()
2601 2600 if not line:
2602 2601 continue
2603 2602
2604 2603 if line.startswith(b'#'):
2605 2604 continue
2606 2605
2607 2606 if not line.startswith(' '):
2608 2607 # New block. Flush previous one.
2609 2608 if activeaction:
2610 2609 yield activeaction, blocklines
2611 2610
2612 2611 activeaction = line
2613 2612 blocklines = []
2614 2613 continue
2615 2614
2616 2615 # Else we start with an indent.
2617 2616
2618 2617 if not activeaction:
2619 2618 raise error.Abort(_('indented line outside of block'))
2620 2619
2621 2620 blocklines.append(line)
2622 2621
2623 2622 # Flush last block.
2624 2623 if activeaction:
2625 2624 yield activeaction, blocklines
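# Sketch of what the parser above yields for the two-line block shown in the
# debugwireproto docstring below ("command listkeys" / "    namespace bookmarks"):
# the unindented line becomes the action, and the indented lines are kept,
# still indented, in blocklines:
#
#   ('command listkeys', ['    namespace bookmarks'])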
2626 2625
2627 2626 @command('debugwireproto',
2628 2627 [
2629 2628 ('', 'localssh', False, _('start an SSH server for this repo')),
2630 2629 ('', 'peer', '', _('construct a specific version of the peer')),
2631 2630 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2632 2631 ('', 'nologhandshake', False,
2633 2632 _('do not log I/O related to the peer handshake')),
2634 2633 ] + cmdutil.remoteopts,
2635 2634 _('[PATH]'),
2636 2635 optionalrepo=True)
2637 2636 def debugwireproto(ui, repo, path=None, **opts):
2638 2637 """send wire protocol commands to a server
2639 2638
2640 2639 This command can be used to issue wire protocol commands to remote
2641 2640 peers and to debug the raw data being exchanged.
2642 2641
2643 2642 ``--localssh`` will start an SSH server against the current repository
2644 2643 and connect to that. By default, the connection will perform a handshake
2645 2644 and establish an appropriate peer instance.
2646 2645
2647 2646 ``--peer`` can be used to bypass the handshake protocol and construct a
2648 2647 peer instance using the specified class type. Valid values are ``raw``,
2649 2648 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2650 2649 raw data payloads and don't support higher-level command actions.
2651 2650
2652 2651 ``--noreadstderr`` can be used to disable automatic reading from stderr
2653 2652 of the peer (for SSH connections only). Disabling automatic reading of
2654 2653 stderr is useful for making output more deterministic.
2655 2654
2656 2655 Commands are issued via a mini language which is specified via stdin.
2657 2656 The language consists of individual actions to perform. An action is
2658 2657 defined by a block. A block is defined as a line with no leading
2659 2658 space followed by 0 or more lines with leading space. Blocks are
2660 2659 effectively a high-level command with additional metadata.
2661 2660
2662 2661 Lines beginning with ``#`` are ignored.
2663 2662
2664 2663 The following sections denote available actions.
2665 2664
2666 2665 raw
2667 2666 ---
2668 2667
2669 2668 Send raw data to the server.
2670 2669
2671 2670 The block payload contains the raw data to send as one atomic send
2672 2671 operation. The data may not actually be delivered in a single system
2673 2672 call: it depends on the abilities of the transport being used.
2674 2673
2675 2674 Each line in the block is de-indented and concatenated. Then, that
2676 2675 value is evaluated as a Python b'' literal. This allows the use of
2677 2676 backslash escaping, etc.
2678 2677
2679 2678 raw+
2680 2679 ----
2681 2680
2682 2681 Behaves like ``raw`` except flushes output afterwards.
2683 2682
2684 2683 command <X>
2685 2684 -----------
2686 2685
2687 2686 Send a request to run a named command, whose name follows the ``command``
2688 2687 string.
2689 2688
2690 2689 Arguments to the command are defined as lines in this block. The format of
2691 2690 each line is ``<key> <value>``. e.g.::
2692 2691
2693 2692 command listkeys
2694 2693 namespace bookmarks
2695 2694
2696 2695 If the value begins with ``eval:``, it will be interpreted as a Python
2697 2696 literal expression. Otherwise values are interpreted as Python b'' literals.
2698 2697 This allows sending complex types and encoding special byte sequences via
2699 2698 backslash escaping.
2700 2699
2701 2700 The following arguments have special meaning:
2702 2701
2703 2702 ``PUSHFILE``
2704 2703 When defined, the *push* mechanism of the peer will be used instead
2705 2704 of the static request-response mechanism and the content of the
2706 2705 file specified in the value of this argument will be sent as the
2707 2706 command payload.
2708 2707
2709 2708 This can be used to submit a local bundle file to the remote.
2710 2709
2711 2710 batchbegin
2712 2711 ----------
2713 2712
2714 2713 Instruct the peer to begin a batched send.
2715 2714
2716 2715 All ``command`` blocks are queued for execution until the next
2717 2716 ``batchsubmit`` block.
2718 2717
2719 2718 batchsubmit
2720 2719 -----------
2721 2720
2722 2721 Submit previously queued ``command`` blocks as a batch request.
2723 2722
2724 2723 This action MUST be paired with a ``batchbegin`` action.
2725 2724
2726 2725 httprequest <method> <path>
2727 2726 ---------------------------
2728 2727
2729 2728 (HTTP peer only)
2730 2729
2731 2730 Send an HTTP request to the peer.
2732 2731
2733 2732 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2734 2733
2735 2734 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2736 2735 headers to add to the request. e.g. ``Accept: foo``.
2737 2736
2738 2737 The following arguments are special:
2739 2738
2740 2739 ``BODYFILE``
2741 2740 The content of the file defined as the value to this argument will be
2742 2741 transferred verbatim as the HTTP request body.
2743 2742
2744 2743 ``frame <type> <flags> <payload>``
2745 2744 Send a unified protocol frame as part of the request body.
2746 2745
2747 2746 All frames will be collected and sent as the body to the HTTP
2748 2747 request.
2749 2748
2750 2749 close
2751 2750 -----
2752 2751
2753 2752 Close the connection to the server.
2754 2753
2755 2754 flush
2756 2755 -----
2757 2756
2758 2757 Flush data written to the server.
2759 2758
2760 2759 readavailable
2761 2760 -------------
2762 2761
2763 2762 Close the write end of the connection and read all available data from
2764 2763 the server.
2765 2764
2766 2765 If the connection to the server encompasses multiple pipes, we poll both
2767 2766 pipes and read available data.
2768 2767
2769 2768 readline
2770 2769 --------
2771 2770
2772 2771 Read a line of output from the server. If there are multiple output
2773 2772 pipes, reads only the main pipe.
2774 2773
2775 2774 ereadline
2776 2775 ---------
2777 2776
2778 2777 Like ``readline``, but read from the stderr pipe, if available.
2779 2778
2780 2779 read <X>
2781 2780 --------
2782 2781
2783 2782 ``read()`` N bytes from the server's main output pipe.
2784 2783
2785 2784 eread <X>
2786 2785 ---------
2787 2786
2788 2787 ``read()`` N bytes from the server's stderr pipe, if available.
2789 2788
2790 2789 Specifying Unified Frame-Based Protocol Frames
2791 2790 ----------------------------------------------
2792 2791
2793 2792 It is possible to emit a *Unified Frame-Based Protocol* by using special
2794 2793 syntax.
2795 2794
2796 2795 A frame is composed as a type, flags, and payload. These can be parsed
2797 2796 from a string of the form:
2798 2797
2799 2798 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2800 2799
2801 2800 ``request-id`` and ``stream-id`` are integers defining the request and
2802 2801 stream identifiers.
2803 2802
2804 2803 ``type`` can be an integer value for the frame type or the string name
2805 2804 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2806 2805 ``command-name``.
2807 2806
2808 2807 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2809 2808 components. Each component (and there can be just one) can be an integer
2810 2809 or a flag name for stream flags or frame flags, respectively. Values are
2811 2810 resolved to integers and then bitwise OR'd together.
2812 2811
2813 2812 ``payload`` represents the raw frame payload. If it begins with
2814 2813 ``cbor:``, the following string is evaluated as Python code and the
2815 2814 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2816 2815 as a Python byte string literal.
2817 2816 """
2818 2817 opts = pycompat.byteskwargs(opts)
2819 2818
2820 2819 if opts['localssh'] and not repo:
2821 2820 raise error.Abort(_('--localssh requires a repository'))
2822 2821
2823 2822 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2824 2823 raise error.Abort(_('invalid value for --peer'),
2825 2824 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2826 2825
2827 2826 if path and opts['localssh']:
2828 2827 raise error.Abort(_('cannot specify --localssh with an explicit '
2829 2828 'path'))
2830 2829
2831 2830 if ui.interactive():
2832 2831 ui.write(_('(waiting for commands on stdin)\n'))
2833 2832
2834 2833 blocks = list(_parsewirelangblocks(ui.fin))
2835 2834
2836 2835 proc = None
2837 2836 stdin = None
2838 2837 stdout = None
2839 2838 stderr = None
2840 2839 opener = None
2841 2840
2842 2841 if opts['localssh']:
2843 2842 # We start the SSH server in its own process so there is process
2844 2843 # separation. This prevents a whole class of potential bugs around
2845 2844 # shared state from interfering with server operation.
2846 2845 args = procutil.hgcmd() + [
2847 2846 '-R', repo.root,
2848 2847 'debugserve', '--sshstdio',
2849 2848 ]
2850 2849 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2851 2850 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2852 2851 bufsize=0)
2853 2852
2854 2853 stdin = proc.stdin
2855 2854 stdout = proc.stdout
2856 2855 stderr = proc.stderr
2857 2856
2858 2857 # We turn the pipes into observers so we can log I/O.
2859 2858 if ui.verbose or opts['peer'] == 'raw':
2860 2859 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2861 2860 logdata=True)
2862 2861 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2863 2862 logdata=True)
2864 2863 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2865 2864 logdata=True)
2866 2865
2867 2866 # --localssh also implies the peer connection settings.
2868 2867
2869 2868 url = 'ssh://localserver'
2870 2869 autoreadstderr = not opts['noreadstderr']
2871 2870
2872 2871 if opts['peer'] == 'ssh1':
2873 2872 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2874 2873 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2875 2874 None, autoreadstderr=autoreadstderr)
2876 2875 elif opts['peer'] == 'ssh2':
2877 2876 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2878 2877 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2879 2878 None, autoreadstderr=autoreadstderr)
2880 2879 elif opts['peer'] == 'raw':
2881 2880 ui.write(_('using raw connection to peer\n'))
2882 2881 peer = None
2883 2882 else:
2884 2883 ui.write(_('creating ssh peer from handshake results\n'))
2885 2884 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2886 2885 autoreadstderr=autoreadstderr)
2887 2886
2888 2887 elif path:
2889 2888 # We bypass hg.peer() so we can proxy the sockets.
2890 2889 # TODO consider not doing this because we skip
2891 2890 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2892 2891 u = util.url(path)
2893 2892 if u.scheme != 'http':
2894 2893 raise error.Abort(_('only http:// paths are currently supported'))
2895 2894
2896 2895 url, authinfo = u.authinfo()
2897 2896 openerargs = {
2898 2897 r'useragent': b'Mercurial debugwireproto',
2899 2898 }
2900 2899
2901 2900 # Turn pipes/sockets into observers so we can log I/O.
2902 2901 if ui.verbose:
2903 2902 openerargs.update({
2904 2903 r'loggingfh': ui,
2905 2904 r'loggingname': b's',
2906 2905 r'loggingopts': {
2907 2906 r'logdata': True,
2908 2907 r'logdataapis': False,
2909 2908 },
2910 2909 })
2911 2910
2912 2911 if ui.debugflag:
2913 2912 openerargs[r'loggingopts'][r'logdataapis'] = True
2914 2913
2915 2914 # Don't send default headers when in raw mode. This allows us to
2916 2915 # bypass most of the behavior of our URL handling code so we can
2917 2916 # have near complete control over what's sent on the wire.
2918 2917 if opts['peer'] == 'raw':
2919 2918 openerargs[r'sendaccept'] = False
2920 2919
2921 2920 opener = urlmod.opener(ui, authinfo, **openerargs)
2922 2921
2923 2922 if opts['peer'] == 'http2':
2924 2923 ui.write(_('creating http peer for wire protocol version 2\n'))
2925 2924 # We go through makepeer() because we need an API descriptor for
2926 2925 # the peer instance to be useful.
2927 2926 with ui.configoverride({
2928 2927 ('experimental', 'httppeer.advertise-v2'): True}):
2929 2928 if opts['nologhandshake']:
2930 2929 ui.pushbuffer()
2931 2930
2932 2931 peer = httppeer.makepeer(ui, path, opener=opener)
2933 2932
2934 2933 if opts['nologhandshake']:
2935 2934 ui.popbuffer()
2936 2935
2937 2936 if not isinstance(peer, httppeer.httpv2peer):
2938 2937 raise error.Abort(_('could not instantiate HTTP peer for '
2939 2938 'wire protocol version 2'),
2940 2939 hint=_('the server may not have the feature '
2941 2940 'enabled or is not allowing this '
2942 2941 'client version'))
2943 2942
2944 2943 elif opts['peer'] == 'raw':
2945 2944 ui.write(_('using raw connection to peer\n'))
2946 2945 peer = None
2947 2946 elif opts['peer']:
2948 2947 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2949 2948 opts['peer'])
2950 2949 else:
2951 2950 peer = httppeer.makepeer(ui, path, opener=opener)
2952 2951
2953 2952 # We /could/ populate stdin/stdout with sock.makefile()...
2954 2953 else:
2955 2954 raise error.Abort(_('unsupported connection configuration'))
2956 2955
2957 2956 batchedcommands = None
2958 2957
2959 2958 # Now perform actions based on the parsed wire language instructions.
2960 2959 for action, lines in blocks:
2961 2960 if action in ('raw', 'raw+'):
2962 2961 if not stdin:
2963 2962 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2964 2963
2965 2964 # Concatenate the data together.
2966 2965 data = ''.join(l.lstrip() for l in lines)
2967 2966 data = stringutil.unescapestr(data)
2968 2967 stdin.write(data)
2969 2968
2970 2969 if action == 'raw+':
2971 2970 stdin.flush()
2972 2971 elif action == 'flush':
2973 2972 if not stdin:
2974 2973 raise error.Abort(_('cannot call flush on this peer'))
2975 2974 stdin.flush()
2976 2975 elif action.startswith('command'):
2977 2976 if not peer:
2978 2977 raise error.Abort(_('cannot send commands unless peer instance '
2979 2978 'is available'))
2980 2979
2981 2980 command = action.split(' ', 1)[1]
2982 2981
2983 2982 args = {}
2984 2983 for line in lines:
2985 2984 # We need to allow empty values.
2986 2985 fields = line.lstrip().split(' ', 1)
2987 2986 if len(fields) == 1:
2988 2987 key = fields[0]
2989 2988 value = ''
2990 2989 else:
2991 2990 key, value = fields
2992 2991
2993 2992 if value.startswith('eval:'):
2994 2993 value = stringutil.evalpythonliteral(value[5:])
2995 2994 else:
2996 2995 value = stringutil.unescapestr(value)
2997 2996
2998 2997 args[key] = value
2999 2998
3000 2999 if batchedcommands is not None:
3001 3000 batchedcommands.append((command, args))
3002 3001 continue
3003 3002
3004 3003 ui.status(_('sending %s command\n') % command)
3005 3004
3006 3005 if 'PUSHFILE' in args:
3007 3006 with open(args['PUSHFILE'], r'rb') as fh:
3008 3007 del args['PUSHFILE']
3009 3008 res, output = peer._callpush(command, fh,
3010 3009 **pycompat.strkwargs(args))
3011 3010 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3012 3011 ui.status(_('remote output: %s\n') %
3013 3012 stringutil.escapestr(output))
3014 3013 else:
3015 3014 with peer.commandexecutor() as e:
3016 3015 res = e.callcommand(command, args).result()
3017 3016
3018 3017 if isinstance(res, wireprotov2peer.commandresponse):
3019 3018 val = list(res.cborobjects())
3020 3019 ui.status(_('response: %s\n') %
3021 3020 stringutil.pprint(val, bprefix=True))
3022 3021
3023 3022 else:
3024 3023 ui.status(_('response: %s\n') %
3025 3024 stringutil.pprint(res, bprefix=True))
3026 3025
3027 3026 elif action == 'batchbegin':
3028 3027 if batchedcommands is not None:
3029 3028 raise error.Abort(_('nested batchbegin not allowed'))
3030 3029
3031 3030 batchedcommands = []
3032 3031 elif action == 'batchsubmit':
3033 3032 # There is a batching API we could go through. But it would be
3034 3033 # difficult to normalize requests into function calls. It is easier
3035 3034 # to bypass this layer and normalize to commands + args.
3036 3035 ui.status(_('sending batch with %d sub-commands\n') %
3037 3036 len(batchedcommands))
3038 3037 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3039 3038 ui.status(_('response #%d: %s\n') %
3040 3039 (i, stringutil.escapestr(chunk)))
3041 3040
3042 3041 batchedcommands = None
3043 3042
3044 3043 elif action.startswith('httprequest '):
3045 3044 if not opener:
3046 3045 raise error.Abort(_('cannot use httprequest without an HTTP '
3047 3046 'peer'))
3048 3047
3049 3048 request = action.split(' ', 2)
3050 3049 if len(request) != 3:
3051 3050 raise error.Abort(_('invalid httprequest: expected format is '
3052 3051                                     '"httprequest <method> <path>"'))
3053 3052
3054 3053 method, httppath = request[1:]
3055 3054 headers = {}
3056 3055 body = None
3057 3056 frames = []
3058 3057 for line in lines:
3059 3058 line = line.lstrip()
3060 3059 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3061 3060 if m:
3062 3061 headers[m.group(1)] = m.group(2)
3063 3062 continue
3064 3063
3065 3064 if line.startswith(b'BODYFILE '):
3066 3065                 with open(line.split(b' ', 1)[1], 'rb') as fh:
3067 3066 body = fh.read()
3068 3067 elif line.startswith(b'frame '):
3069 3068 frame = wireprotoframing.makeframefromhumanstring(
3070 3069 line[len(b'frame '):])
3071 3070
3072 3071 frames.append(frame)
3073 3072 else:
3074 3073 raise error.Abort(_('unknown argument to httprequest: %s') %
3075 3074 line)
3076 3075
3077 3076 url = path + httppath
3078 3077
3079 3078 if frames:
3080 3079 body = b''.join(bytes(f) for f in frames)
3081 3080
3082 3081 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3083 3082
3084 3083 # urllib.Request insists on using has_data() as a proxy for
3085 3084 # determining the request method. Override that to use our
3086 3085 # explicitly requested method.
3087 3086 req.get_method = lambda: method
3088 3087
3089 3088 try:
3090 3089 res = opener.open(req)
3091 3090 body = res.read()
3092 3091 except util.urlerr.urlerror as e:
3093 3092 e.read()
3094 3093 continue
3095 3094
3096 3095 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3097 3096 ui.write(_('cbor> %s\n') %
3098 3097 stringutil.pprint(cbor.loads(body), bprefix=True))
3099 3098
3100 3099 elif action == 'close':
3101 3100 peer.close()
3102 3101 elif action == 'readavailable':
3103 3102 if not stdout or not stderr:
3104 3103 raise error.Abort(_('readavailable not available on this peer'))
3105 3104
3106 3105 stdin.close()
3107 3106 stdout.read()
3108 3107 stderr.read()
3109 3108
3110 3109 elif action == 'readline':
3111 3110 if not stdout:
3112 3111 raise error.Abort(_('readline not available on this peer'))
3113 3112 stdout.readline()
3114 3113 elif action == 'ereadline':
3115 3114 if not stderr:
3116 3115 raise error.Abort(_('ereadline not available on this peer'))
3117 3116 stderr.readline()
3118 3117 elif action.startswith('read '):
3119 3118 count = int(action.split(' ', 1)[1])
3120 3119 if not stdout:
3121 3120 raise error.Abort(_('read not available on this peer'))
3122 3121 stdout.read(count)
3123 3122 elif action.startswith('eread '):
3124 3123 count = int(action.split(' ', 1)[1])
3125 3124 if not stderr:
3126 3125 raise error.Abort(_('eread not available on this peer'))
3127 3126 stderr.read(count)
3128 3127 else:
3129 3128 raise error.Abort(_('unknown action: %s') % action)
3130 3129
3131 3130 if batchedcommands is not None:
3132 3131 raise error.Abort(_('unclosed "batchbegin" request'))
3133 3132
3134 3133 if peer:
3135 3134 peer.close()
3136 3135
3137 3136 if proc:
3138 3137 proc.kill()
@@ -1,684 +1,683 b''
1 1 # posix.py - Posix utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import fcntl
12 12 import getpass
13 13 import grp
14 14 import os
15 15 import pwd
16 16 import re
17 17 import select
18 18 import stat
19 19 import sys
20 20 import tempfile
21 21 import unicodedata
22 22
23 23 from .i18n import _
24 24 from . import (
25 25 encoding,
26 26 error,
27 27 policy,
28 28 pycompat,
29 29 )
30 30
31 31 osutil = policy.importmod(r'osutil')
32 32
33 33 posixfile = open
34 34 normpath = os.path.normpath
35 35 samestat = os.path.samestat
36 36 try:
37 37 oslink = os.link
38 38 except AttributeError:
39 39 # Some platforms build Python without os.link on systems that are
40 40 # vaguely unix-like but don't have hardlink support. For those
41 41 # poor souls, just say we tried and that it failed so we fall back
42 42 # to copies.
43 43 def oslink(src, dst):
44 44 raise OSError(errno.EINVAL,
45 45 'hardlinks not supported: %s to %s' % (src, dst))
46 46 unlink = os.unlink
47 47 rename = os.rename
48 48 removedirs = os.removedirs
49 49 expandglobs = False
50 50
51 51 umask = os.umask(0)
52 52 os.umask(umask)
53 53
54 54 def split(p):
55 55 '''Same as posixpath.split, but faster
56 56
57 57 >>> import posixpath
58 58 >>> for f in [b'/absolute/path/to/file',
59 59 ... b'relative/path/to/file',
60 60 ... b'file_alone',
61 61 ... b'path/to/directory/',
62 62 ... b'/multiple/path//separators',
63 63 ... b'/file_at_root',
64 64 ... b'///multiple_leading_separators_at_root',
65 65 ... b'']:
66 66 ... assert split(f) == posixpath.split(f), f
67 67 '''
68 68 ht = p.rsplit('/', 1)
69 69 if len(ht) == 1:
70 70 return '', p
71 71 nh = ht[0].rstrip('/')
72 72 if nh:
73 73 return nh, ht[1]
74 74 return ht[0] + '/', ht[1]
75 75
76 76 def openhardlinks():
77 77 '''return true if it is safe to hold open file handles to hardlinks'''
78 78 return True
79 79
80 80 def nlinks(name):
81 81 '''return number of hardlinks for the given file'''
82 82 return os.lstat(name).st_nlink
83 83
84 84 def parsepatchoutput(output_line):
85 85 """parses the output produced by patch and returns the filename"""
86 86 pf = output_line[14:]
87 87 if pycompat.sysplatform == 'OpenVMS':
88 88 if pf[0] == '`':
89 89 pf = pf[1:-1] # Remove the quotes
90 90 else:
91 91 if pf.startswith("'") and pf.endswith("'") and " " in pf:
92 92 pf = pf[1:-1] # Remove the quotes
93 93 return pf
94 94
95 95 def sshargs(sshcmd, host, user, port):
96 96 '''Build argument list for ssh'''
97 97 args = user and ("%s@%s" % (user, host)) or host
98 98 if '-' in args[:1]:
99 99 raise error.Abort(
100 100 _('illegal ssh hostname or username starting with -: %s') % args)
101 101 args = shellquote(args)
102 102 if port:
103 103 args = '-p %s %s' % (shellquote(port), args)
104 104 return args
105 105
106 106 def isexec(f):
107 107 """check whether a file is executable"""
108 108 return (os.lstat(f).st_mode & 0o100 != 0)
109 109
110 110 def setflags(f, l, x):
111 111 st = os.lstat(f)
112 112 s = st.st_mode
113 113 if l:
114 114 if not stat.S_ISLNK(s):
115 115 # switch file to link
116 116 fp = open(f, 'rb')
117 117 data = fp.read()
118 118 fp.close()
119 119 unlink(f)
120 120 try:
121 121 os.symlink(data, f)
122 122 except OSError:
123 123 # failed to make a link, rewrite file
124 124 fp = open(f, "wb")
125 125 fp.write(data)
126 126 fp.close()
127 127 # no chmod needed at this point
128 128 return
129 129 if stat.S_ISLNK(s):
130 130 # switch link to file
131 131 data = os.readlink(f)
132 132 unlink(f)
133 133 fp = open(f, "wb")
134 134 fp.write(data)
135 135 fp.close()
136 136 s = 0o666 & ~umask # avoid restatting for chmod
137 137
138 138 sx = s & 0o100
139 139 if st.st_nlink > 1 and bool(x) != bool(sx):
140 140 # the file is a hardlink, break it
141 141 with open(f, "rb") as fp:
142 142 data = fp.read()
143 143 unlink(f)
144 144 with open(f, "wb") as fp:
145 145 fp.write(data)
146 146
147 147 if x and not sx:
148 148 # Turn on +x for every +r bit when making a file executable
149 149 # and obey umask.
150 150 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
151 151 elif not x and sx:
152 152 # Turn off all +x bits
153 153 os.chmod(f, s & 0o666)
154 154
155 155 def copymode(src, dst, mode=None):
156 156 '''Copy the file mode from the file at path src to dst.
157 157     If src doesn't exist, mode is used instead. If mode is None, the
158 158     umask is used.'''
159 159 try:
160 160 st_mode = os.lstat(src).st_mode & 0o777
161 161 except OSError as inst:
162 162 if inst.errno != errno.ENOENT:
163 163 raise
164 164 st_mode = mode
165 165 if st_mode is None:
166 166 st_mode = ~umask
167 167 st_mode &= 0o666
168 168 os.chmod(dst, st_mode)
169 169
170 170 def checkexec(path):
171 171 """
172 172 Check whether the given path is on a filesystem with UNIX-like exec flags
173 173
174 174 Requires a directory (like /foo/.hg)
175 175 """
176 176
177 177 # VFAT on some Linux versions can flip mode but it doesn't persist
178 178     # across a FS remount. Frequently we can detect it if files are created
179 179 # with exec bit on.
180 180
181 181 try:
182 182 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
183 183 cachedir = os.path.join(path, '.hg', 'cache')
184 184 if os.path.isdir(cachedir):
185 185 checkisexec = os.path.join(cachedir, 'checkisexec')
186 186 checknoexec = os.path.join(cachedir, 'checknoexec')
187 187
188 188 try:
189 189 m = os.stat(checkisexec).st_mode
190 190 except OSError as e:
191 191 if e.errno != errno.ENOENT:
192 192 raise
193 193 # checkisexec does not exist - fall through ...
194 194 else:
195 195 # checkisexec exists, check if it actually is exec
196 196 if m & EXECFLAGS != 0:
198 198                     # ensure checknoexec exists, check it isn't exec
198 198 try:
199 199 m = os.stat(checknoexec).st_mode
200 200 except OSError as e:
201 201 if e.errno != errno.ENOENT:
202 202 raise
203 203 open(checknoexec, 'w').close() # might fail
204 204 m = os.stat(checknoexec).st_mode
205 205 if m & EXECFLAGS == 0:
206 206 # check-exec is exec and check-no-exec is not exec
207 207 return True
208 208 # checknoexec exists but is exec - delete it
209 209 unlink(checknoexec)
210 210 # checkisexec exists but is not exec - delete it
211 211 unlink(checkisexec)
212 212
213 213 # check using one file, leave it as checkisexec
214 214 checkdir = cachedir
215 215 else:
216 216 # check directly in path and don't leave checkisexec behind
217 217 checkdir = path
218 218 checkisexec = None
219 219 fh, fn = pycompat.mkstemp(dir=checkdir, prefix='hg-checkexec-')
220 220 try:
221 221 os.close(fh)
222 222 m = os.stat(fn).st_mode
223 223 if m & EXECFLAGS == 0:
224 224 os.chmod(fn, m & 0o777 | EXECFLAGS)
225 225 if os.stat(fn).st_mode & EXECFLAGS != 0:
226 226 if checkisexec is not None:
227 227 os.rename(fn, checkisexec)
228 228 fn = None
229 229 return True
230 230 finally:
231 231 if fn is not None:
232 232 unlink(fn)
233 233 except (IOError, OSError):
234 234 # we don't care, the user probably won't be able to commit anyway
235 235 return False
236 236
237 237 def checklink(path):
238 238 """check whether the given path is on a symlink-capable filesystem"""
239 239 # mktemp is not racy because symlink creation will fail if the
240 240 # file already exists
241 241 while True:
242 242 cachedir = os.path.join(path, '.hg', 'cache')
243 243 checklink = os.path.join(cachedir, 'checklink')
244 244 # try fast path, read only
245 245 if os.path.islink(checklink):
246 246 return True
247 247 if os.path.isdir(cachedir):
248 248 checkdir = cachedir
249 249 else:
250 250 checkdir = path
251 251 cachedir = None
252 fscheckdir = pycompat.fsdecode(checkdir)
253 name = tempfile.mktemp(dir=fscheckdir,
252 name = tempfile.mktemp(dir=pycompat.fsdecode(checkdir),
254 253 prefix=r'checklink-')
255 254 name = pycompat.fsencode(name)
256 255 try:
257 256 fd = None
258 257 if cachedir is None:
259 fd = tempfile.NamedTemporaryFile(dir=fscheckdir,
260 prefix=r'hg-checklink-')
261 target = pycompat.fsencode(os.path.basename(fd.name))
258 fd = pycompat.namedtempfile(dir=checkdir,
259 prefix='hg-checklink-')
260 target = os.path.basename(fd.name)
262 261 else:
263 262 # create a fixed file to link to; doesn't matter if it
264 263 # already exists.
265 264 target = 'checklink-target'
266 265 try:
267 266 fullpath = os.path.join(cachedir, target)
268 267 open(fullpath, 'w').close()
269 268 except IOError as inst:
270 269 if inst[0] == errno.EACCES:
271 270 # If we can't write to cachedir, just pretend
272 271 # that the fs is readonly and by association
273 272 # that the fs won't support symlinks. This
274 273 # seems like the least dangerous way to avoid
275 274 # data loss.
276 275 return False
277 276 raise
278 277 try:
279 278 os.symlink(target, name)
280 279 if cachedir is None:
281 280 unlink(name)
282 281 else:
283 282 try:
284 283 os.rename(name, checklink)
285 284 except OSError:
286 285 unlink(name)
287 286 return True
288 287 except OSError as inst:
289 288 # link creation might race, try again
290 289 if inst.errno == errno.EEXIST:
291 290 continue
292 291 raise
293 292 finally:
294 293 if fd is not None:
295 294 fd.close()
296 295 except AttributeError:
297 296 return False
298 297 except OSError as inst:
299 298 # sshfs might report failure while successfully creating the link
300 299 if inst.errno == errno.EIO and os.path.exists(name):
301 300 unlink(name)
302 301 return False
303 302
304 303 def checkosfilename(path):
305 304 '''Check that the base-relative path is a valid filename on this platform.
306 305 Returns None if the path is ok, or a UI string describing the problem.'''
307 306 return None # on posix platforms, every path is ok
308 307
309 308 def getfsmountpoint(dirpath):
310 309 '''Get the filesystem mount point from a directory (best-effort)
311 310
312 311 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
313 312 '''
314 313 return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
315 314
316 315 def getfstype(dirpath):
317 316 '''Get the filesystem type name from a directory (best-effort)
318 317
319 318 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
320 319 '''
321 320 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
322 321
323 322 def setbinary(fd):
324 323 pass
325 324
326 325 def pconvert(path):
327 326 return path
328 327
329 328 def localpath(path):
330 329 return path
331 330
332 331 def samefile(fpath1, fpath2):
333 332 """Returns whether path1 and path2 refer to the same file. This is only
334 333 guaranteed to work for files, not directories."""
335 334 return os.path.samefile(fpath1, fpath2)
336 335
337 336 def samedevice(fpath1, fpath2):
338 337 """Returns whether fpath1 and fpath2 are on the same device. This is only
339 338 guaranteed to work for files, not directories."""
340 339 st1 = os.lstat(fpath1)
341 340 st2 = os.lstat(fpath2)
342 341 return st1.st_dev == st2.st_dev
343 342
344 343 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
345 344 def normcase(path):
346 345 return path.lower()
347 346
348 347 # what normcase does to ASCII strings
349 348 normcasespec = encoding.normcasespecs.lower
350 349 # fallback normcase function for non-ASCII strings
351 350 normcasefallback = normcase
352 351
353 352 if pycompat.isdarwin:
354 353
355 354 def normcase(path):
356 355 '''
357 356 Normalize a filename for OS X-compatible comparison:
358 357 - escape-encode invalid characters
359 358 - decompose to NFD
360 359 - lowercase
361 360 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
362 361
363 362 >>> normcase(b'UPPER')
364 363 'upper'
365 364 >>> normcase(b'Caf\\xc3\\xa9')
366 365 'cafe\\xcc\\x81'
367 366 >>> normcase(b'\\xc3\\x89')
368 367 'e\\xcc\\x81'
369 368 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
370 369 '%b8%ca%c3\\xca\\xbe%c8.jpg'
371 370 '''
372 371
373 372 try:
374 373 return encoding.asciilower(path) # exception for non-ASCII
375 374 except UnicodeDecodeError:
376 375 return normcasefallback(path)
377 376
378 377 normcasespec = encoding.normcasespecs.lower
379 378
380 379 def normcasefallback(path):
381 380 try:
382 381 u = path.decode('utf-8')
383 382 except UnicodeDecodeError:
384 383 # OS X percent-encodes any bytes that aren't valid utf-8
385 384 s = ''
386 385 pos = 0
387 386 l = len(path)
388 387 while pos < l:
389 388 try:
390 389 c = encoding.getutf8char(path, pos)
391 390 pos += len(c)
392 391 except ValueError:
393 392 c = '%%%02X' % ord(path[pos:pos + 1])
394 393 pos += 1
395 394 s += c
396 395
397 396 u = s.decode('utf-8')
398 397
399 398 # Decompose then lowercase (HFS+ technote specifies lower)
400 399 enc = unicodedata.normalize(r'NFD', u).lower().encode('utf-8')
401 400 # drop HFS+ ignored characters
402 401 return encoding.hfsignoreclean(enc)
403 402
404 403 if pycompat.sysplatform == 'cygwin':
405 404 # workaround for cygwin, in which mount point part of path is
406 405 # treated as case sensitive, even though underlying NTFS is case
407 406 # insensitive.
408 407
409 408 # default mount points
410 409 cygwinmountpoints = sorted([
411 410 "/usr/bin",
412 411 "/usr/lib",
413 412 "/cygdrive",
414 413 ], reverse=True)
415 414
417 416     # use uppercasing for normcase, the same as the NTFS workaround
417 416 def normcase(path):
418 417 pathlen = len(path)
419 418 if (pathlen == 0) or (path[0] != pycompat.ossep):
420 419 # treat as relative
421 420 return encoding.upper(path)
422 421
423 422 # to preserve case of mountpoint part
424 423 for mp in cygwinmountpoints:
425 424 if not path.startswith(mp):
426 425 continue
427 426
428 427 mplen = len(mp)
429 428 if mplen == pathlen: # mount point itself
430 429 return mp
431 430 if path[mplen] == pycompat.ossep:
432 431 return mp + encoding.upper(path[mplen:])
433 432
434 433 return encoding.upper(path)
435 434
436 435 normcasespec = encoding.normcasespecs.other
437 436 normcasefallback = normcase
438 437
439 438 # Cygwin translates native ACLs to POSIX permissions,
440 439 # but these translations are not supported by native
441 440 # tools, so the exec bit tends to be set erroneously.
442 441 # Therefore, disable executable bit access on Cygwin.
443 442 def checkexec(path):
444 443 return False
445 444
446 445 # Similarly, Cygwin's symlink emulation is likely to create
447 446 # problems when Mercurial is used from both Cygwin and native
448 447 # Windows, with other native tools, or on shared volumes
449 448 def checklink(path):
450 449 return False
451 450
452 451 _needsshellquote = None
453 452 def shellquote(s):
454 453 if pycompat.sysplatform == 'OpenVMS':
455 454 return '"%s"' % s
456 455 global _needsshellquote
457 456 if _needsshellquote is None:
458 457 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
459 458 if s and not _needsshellquote(s):
460 459 # "s" shouldn't have to be quoted
461 460 return s
462 461 else:
463 462 return "'%s'" % s.replace("'", "'\\''")
464 463
465 464 def shellsplit(s):
466 465 """Parse a command string in POSIX shell way (best-effort)"""
467 466 return pycompat.shlexsplit(s, posix=True)
468 467
469 468 def quotecommand(cmd):
470 469 return cmd
471 470
472 471 def testpid(pid):
473 472 '''return False if pid dead, True if running or not sure'''
474 473 if pycompat.sysplatform == 'OpenVMS':
475 474 return True
476 475 try:
477 476 os.kill(pid, 0)
478 477 return True
479 478 except OSError as inst:
480 479 return inst.errno != errno.ESRCH
481 480
482 481 def isowner(st):
483 482 """Return True if the stat object st is from the current user."""
484 483 return st.st_uid == os.getuid()
485 484
486 485 def findexe(command):
487 486 '''Find executable for command searching like which does.
488 487 If command is a basename then PATH is searched for command.
489 488 PATH isn't searched if command is an absolute or relative path.
490 489 If command isn't found None is returned.'''
491 490 if pycompat.sysplatform == 'OpenVMS':
492 491 return command
493 492
494 493 def findexisting(executable):
495 494 'Will return executable if existing file'
496 495 if os.path.isfile(executable) and os.access(executable, os.X_OK):
497 496 return executable
498 497 return None
499 498
500 499 if pycompat.ossep in command:
501 500 return findexisting(command)
502 501
503 502 if pycompat.sysplatform == 'plan9':
504 503 return findexisting(os.path.join('/bin', command))
505 504
506 505 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
507 506 executable = findexisting(os.path.join(path, command))
508 507 if executable is not None:
509 508 return executable
510 509 return None
511 510
512 511 def setsignalhandler():
513 512 pass
514 513
515 514 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
516 515
517 516 def statfiles(files):
518 517 '''Stat each file in files. Yield each stat, or None if a file does not
519 518 exist or has a type we don't care about.'''
520 519 lstat = os.lstat
521 520 getkind = stat.S_IFMT
522 521 for nf in files:
523 522 try:
524 523 st = lstat(nf)
525 524 if getkind(st.st_mode) not in _wantedkinds:
526 525 st = None
527 526 except OSError as err:
528 527 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
529 528 raise
530 529 st = None
531 530 yield st
532 531
533 532 def getuser():
534 533 '''return name of current user'''
535 534 return pycompat.fsencode(getpass.getuser())
536 535
537 536 def username(uid=None):
538 537 """Return the name of the user with the given uid.
539 538
540 539 If uid is None, return the name of the current user."""
541 540
542 541 if uid is None:
543 542 uid = os.getuid()
544 543 try:
545 544 return pwd.getpwuid(uid)[0]
546 545 except KeyError:
547 546 return str(uid)
548 547
549 548 def groupname(gid=None):
550 549 """Return the name of the group with the given gid.
551 550
552 551 If gid is None, return the name of the current group."""
553 552
554 553 if gid is None:
555 554 gid = os.getgid()
556 555 try:
557 556 return grp.getgrgid(gid)[0]
558 557 except KeyError:
559 558 return str(gid)
560 559
561 560 def groupmembers(name):
562 561 """Return the list of members of the group with the given
563 562 name, KeyError if the group does not exist.
564 563 """
565 564 return list(grp.getgrnam(name).gr_mem)
566 565
567 566 def spawndetached(args):
568 567 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
569 568 args[0], args)
570 569
571 570 def gethgcmd():
572 571 return sys.argv[:1]
573 572
574 573 def makedir(path, notindexed):
575 574 os.mkdir(path)
576 575
577 576 def lookupreg(key, name=None, scope=None):
578 577 return None
579 578
580 579 def hidewindow():
581 580 """Hide current shell window.
582 581
583 582 Used to hide the window opened when starting asynchronous
584 583 child process under Windows, unneeded on other systems.
585 584 """
586 585 pass
587 586
588 587 class cachestat(object):
589 588 def __init__(self, path):
590 589 self.stat = os.stat(path)
591 590
592 591 def cacheable(self):
593 592 return bool(self.stat.st_ino)
594 593
595 594 __hash__ = object.__hash__
596 595
597 596 def __eq__(self, other):
598 597 try:
599 598 # Only dev, ino, size, mtime and atime are likely to change. Out
600 599 # of these, we shouldn't compare atime but should compare the
601 600 # rest. However, one of the other fields changing indicates
602 601 # something fishy going on, so return False if anything but atime
603 602 # changes.
604 603 return (self.stat.st_mode == other.stat.st_mode and
605 604 self.stat.st_ino == other.stat.st_ino and
606 605 self.stat.st_dev == other.stat.st_dev and
607 606 self.stat.st_nlink == other.stat.st_nlink and
608 607 self.stat.st_uid == other.stat.st_uid and
609 608 self.stat.st_gid == other.stat.st_gid and
610 609 self.stat.st_size == other.stat.st_size and
611 610 self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME] and
612 611 self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME])
613 612 except AttributeError:
614 613 return False
615 614
616 615 def __ne__(self, other):
617 616 return not self == other
618 617
619 618 def statislink(st):
620 619 '''check whether a stat result is a symlink'''
621 620 return st and stat.S_ISLNK(st.st_mode)
622 621
623 622 def statisexec(st):
624 623 '''check whether a stat result is an executable file'''
625 624 return st and (st.st_mode & 0o100 != 0)
626 625
627 626 def poll(fds):
628 627 """block until something happens on any file descriptor
629 628
630 629 This is a generic helper that will check for any activity
631 630     (read, write, exception) and return the list of touched files.
632 631
633 632 In unsupported cases, it will raise a NotImplementedError"""
634 633 try:
635 634 while True:
636 635 try:
637 636 res = select.select(fds, fds, fds)
638 637 break
639 638 except select.error as inst:
640 639 if inst.args[0] == errno.EINTR:
641 640 continue
642 641 raise
643 642 except ValueError: # out of range file descriptor
644 643 raise NotImplementedError()
645 644 return sorted(list(set(sum(res, []))))
646 645
647 646 def readpipe(pipe):
648 647 """Read all available data from a pipe."""
649 648 # We can't fstat() a pipe because Linux will always report 0.
650 649 # So, we set the pipe to non-blocking mode and read everything
651 650 # that's available.
652 651 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
653 652 flags |= os.O_NONBLOCK
654 653 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
655 654
656 655 try:
657 656 chunks = []
658 657 while True:
659 658 try:
660 659 s = pipe.read()
661 660 if not s:
662 661 break
663 662 chunks.append(s)
664 663 except IOError:
665 664 break
666 665
667 666 return ''.join(chunks)
668 667 finally:
669 668 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
670 669
671 670 def bindunixsocket(sock, path):
672 671 """Bind the UNIX domain socket to the specified path"""
673 672 # use relative path instead of full path at bind() if possible, since
674 673 # AF_UNIX path has very small length limit (107 chars) on common
675 674 # platforms (see sys/un.h)
676 675 dirname, basename = os.path.split(path)
677 676 bakwdfd = None
678 677 if dirname:
679 678 bakwdfd = os.open('.', os.O_DIRECTORY)
680 679 os.chdir(dirname)
681 680 sock.bind(basename)
682 681 if bakwdfd:
683 682 os.fchdir(bakwdfd)
684 683 os.close(bakwdfd)
@@ -1,394 +1,402 b''
1 1 # pycompat.py - portability shim for python 3
2 2 #
3 3 # This software may be used and distributed according to the terms of the
4 4 # GNU General Public License version 2 or any later version.
5 5
6 6 """Mercurial portability shim for python 3.
7 7
8 8 This contains aliases to hide python version-specific details from the core.
9 9 """
10 10
11 11 from __future__ import absolute_import
12 12
13 13 import getopt
14 14 import inspect
15 15 import os
16 16 import shlex
17 17 import sys
18 18 import tempfile
19 19
20 20 ispy3 = (sys.version_info[0] >= 3)
21 21 ispypy = (r'__pypy__' in sys.builtin_module_names)
22 22
23 23 if not ispy3:
24 24 import cookielib
25 25 import cPickle as pickle
26 26 import httplib
27 27 import Queue as queue
28 28 import SocketServer as socketserver
29 29 import xmlrpclib
30 30
31 31 from .thirdparty.concurrent import futures
32 32
33 33 def future_set_exception_info(f, exc_info):
34 34 f.set_exception_info(*exc_info)
35 35 else:
36 36 import concurrent.futures as futures
37 37 import http.cookiejar as cookielib
38 38 import http.client as httplib
39 39 import pickle
40 40 import queue as queue
41 41 import socketserver
42 42 import xmlrpc.client as xmlrpclib
43 43
44 44 def future_set_exception_info(f, exc_info):
45 45 f.set_exception(exc_info[0])
46 46
47 47 def identity(a):
48 48 return a
49 49
50 50 if ispy3:
51 51 import builtins
52 52 import functools
53 53 import io
54 54 import struct
55 55
56 56 fsencode = os.fsencode
57 57 fsdecode = os.fsdecode
58 58 oscurdir = os.curdir.encode('ascii')
59 59 oslinesep = os.linesep.encode('ascii')
60 60 osname = os.name.encode('ascii')
61 61 ospathsep = os.pathsep.encode('ascii')
62 62 ospardir = os.pardir.encode('ascii')
63 63 ossep = os.sep.encode('ascii')
64 64 osaltsep = os.altsep
65 65 if osaltsep:
66 66 osaltsep = osaltsep.encode('ascii')
67 67 # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
68 68 # returns bytes.
69 69 getcwd = os.getcwdb
70 70 sysplatform = sys.platform.encode('ascii')
71 71 sysexecutable = sys.executable
72 72 if sysexecutable:
73 73 sysexecutable = os.fsencode(sysexecutable)
74 74 bytesio = io.BytesIO
75 75 # TODO deprecate stringio name, as it is a lie on Python 3.
76 76 stringio = bytesio
77 77
78 78 def maplist(*args):
79 79 return list(map(*args))
80 80
81 81 def rangelist(*args):
82 82 return list(range(*args))
83 83
84 84 def ziplist(*args):
85 85 return list(zip(*args))
86 86
87 87 rawinput = input
88 88 getargspec = inspect.getfullargspec
89 89
90 90 # TODO: .buffer might not exist if std streams were replaced; we'll need
91 91 # a silly wrapper to make a bytes stream backed by a unicode one.
92 92 stdin = sys.stdin.buffer
93 93 stdout = sys.stdout.buffer
94 94 stderr = sys.stderr.buffer
95 95
96 96 # Since Python 3 converts argv to wchar_t type by Py_DecodeLocale() on Unix,
97 97 # we can use os.fsencode() to get back bytes argv.
98 98 #
99 99 # https://hg.python.org/cpython/file/v3.5.1/Programs/python.c#l55
100 100 #
101 101 # TODO: On Windows, the native argv is wchar_t, so we'll need a different
102 102 # workaround to simulate the Python 2 (i.e. ANSI Win32 API) behavior.
103 103 if getattr(sys, 'argv', None) is not None:
104 104 sysargv = list(map(os.fsencode, sys.argv))
105 105
106 106 bytechr = struct.Struct('>B').pack
107 107 byterepr = b'%r'.__mod__
108 108
109 109 class bytestr(bytes):
110 110 """A bytes which mostly acts as a Python 2 str
111 111
112 112 >>> bytestr(), bytestr(bytearray(b'foo')), bytestr(u'ascii'), bytestr(1)
113 113 ('', 'foo', 'ascii', '1')
114 114 >>> s = bytestr(b'foo')
115 115 >>> assert s is bytestr(s)
116 116
117 117 __bytes__() should be called if provided:
118 118
119 119 >>> class bytesable(object):
120 120 ... def __bytes__(self):
121 121 ... return b'bytes'
122 122 >>> bytestr(bytesable())
123 123 'bytes'
124 124
125 125 There's no implicit conversion from non-ascii str as its encoding is
126 126 unknown:
127 127
128 128 >>> bytestr(chr(0x80)) # doctest: +ELLIPSIS
129 129 Traceback (most recent call last):
130 130 ...
131 131 UnicodeEncodeError: ...
132 132
133 133 Comparison between bytestr and bytes should work:
134 134
135 135 >>> assert bytestr(b'foo') == b'foo'
136 136 >>> assert b'foo' == bytestr(b'foo')
137 137 >>> assert b'f' in bytestr(b'foo')
138 138 >>> assert bytestr(b'f') in b'foo'
139 139
140 140 Sliced elements should be bytes, not integer:
141 141
142 142 >>> s[1], s[:2]
143 143 (b'o', b'fo')
144 144 >>> list(s), list(reversed(s))
145 145 ([b'f', b'o', b'o'], [b'o', b'o', b'f'])
146 146
147 147 As bytestr type isn't propagated across operations, you need to cast
148 148 bytes to bytestr explicitly:
149 149
150 150 >>> s = bytestr(b'foo').upper()
151 151 >>> t = bytestr(s)
152 152 >>> s[0], t[0]
153 153 (70, b'F')
154 154
155 155 Be careful to not pass a bytestr object to a function which expects
156 156 bytearray-like behavior.
157 157
158 158 >>> t = bytes(t) # cast to bytes
159 159 >>> assert type(t) is bytes
160 160 """
161 161
162 162 def __new__(cls, s=b''):
163 163 if isinstance(s, bytestr):
164 164 return s
165 165 if (not isinstance(s, (bytes, bytearray))
166 166 and not hasattr(s, u'__bytes__')): # hasattr-py3-only
167 167 s = str(s).encode(u'ascii')
168 168 return bytes.__new__(cls, s)
169 169
170 170 def __getitem__(self, key):
171 171 s = bytes.__getitem__(self, key)
172 172 if not isinstance(s, bytes):
173 173 s = bytechr(s)
174 174 return s
175 175
176 176 def __iter__(self):
177 177 return iterbytestr(bytes.__iter__(self))
178 178
179 179 def __repr__(self):
180 180 return bytes.__repr__(self)[1:] # drop b''
181 181
182 182 def iterbytestr(s):
183 183 """Iterate bytes as if it were a str object of Python 2"""
184 184 return map(bytechr, s)
185 185
186 186 def maybebytestr(s):
187 187 """Promote bytes to bytestr"""
188 188 if isinstance(s, bytes):
189 189 return bytestr(s)
190 190 return s
191 191
192 192 def sysbytes(s):
193 193 """Convert an internal str (e.g. keyword, __doc__) back to bytes
194 194
195 195 This never raises UnicodeEncodeError, but only ASCII characters
196 196         can be round-tripped by sysstr(sysbytes(s)).
197 197 """
198 198 return s.encode(u'utf-8')
199 199
200 200 def sysstr(s):
201 201 """Return a keyword str to be passed to Python functions such as
202 202 getattr() and str.encode()
203 203
204 204 This never raises UnicodeDecodeError. Non-ascii characters are
205 205 considered invalid and mapped to arbitrary but unique code points
206 206 such that 'sysstr(a) != sysstr(b)' for all 'a != b'.
207 207 """
208 208 if isinstance(s, builtins.str):
209 209 return s
210 210 return s.decode(u'latin-1')
211 211
212 212 def strurl(url):
213 213 """Converts a bytes url back to str"""
214 214 if isinstance(url, bytes):
215 215 return url.decode(u'ascii')
216 216 return url
217 217
218 218 def bytesurl(url):
219 219 """Converts a str url to bytes by encoding in ascii"""
220 220 if isinstance(url, str):
221 221 return url.encode(u'ascii')
222 222 return url
223 223
224 224 def raisewithtb(exc, tb):
225 225 """Raise exception with the given traceback"""
226 226 raise exc.with_traceback(tb)
227 227
228 228 def getdoc(obj):
229 229 """Get docstring as bytes; may be None so gettext() won't confuse it
230 230 with _('')"""
231 231 doc = getattr(obj, u'__doc__', None)
232 232 if doc is None:
233 233 return doc
234 234 return sysbytes(doc)
235 235
236 236 def _wrapattrfunc(f):
237 237 @functools.wraps(f)
238 238 def w(object, name, *args):
239 239 return f(object, sysstr(name), *args)
240 240 return w
241 241
242 242 # these wrappers are automagically imported by hgloader
243 243 delattr = _wrapattrfunc(builtins.delattr)
244 244 getattr = _wrapattrfunc(builtins.getattr)
245 245 hasattr = _wrapattrfunc(builtins.hasattr)
246 246 setattr = _wrapattrfunc(builtins.setattr)
247 247 xrange = builtins.range
248 248 unicode = str
249 249
250 250 def open(name, mode='r', buffering=-1, encoding=None):
251 251 return builtins.open(name, sysstr(mode), buffering, encoding)
252 252
253 253 safehasattr = _wrapattrfunc(builtins.hasattr)
254 254
255 255 def _getoptbwrapper(orig, args, shortlist, namelist):
256 256 """
256 256         Takes bytes arguments, converts them to unicode, passes them to
257 257         getopt.getopt(), converts the returned values back to bytes and then
258 258         returns them, for Python 3 compatibility, as getopt.getopt() doesn't accept
259 259         bytes on Python 3.
261 261 """
262 262 args = [a.decode('latin-1') for a in args]
263 263 shortlist = shortlist.decode('latin-1')
264 264 namelist = [a.decode('latin-1') for a in namelist]
265 265 opts, args = orig(args, shortlist, namelist)
266 266 opts = [(a[0].encode('latin-1'), a[1].encode('latin-1'))
267 267 for a in opts]
268 268 args = [a.encode('latin-1') for a in args]
269 269 return opts, args
270 270
271 271 def strkwargs(dic):
272 272 """
273 273         Converts the keys of a Python dictionary to str (i.e. unicode) so that
274 274         they can be passed as keyword arguments, since dictionaries with bytes keys
275 275         can't be passed as keyword arguments to functions on Python 3.
276 276 """
277 277 dic = dict((k.decode('latin-1'), v) for k, v in dic.iteritems())
278 278 return dic
279 279
280 280 def byteskwargs(dic):
281 281 """
282 282         Converts the keys of a Python dictionary back to bytes, as they were
283 283         converted to str to pass that dictionary as keyword arguments on Python 3.
284 284 """
285 285 dic = dict((k.encode('latin-1'), v) for k, v in dic.iteritems())
286 286 return dic
287 287
288 288 # TODO: handle shlex.shlex().
289 289 def shlexsplit(s, comments=False, posix=True):
290 290 """
291 291         Takes a bytes argument, converts it to str (i.e. unicode), passes that into
292 292         shlex.split(), converts the returned value back to bytes and returns that,
293 293         for Python 3 compatibility, as shlex.split() doesn't accept bytes on Python 3.
294 294 """
295 295 ret = shlex.split(s.decode('latin-1'), comments, posix)
296 296 return [a.encode('latin-1') for a in ret]
297 297
298 298 def emailparser(*args, **kwargs):
299 299 import email.parser
300 300 return email.parser.BytesParser(*args, **kwargs)
301 301
302 302 else:
303 303 import cStringIO
304 304
305 305 bytechr = chr
306 306 byterepr = repr
307 307 bytestr = str
308 308 iterbytestr = iter
309 309 maybebytestr = identity
310 310 sysbytes = identity
311 311 sysstr = identity
312 312 strurl = identity
313 313 bytesurl = identity
314 314
315 315 # this can't be parsed on Python 3
316 316 exec('def raisewithtb(exc, tb):\n'
317 317 ' raise exc, None, tb\n')
318 318
319 319 def fsencode(filename):
320 320 """
321 321 Partial backport from os.py in Python 3, which only accepts bytes.
322 322         In Python 2, our paths should only ever be bytes; a unicode path
323 323 indicates a bug.
324 324 """
325 325 if isinstance(filename, str):
326 326 return filename
327 327 else:
328 328 raise TypeError(
329 329 "expect str, not %s" % type(filename).__name__)
330 330
331 331     # In Python 2, fsdecode() is very likely to receive bytes. So it's
332 332 # better not to touch Python 2 part as it's already working fine.
333 333 fsdecode = identity
334 334
335 335 def getdoc(obj):
336 336 return getattr(obj, '__doc__', None)
337 337
338 338 _notset = object()
339 339
340 340 def safehasattr(thing, attr):
341 341 return getattr(thing, attr, _notset) is not _notset
342 342
343 343 def _getoptbwrapper(orig, args, shortlist, namelist):
344 344 return orig(args, shortlist, namelist)
345 345
346 346 strkwargs = identity
347 347 byteskwargs = identity
348 348
349 349 oscurdir = os.curdir
350 350 oslinesep = os.linesep
351 351 osname = os.name
352 352 ospathsep = os.pathsep
353 353 ospardir = os.pardir
354 354 ossep = os.sep
355 355 osaltsep = os.altsep
356 356 stdin = sys.stdin
357 357 stdout = sys.stdout
358 358 stderr = sys.stderr
359 359 if getattr(sys, 'argv', None) is not None:
360 360 sysargv = sys.argv
361 361 sysplatform = sys.platform
362 362 getcwd = os.getcwd
363 363 sysexecutable = sys.executable
364 364 shlexsplit = shlex.split
365 365 bytesio = cStringIO.StringIO
366 366 stringio = bytesio
367 367 maplist = map
368 368 rangelist = range
369 369 ziplist = zip
370 370 rawinput = raw_input
371 371 getargspec = inspect.getargspec
372 372
373 373 def emailparser(*args, **kwargs):
374 374 import email.parser
375 375 return email.parser.Parser(*args, **kwargs)
376 376
377 377 isjython = sysplatform.startswith('java')
378 378
379 379 isdarwin = sysplatform == 'darwin'
380 380 isposix = osname == 'posix'
381 381 iswindows = osname == 'nt'
382 382
383 383 def getoptb(args, shortlist, namelist):
384 384 return _getoptbwrapper(getopt.getopt, args, shortlist, namelist)
385 385
386 386 def gnugetoptb(args, shortlist, namelist):
387 387 return _getoptbwrapper(getopt.gnu_getopt, args, shortlist, namelist)
388 388
389 389 def mkdtemp(suffix=b'', prefix=b'tmp', dir=None):
390 390 return tempfile.mkdtemp(suffix, prefix, dir)
391 391
392 392 # text=True is not supported; use util.from/tonativeeol() instead
393 393 def mkstemp(suffix=b'', prefix=b'tmp', dir=None):
394 394 return tempfile.mkstemp(suffix, prefix, dir)
395
396 # mode must include 'b' (bytes) as encoding= is not supported
397 def namedtempfile(mode=b'w+b', bufsize=-1, suffix=b'', prefix=b'tmp', dir=None,
398 delete=True):
399 mode = sysstr(mode)
400 assert r'b' in mode
401 return tempfile.NamedTemporaryFile(mode, bufsize, suffix=suffix,
402 prefix=prefix, dir=dir, delete=delete)
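A minimal usage sketch (assumed, not part of the change itself; the directory
path below is only an example). Because suffix, prefix, and dir are forwarded
as bytes, tempfile returns a bytes ``.name`` on Python 3, which is what
callers such as ``checklink()`` in posix.py above rely on:

    # hypothetical usage of the new wrapper
    fp = namedtempfile(dir=b'/tmp', prefix=b'hg-example-')
    try:
        assert isinstance(fp.name, bytes)  # bytes path on Python 2 and 3
        fp.write(b'data')
    finally:
        fp.close()  # delete=True removes the file on close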