debugcommands: pass part, not read data, into _debugobsmarkers()...
Martin von Zweigbergk
r33029:b482d80e default
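
In outline, this commit changes what gets handed to the obsmarkers debug helper: _debugbundle2() previously read the bundle2 part itself and passed the raw bytes, and now it passes the part object so _debugobsmarkers() can call part.read() internally. A condensed before/after sketch, distilled from the hunks below (not runnable on its own):

    # before: caller reads the part, helper receives raw bytes
    def _debugobsmarkers(ui, data, indent=0, **opts):
        ...
    _debugobsmarkers(ui, part.read(), indent=4, **opts)

    # after: caller passes the part, helper reads it
    def _debugobsmarkers(ui, part, indent=0, **opts):
        data = part.read()
        ...
    _debugobsmarkers(ui, part, indent=4, **opts)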
@@ -1,2204 +1,2205 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 phases,
51 51 policy,
52 52 pvec,
53 53 pycompat,
54 54 registrar,
55 55 repair,
56 56 revlog,
57 57 revset,
58 58 revsetlang,
59 59 scmutil,
60 60 setdiscovery,
61 61 simplemerge,
62 62 smartset,
63 63 sslutil,
64 64 streamclone,
65 65 templater,
66 66 treediscovery,
67 67 upgrade,
68 68 util,
69 69 vfs as vfsmod,
70 70 )
71 71
72 72 release = lockmod.release
73 73
74 74 command = registrar.command()
75 75
76 76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 77 def debugancestor(ui, repo, *args):
78 78 """find the ancestor revision of two revisions in a given index"""
79 79 if len(args) == 3:
80 80 index, rev1, rev2 = args
81 81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 82 lookup = r.lookup
83 83 elif len(args) == 2:
84 84 if not repo:
85 85 raise error.Abort(_('there is no Mercurial repository here '
86 86 '(.hg not found)'))
87 87 rev1, rev2 = args
88 88 r = repo.changelog
89 89 lookup = repo.lookup
90 90 else:
91 91 raise error.Abort(_('either two or three arguments required'))
92 92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94 94
95 95 @command('debugapplystreamclonebundle', [], 'FILE')
96 96 def debugapplystreamclonebundle(ui, repo, fname):
97 97 """apply a stream clone bundle file"""
98 98 f = hg.openpath(ui, fname)
99 99 gen = exchange.readbundle(ui, f, fname)
100 100 gen.apply(repo)
101 101
102 102 @command('debugbuilddag',
103 103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 105 ('n', 'new-file', None, _('add new file at each rev'))],
106 106 _('[OPTION]... [TEXT]'))
107 107 def debugbuilddag(ui, repo, text=None,
108 108 mergeable_file=False,
109 109 overwritten_file=False,
110 110 new_file=False):
111 111 """builds a repo with a given DAG from scratch in the current empty repo
112 112
113 113 The description of the DAG is read from stdin if not given on the
114 114 command line.
115 115
116 116 Elements:
117 117
118 118 - "+n" is a linear run of n nodes based on the current default parent
119 119 - "." is a single node based on the current default parent
120 120 - "$" resets the default parent to null (implied at the start);
121 121 otherwise the default parent is always the last node created
122 122 - "<p" sets the default parent to the backref p
123 123 - "*p" is a fork at parent p, which is a backref
124 124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 125 - "/p2" is a merge of the preceding node and p2
126 126 - ":tag" defines a local tag for the preceding node
127 127 - "@branch" sets the named branch for subsequent nodes
128 128 - "#...\\n" is a comment up to the end of the line
129 129
130 130 Whitespace between the above elements is ignored.
131 131
132 132 A backref is either
133 133
134 134 - a number n, which references the node curr-n, where curr is the current
135 135 node, or
136 136 - the name of a local tag you placed earlier using ":tag", or
137 137 - empty to denote the default parent.
138 138
139 139 All string valued-elements are either strictly alphanumeric, or must
140 140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 141 """
142 142
143 143 if text is None:
144 144 ui.status(_("reading DAG from stdin\n"))
145 145 text = ui.fin.read()
146 146
147 147 cl = repo.changelog
148 148 if len(cl) > 0:
149 149 raise error.Abort(_('repository is not empty'))
150 150
151 151 # determine number of revs in DAG
152 152 total = 0
153 153 for type, data in dagparser.parsedag(text):
154 154 if type == 'n':
155 155 total += 1
156 156
157 157 if mergeable_file:
158 158 linesperrev = 2
159 159 # make a file with k lines per rev
160 160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 161 initialmergedlines.append("")
162 162
163 163 tags = []
164 164
165 165 wlock = lock = tr = None
166 166 try:
167 167 wlock = repo.wlock()
168 168 lock = repo.lock()
169 169 tr = repo.transaction("builddag")
170 170
171 171 at = -1
172 172 atbranch = 'default'
173 173 nodeids = []
174 174 id = 0
175 175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 176 for type, data in dagparser.parsedag(text):
177 177 if type == 'n':
178 178 ui.note(('node %s\n' % str(data)))
179 179 id, ps = data
180 180
181 181 files = []
182 182 fctxs = {}
183 183
184 184 p2 = None
185 185 if mergeable_file:
186 186 fn = "mf"
187 187 p1 = repo[ps[0]]
188 188 if len(ps) > 1:
189 189 p2 = repo[ps[1]]
190 190 pa = p1.ancestor(p2)
191 191 base, local, other = [x[fn].data() for x in (pa, p1,
192 192 p2)]
193 193 m3 = simplemerge.Merge3Text(base, local, other)
194 194 ml = [l.strip() for l in m3.merge_lines()]
195 195 ml.append("")
196 196 elif at > 0:
197 197 ml = p1[fn].data().split("\n")
198 198 else:
199 199 ml = initialmergedlines
200 200 ml[id * linesperrev] += " r%i" % id
201 201 mergedtext = "\n".join(ml)
202 202 files.append(fn)
203 203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204 204
205 205 if overwritten_file:
206 206 fn = "of"
207 207 files.append(fn)
208 208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209 209
210 210 if new_file:
211 211 fn = "nf%i" % id
212 212 files.append(fn)
213 213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 214 if len(ps) > 1:
215 215 if not p2:
216 216 p2 = repo[ps[1]]
217 217 for fn in p2:
218 218 if fn.startswith("nf"):
219 219 files.append(fn)
220 220 fctxs[fn] = p2[fn]
221 221
222 222 def fctxfn(repo, cx, path):
223 223 return fctxs.get(path)
224 224
225 225 if len(ps) == 0 or ps[0] < 0:
226 226 pars = [None, None]
227 227 elif len(ps) == 1:
228 228 pars = [nodeids[ps[0]], None]
229 229 else:
230 230 pars = [nodeids[p] for p in ps]
231 231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 232 date=(id, 0),
233 233 user="debugbuilddag",
234 234 extra={'branch': atbranch})
235 235 nodeid = repo.commitctx(cx)
236 236 nodeids.append(nodeid)
237 237 at = id
238 238 elif type == 'l':
239 239 id, name = data
240 240 ui.note(('tag %s\n' % name))
241 241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 242 elif type == 'a':
243 243 ui.note(('branch %s\n' % data))
244 244 atbranch = data
245 245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 246 tr.close()
247 247
248 248 if tags:
249 249 repo.vfs.write("localtags", "".join(tags))
250 250 finally:
251 251 ui.progress(_('building'), None)
252 252 release(tr, lock, wlock)
253 253
254 254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 255 indent_string = ' ' * indent
256 256 if all:
257 257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 258 % indent_string)
259 259
260 260 def showchunks(named):
261 261 ui.write("\n%s%s\n" % (indent_string, named))
262 262 chain = None
263 263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 264 node = chunkdata['node']
265 265 p1 = chunkdata['p1']
266 266 p2 = chunkdata['p2']
267 267 cs = chunkdata['cs']
268 268 deltabase = chunkdata['deltabase']
269 269 delta = chunkdata['delta']
270 270 ui.write("%s%s %s %s %s %s %s\n" %
271 271 (indent_string, hex(node), hex(p1), hex(p2),
272 272 hex(cs), hex(deltabase), len(delta)))
273 273 chain = node
274 274
275 275 chunkdata = gen.changelogheader()
276 276 showchunks("changelog")
277 277 chunkdata = gen.manifestheader()
278 278 showchunks("manifest")
279 279 for chunkdata in iter(gen.filelogheader, {}):
280 280 fname = chunkdata['filename']
281 281 showchunks(fname)
282 282 else:
283 283 if isinstance(gen, bundle2.unbundle20):
284 284 raise error.Abort(_('use debugbundle2 for this file'))
285 285 chunkdata = gen.changelogheader()
286 286 chain = None
287 287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 288 node = chunkdata['node']
289 289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 290 chain = node
291 291
292 def _debugobsmarkers(ui, data, indent=0, **opts):
292 def _debugobsmarkers(ui, part, indent=0, **opts):
293 293 """display version and markers contained in 'data'"""
294 data = part.read()
294 295 indent_string = ' ' * indent
295 296 try:
296 297 version, markers = obsolete._readmarkers(data)
297 298 except error.UnknownVersion as exc:
298 299 msg = "%sunsupported version: %s (%d bytes)\n"
299 300 msg %= indent_string, exc.version, len(data)
300 301 ui.write(msg)
301 302 else:
302 303 msg = "%sversion: %s (%d bytes)\n"
303 304 msg %= indent_string, version, len(data)
304 305 ui.write(msg)
305 306 fm = ui.formatter('debugobsolete', opts)
306 307 for rawmarker in sorted(markers):
307 308 m = obsolete.marker(None, rawmarker)
308 309 fm.startitem()
309 310 fm.plain(indent_string)
310 311 cmdutil.showmarker(fm, m)
311 312 fm.end()
312 313
313 314 def _debugbundle2(ui, gen, all=None, **opts):
314 315 """lists the contents of a bundle2"""
315 316 if not isinstance(gen, bundle2.unbundle20):
316 317 raise error.Abort(_('not a bundle2 file'))
317 318 ui.write(('Stream params: %s\n' % repr(gen.params)))
318 319 parttypes = opts.get('part_type', [])
319 320 for part in gen.iterparts():
320 321 if parttypes and part.type not in parttypes:
321 322 continue
322 323 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
323 324 if part.type == 'changegroup':
324 325 version = part.params.get('version', '01')
325 326 cg = changegroup.getunbundler(version, part, 'UN')
326 327 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
327 328 if part.type == 'obsmarkers':
328 _debugobsmarkers(ui, part.read(), indent=4, **opts)
329 _debugobsmarkers(ui, part, indent=4, **opts)
329 330
330 331 @command('debugbundle',
331 332 [('a', 'all', None, _('show all details')),
332 333 ('', 'part-type', [], _('show only the named part type')),
333 334 ('', 'spec', None, _('print the bundlespec of the bundle'))],
334 335 _('FILE'),
335 336 norepo=True)
336 337 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
337 338 """lists the contents of a bundle"""
338 339 with hg.openpath(ui, bundlepath) as f:
339 340 if spec:
340 341 spec = exchange.getbundlespec(ui, f)
341 342 ui.write('%s\n' % spec)
342 343 return
343 344
344 345 gen = exchange.readbundle(ui, f, bundlepath)
345 346 if isinstance(gen, bundle2.unbundle20):
346 347 return _debugbundle2(ui, gen, all=all, **opts)
347 348 _debugchangegroup(ui, gen, all=all, **opts)
348 349
349 350 @command('debugcheckstate', [], '')
350 351 def debugcheckstate(ui, repo):
351 352 """validate the correctness of the current dirstate"""
352 353 parent1, parent2 = repo.dirstate.parents()
353 354 m1 = repo[parent1].manifest()
354 355 m2 = repo[parent2].manifest()
355 356 errors = 0
356 357 for f in repo.dirstate:
357 358 state = repo.dirstate[f]
358 359 if state in "nr" and f not in m1:
359 360 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
360 361 errors += 1
361 362 if state in "a" and f in m1:
362 363 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
363 364 errors += 1
364 365 if state in "m" and f not in m1 and f not in m2:
365 366 ui.warn(_("%s in state %s, but not in either manifest\n") %
366 367 (f, state))
367 368 errors += 1
368 369 for f in m1:
369 370 state = repo.dirstate[f]
370 371 if state not in "nrm":
371 372 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
372 373 errors += 1
373 374 if errors:
374 375 error = _(".hg/dirstate inconsistent with current parent's manifest")
375 376 raise error.Abort(error)
376 377
377 378 @command('debugcolor',
378 379 [('', 'style', None, _('show all configured styles'))],
379 380 'hg debugcolor')
380 381 def debugcolor(ui, repo, **opts):
381 382 """show available color, effects or style"""
382 383 ui.write(('color mode: %s\n') % ui._colormode)
383 384 if opts.get('style'):
384 385 return _debugdisplaystyle(ui)
385 386 else:
386 387 return _debugdisplaycolor(ui)
387 388
388 389 def _debugdisplaycolor(ui):
389 390 ui = ui.copy()
390 391 ui._styles.clear()
391 392 for effect in color._activeeffects(ui).keys():
392 393 ui._styles[effect] = effect
393 394 if ui._terminfoparams:
394 395 for k, v in ui.configitems('color'):
395 396 if k.startswith('color.'):
396 397 ui._styles[k] = k[6:]
397 398 elif k.startswith('terminfo.'):
398 399 ui._styles[k] = k[9:]
399 400 ui.write(_('available colors:\n'))
400 401 # sort label with a '_' after the other to group '_background' entry.
401 402 items = sorted(ui._styles.items(),
402 403 key=lambda i: ('_' in i[0], i[0], i[1]))
403 404 for colorname, label in items:
404 405 ui.write(('%s\n') % colorname, label=label)
405 406
406 407 def _debugdisplaystyle(ui):
407 408 ui.write(_('available style:\n'))
408 409 width = max(len(s) for s in ui._styles)
409 410 for label, effects in sorted(ui._styles.items()):
410 411 ui.write('%s' % label, label=label)
411 412 if effects:
412 413 # 50
413 414 ui.write(': ')
414 415 ui.write(' ' * (max(0, width - len(label))))
415 416 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
416 417 ui.write('\n')
417 418
418 419 @command('debugcreatestreamclonebundle', [], 'FILE')
419 420 def debugcreatestreamclonebundle(ui, repo, fname):
420 421 """create a stream clone bundle file
421 422
422 423 Stream bundles are special bundles that are essentially archives of
423 424 revlog files. They are commonly used for cloning very quickly.
424 425 """
425 426 # TODO we may want to turn this into an abort when this functionality
426 427 # is moved into `hg bundle`.
427 428 if phases.hassecret(repo):
428 429 ui.warn(_('(warning: stream clone bundle will contain secret '
429 430 'revisions)\n'))
430 431
431 432 requirements, gen = streamclone.generatebundlev1(repo)
432 433 changegroup.writechunks(ui, gen, fname)
433 434
434 435 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
435 436
436 437 @command('debugdag',
437 438 [('t', 'tags', None, _('use tags as labels')),
438 439 ('b', 'branches', None, _('annotate with branch names')),
439 440 ('', 'dots', None, _('use dots for runs')),
440 441 ('s', 'spaces', None, _('separate elements by spaces'))],
441 442 _('[OPTION]... [FILE [REV]...]'),
442 443 optionalrepo=True)
443 444 def debugdag(ui, repo, file_=None, *revs, **opts):
444 445 """format the changelog or an index DAG as a concise textual description
445 446
446 447 If you pass a revlog index, the revlog's DAG is emitted. If you list
447 448 revision numbers, they get labeled in the output as rN.
448 449
449 450 Otherwise, the changelog DAG of the current repo is emitted.
450 451 """
451 452 spaces = opts.get('spaces')
452 453 dots = opts.get('dots')
453 454 if file_:
454 455 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
455 456 file_)
456 457 revs = set((int(r) for r in revs))
457 458 def events():
458 459 for r in rlog:
459 460 yield 'n', (r, list(p for p in rlog.parentrevs(r)
460 461 if p != -1))
461 462 if r in revs:
462 463 yield 'l', (r, "r%i" % r)
463 464 elif repo:
464 465 cl = repo.changelog
465 466 tags = opts.get('tags')
466 467 branches = opts.get('branches')
467 468 if tags:
468 469 labels = {}
469 470 for l, n in repo.tags().items():
470 471 labels.setdefault(cl.rev(n), []).append(l)
471 472 def events():
472 473 b = "default"
473 474 for r in cl:
474 475 if branches:
475 476 newb = cl.read(cl.node(r))[5]['branch']
476 477 if newb != b:
477 478 yield 'a', newb
478 479 b = newb
479 480 yield 'n', (r, list(p for p in cl.parentrevs(r)
480 481 if p != -1))
481 482 if tags:
482 483 ls = labels.get(r)
483 484 if ls:
484 485 for l in ls:
485 486 yield 'l', (r, l)
486 487 else:
487 488 raise error.Abort(_('need repo for changelog dag'))
488 489
489 490 for line in dagparser.dagtextlines(events(),
490 491 addspaces=spaces,
491 492 wraplabels=True,
492 493 wrapannotations=True,
493 494 wrapnonlinear=dots,
494 495 usedots=dots,
495 496 maxlinewidth=70):
496 497 ui.write(line)
497 498 ui.write("\n")
498 499
499 500 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
500 501 def debugdata(ui, repo, file_, rev=None, **opts):
501 502 """dump the contents of a data file revision"""
502 503 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
503 504 if rev is not None:
504 505 raise error.CommandError('debugdata', _('invalid arguments'))
505 506 file_, rev = None, file_
506 507 elif rev is None:
507 508 raise error.CommandError('debugdata', _('invalid arguments'))
508 509 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
509 510 try:
510 511 ui.write(r.revision(r.lookup(rev), raw=True))
511 512 except KeyError:
512 513 raise error.Abort(_('invalid revision identifier %s') % rev)
513 514
514 515 @command('debugdate',
515 516 [('e', 'extended', None, _('try extended date formats'))],
516 517 _('[-e] DATE [RANGE]'),
517 518 norepo=True, optionalrepo=True)
518 519 def debugdate(ui, date, range=None, **opts):
519 520 """parse and display a date"""
520 521 if opts["extended"]:
521 522 d = util.parsedate(date, util.extendeddateformats)
522 523 else:
523 524 d = util.parsedate(date)
524 525 ui.write(("internal: %s %s\n") % d)
525 526 ui.write(("standard: %s\n") % util.datestr(d))
526 527 if range:
527 528 m = util.matchdate(range)
528 529 ui.write(("match: %s\n") % m(d[0]))
529 530
530 531 @command('debugdeltachain',
531 532 cmdutil.debugrevlogopts + cmdutil.formatteropts,
532 533 _('-c|-m|FILE'),
533 534 optionalrepo=True)
534 535 def debugdeltachain(ui, repo, file_=None, **opts):
535 536 """dump information about delta chains in a revlog
536 537
537 538 Output can be templatized. Available template keywords are:
538 539
539 540 :``rev``: revision number
540 541 :``chainid``: delta chain identifier (numbered by unique base)
541 542 :``chainlen``: delta chain length to this revision
542 543 :``prevrev``: previous revision in delta chain
543 544 :``deltatype``: role of delta / how it was computed
544 545 :``compsize``: compressed size of revision
545 546 :``uncompsize``: uncompressed size of revision
546 547 :``chainsize``: total size of compressed revisions in chain
547 548 :``chainratio``: total chain size divided by uncompressed revision size
548 549 (new delta chains typically start at ratio 2.00)
549 550 :``lindist``: linear distance from base revision in delta chain to end
550 551 of this revision
551 552 :``extradist``: total size of revisions not part of this delta chain from
552 553 base of delta chain to end of this revision; a measurement
553 554 of how much extra data we need to read/seek across to read
554 555 the delta chain for this revision
555 556 :``extraratio``: extradist divided by chainsize; another representation of
556 557 how much unrelated data is needed to load this delta chain
557 558 """
558 559 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
559 560 index = r.index
560 561 generaldelta = r.version & revlog.FLAG_GENERALDELTA
561 562
562 563 def revinfo(rev):
563 564 e = index[rev]
564 565 compsize = e[1]
565 566 uncompsize = e[2]
566 567 chainsize = 0
567 568
568 569 if generaldelta:
569 570 if e[3] == e[5]:
570 571 deltatype = 'p1'
571 572 elif e[3] == e[6]:
572 573 deltatype = 'p2'
573 574 elif e[3] == rev - 1:
574 575 deltatype = 'prev'
575 576 elif e[3] == rev:
576 577 deltatype = 'base'
577 578 else:
578 579 deltatype = 'other'
579 580 else:
580 581 if e[3] == rev:
581 582 deltatype = 'base'
582 583 else:
583 584 deltatype = 'prev'
584 585
585 586 chain = r._deltachain(rev)[0]
586 587 for iterrev in chain:
587 588 e = index[iterrev]
588 589 chainsize += e[1]
589 590
590 591 return compsize, uncompsize, deltatype, chain, chainsize
591 592
592 593 fm = ui.formatter('debugdeltachain', opts)
593 594
594 595 fm.plain(' rev chain# chainlen prev delta '
595 596 'size rawsize chainsize ratio lindist extradist '
596 597 'extraratio\n')
597 598
598 599 chainbases = {}
599 600 for rev in r:
600 601 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
601 602 chainbase = chain[0]
602 603 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
603 604 basestart = r.start(chainbase)
604 605 revstart = r.start(rev)
605 606 lineardist = revstart + comp - basestart
606 607 extradist = lineardist - chainsize
607 608 try:
608 609 prevrev = chain[-2]
609 610 except IndexError:
610 611 prevrev = -1
611 612
612 613 chainratio = float(chainsize) / float(uncomp)
613 614 extraratio = float(extradist) / float(chainsize)
614 615
615 616 fm.startitem()
616 617 fm.write('rev chainid chainlen prevrev deltatype compsize '
617 618 'uncompsize chainsize chainratio lindist extradist '
618 619 'extraratio',
619 620 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
620 621 rev, chainid, len(chain), prevrev, deltatype, comp,
621 622 uncomp, chainsize, chainratio, lineardist, extradist,
622 623 extraratio,
623 624 rev=rev, chainid=chainid, chainlen=len(chain),
624 625 prevrev=prevrev, deltatype=deltatype, compsize=comp,
625 626 uncompsize=uncomp, chainsize=chainsize,
626 627 chainratio=chainratio, lindist=lineardist,
627 628 extradist=extradist, extraratio=extraratio)
628 629
629 630 fm.end()
630 631
631 632 @command('debugdirstate|debugstate',
632 633 [('', 'nodates', None, _('do not display the saved mtime')),
633 634 ('', 'datesort', None, _('sort by saved mtime'))],
634 635 _('[OPTION]...'))
635 636 def debugstate(ui, repo, **opts):
636 637 """show the contents of the current dirstate"""
637 638
638 639 nodates = opts.get('nodates')
639 640 datesort = opts.get('datesort')
640 641
641 642 timestr = ""
642 643 if datesort:
643 644 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
644 645 else:
645 646 keyfunc = None # sort by filename
646 647 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
647 648 if ent[3] == -1:
648 649 timestr = 'unset '
649 650 elif nodates:
650 651 timestr = 'set '
651 652 else:
652 653 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
653 654 time.localtime(ent[3]))
654 655 if ent[1] & 0o20000:
655 656 mode = 'lnk'
656 657 else:
657 658 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
658 659 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
659 660 for f in repo.dirstate.copies():
660 661 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
661 662
662 663 @command('debugdiscovery',
663 664 [('', 'old', None, _('use old-style discovery')),
664 665 ('', 'nonheads', None,
665 666 _('use old-style discovery with non-heads included')),
666 667 ] + cmdutil.remoteopts,
667 668 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
668 669 def debugdiscovery(ui, repo, remoteurl="default", **opts):
669 670 """runs the changeset discovery protocol in isolation"""
670 671 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
671 672 opts.get('branch'))
672 673 remote = hg.peer(repo, opts, remoteurl)
673 674 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
674 675
675 676 # make sure tests are repeatable
676 677 random.seed(12323)
677 678
678 679 def doit(localheads, remoteheads, remote=remote):
679 680 if opts.get('old'):
680 681 if localheads:
681 682 raise error.Abort('cannot use localheads with old style '
682 683 'discovery')
683 684 if not util.safehasattr(remote, 'branches'):
684 685 # enable in-client legacy support
685 686 remote = localrepo.locallegacypeer(remote.local())
686 687 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
687 688 force=True)
688 689 common = set(common)
689 690 if not opts.get('nonheads'):
690 691 ui.write(("unpruned common: %s\n") %
691 692 " ".join(sorted(short(n) for n in common)))
692 693 dag = dagutil.revlogdag(repo.changelog)
693 694 all = dag.ancestorset(dag.internalizeall(common))
694 695 common = dag.externalizeall(dag.headsetofconnecteds(all))
695 696 else:
696 697 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
697 698 common = set(common)
698 699 rheads = set(hds)
699 700 lheads = set(repo.heads())
700 701 ui.write(("common heads: %s\n") %
701 702 " ".join(sorted(short(n) for n in common)))
702 703 if lheads <= common:
703 704 ui.write(("local is subset\n"))
704 705 elif rheads <= common:
705 706 ui.write(("remote is subset\n"))
706 707
707 708 serverlogs = opts.get('serverlog')
708 709 if serverlogs:
709 710 for filename in serverlogs:
710 711 with open(filename, 'r') as logfile:
711 712 line = logfile.readline()
712 713 while line:
713 714 parts = line.strip().split(';')
714 715 op = parts[1]
715 716 if op == 'cg':
716 717 pass
717 718 elif op == 'cgss':
718 719 doit(parts[2].split(' '), parts[3].split(' '))
719 720 elif op == 'unb':
720 721 doit(parts[3].split(' '), parts[2].split(' '))
721 722 line = logfile.readline()
722 723 else:
723 724 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
724 725 opts.get('remote_head'))
725 726 localrevs = opts.get('local_head')
726 727 doit(localrevs, remoterevs)
727 728
728 729 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
729 730 def debugextensions(ui, **opts):
730 731 '''show information about active extensions'''
731 732 exts = extensions.extensions(ui)
732 733 hgver = util.version()
733 734 fm = ui.formatter('debugextensions', opts)
734 735 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
735 736 isinternal = extensions.ismoduleinternal(extmod)
736 737 extsource = pycompat.fsencode(extmod.__file__)
737 738 if isinternal:
738 739 exttestedwith = [] # never expose magic string to users
739 740 else:
740 741 exttestedwith = getattr(extmod, 'testedwith', '').split()
741 742 extbuglink = getattr(extmod, 'buglink', None)
742 743
743 744 fm.startitem()
744 745
745 746 if ui.quiet or ui.verbose:
746 747 fm.write('name', '%s\n', extname)
747 748 else:
748 749 fm.write('name', '%s', extname)
749 750 if isinternal or hgver in exttestedwith:
750 751 fm.plain('\n')
751 752 elif not exttestedwith:
752 753 fm.plain(_(' (untested!)\n'))
753 754 else:
754 755 lasttestedversion = exttestedwith[-1]
755 756 fm.plain(' (%s!)\n' % lasttestedversion)
756 757
757 758 fm.condwrite(ui.verbose and extsource, 'source',
758 759 _(' location: %s\n'), extsource or "")
759 760
760 761 if ui.verbose:
761 762 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
762 763 fm.data(bundled=isinternal)
763 764
764 765 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
765 766 _(' tested with: %s\n'),
766 767 fm.formatlist(exttestedwith, name='ver'))
767 768
768 769 fm.condwrite(ui.verbose and extbuglink, 'buglink',
769 770 _(' bug reporting: %s\n'), extbuglink or "")
770 771
771 772 fm.end()
772 773
773 774 @command('debugfileset',
774 775 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
775 776 _('[-r REV] FILESPEC'))
776 777 def debugfileset(ui, repo, expr, **opts):
777 778 '''parse and apply a fileset specification'''
778 779 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
779 780 if ui.verbose:
780 781 tree = fileset.parse(expr)
781 782 ui.note(fileset.prettyformat(tree), "\n")
782 783
783 784 for f in ctx.getfileset(expr):
784 785 ui.write("%s\n" % f)
785 786
786 787 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
787 788 def debugfsinfo(ui, path="."):
788 789 """show information detected about current filesystem"""
789 790 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
790 791 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
791 792 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
792 793 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
793 794 casesensitive = '(unknown)'
794 795 try:
795 796 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
796 797 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
797 798 except OSError:
798 799 pass
799 800 ui.write(('case-sensitive: %s\n') % casesensitive)
800 801
801 802 @command('debuggetbundle',
802 803 [('H', 'head', [], _('id of head node'), _('ID')),
803 804 ('C', 'common', [], _('id of common node'), _('ID')),
804 805 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
805 806 _('REPO FILE [-H|-C ID]...'),
806 807 norepo=True)
807 808 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
808 809 """retrieves a bundle from a repo
809 810
810 811 Every ID must be a full-length hex node id string. Saves the bundle to the
811 812 given file.
812 813 """
813 814 repo = hg.peer(ui, opts, repopath)
814 815 if not repo.capable('getbundle'):
815 816 raise error.Abort("getbundle() not supported by target repository")
816 817 args = {}
817 818 if common:
818 819 args['common'] = [bin(s) for s in common]
819 820 if head:
820 821 args['heads'] = [bin(s) for s in head]
821 822 # TODO: get desired bundlecaps from command line.
822 823 args['bundlecaps'] = None
823 824 bundle = repo.getbundle('debug', **args)
824 825
825 826 bundletype = opts.get('type', 'bzip2').lower()
826 827 btypes = {'none': 'HG10UN',
827 828 'bzip2': 'HG10BZ',
828 829 'gzip': 'HG10GZ',
829 830 'bundle2': 'HG20'}
830 831 bundletype = btypes.get(bundletype)
831 832 if bundletype not in bundle2.bundletypes:
832 833 raise error.Abort(_('unknown bundle type specified with --type'))
833 834 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
834 835
835 836 @command('debugignore', [], '[FILE]')
836 837 def debugignore(ui, repo, *files, **opts):
837 838 """display the combined ignore pattern and information about ignored files
838 839
839 840 With no argument display the combined ignore pattern.
840 841
841 842 Given space separated file names, shows if the given file is ignored and
842 843 if so, show the ignore rule (file and line number) that matched it.
843 844 """
844 845 ignore = repo.dirstate._ignore
845 846 if not files:
846 847 # Show all the patterns
847 848 ui.write("%s\n" % repr(ignore))
848 849 else:
849 850 for f in files:
850 851 nf = util.normpath(f)
851 852 ignored = None
852 853 ignoredata = None
853 854 if nf != '.':
854 855 if ignore(nf):
855 856 ignored = nf
856 857 ignoredata = repo.dirstate._ignorefileandline(nf)
857 858 else:
858 859 for p in util.finddirs(nf):
859 860 if ignore(p):
860 861 ignored = p
861 862 ignoredata = repo.dirstate._ignorefileandline(p)
862 863 break
863 864 if ignored:
864 865 if ignored == nf:
865 866 ui.write(_("%s is ignored\n") % f)
866 867 else:
867 868 ui.write(_("%s is ignored because of "
868 869 "containing folder %s\n")
869 870 % (f, ignored))
870 871 ignorefile, lineno, line = ignoredata
871 872 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
872 873 % (ignorefile, lineno, line))
873 874 else:
874 875 ui.write(_("%s is not ignored\n") % f)
875 876
876 877 @command('debugindex', cmdutil.debugrevlogopts +
877 878 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
878 879 _('[-f FORMAT] -c|-m|FILE'),
879 880 optionalrepo=True)
880 881 def debugindex(ui, repo, file_=None, **opts):
881 882 """dump the contents of an index file"""
882 883 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
883 884 format = opts.get('format', 0)
884 885 if format not in (0, 1):
885 886 raise error.Abort(_("unknown format %d") % format)
886 887
887 888 generaldelta = r.version & revlog.FLAG_GENERALDELTA
888 889 if generaldelta:
889 890 basehdr = ' delta'
890 891 else:
891 892 basehdr = ' base'
892 893
893 894 if ui.debugflag:
894 895 shortfn = hex
895 896 else:
896 897 shortfn = short
897 898
898 899 # There might not be anything in r, so have a sane default
899 900 idlen = 12
900 901 for i in r:
901 902 idlen = len(shortfn(r.node(i)))
902 903 break
903 904
904 905 if format == 0:
905 906 ui.write((" rev offset length " + basehdr + " linkrev"
906 907 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
907 908 elif format == 1:
908 909 ui.write((" rev flag offset length"
909 910 " size " + basehdr + " link p1 p2"
910 911 " %s\n") % "nodeid".rjust(idlen))
911 912
912 913 for i in r:
913 914 node = r.node(i)
914 915 if generaldelta:
915 916 base = r.deltaparent(i)
916 917 else:
917 918 base = r.chainbase(i)
918 919 if format == 0:
919 920 try:
920 921 pp = r.parents(node)
921 922 except Exception:
922 923 pp = [nullid, nullid]
923 924 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
924 925 i, r.start(i), r.length(i), base, r.linkrev(i),
925 926 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
926 927 elif format == 1:
927 928 pr = r.parentrevs(i)
928 929 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
929 930 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
930 931 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
931 932
932 933 @command('debugindexdot', cmdutil.debugrevlogopts,
933 934 _('-c|-m|FILE'), optionalrepo=True)
934 935 def debugindexdot(ui, repo, file_=None, **opts):
935 936 """dump an index DAG as a graphviz dot file"""
936 937 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
937 938 ui.write(("digraph G {\n"))
938 939 for i in r:
939 940 node = r.node(i)
940 941 pp = r.parents(node)
941 942 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
942 943 if pp[1] != nullid:
943 944 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
944 945 ui.write("}\n")
945 946
946 947 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
947 948 def debuginstall(ui, **opts):
948 949 '''test Mercurial installation
949 950
950 951 Returns 0 on success.
951 952 '''
952 953
953 954 def writetemp(contents):
954 955 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
955 956 f = os.fdopen(fd, pycompat.sysstr("wb"))
956 957 f.write(contents)
957 958 f.close()
958 959 return name
959 960
960 961 problems = 0
961 962
962 963 fm = ui.formatter('debuginstall', opts)
963 964 fm.startitem()
964 965
965 966 # encoding
966 967 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
967 968 err = None
968 969 try:
969 970 encoding.fromlocal("test")
970 971 except error.Abort as inst:
971 972 err = inst
972 973 problems += 1
973 974 fm.condwrite(err, 'encodingerror', _(" %s\n"
974 975 " (check that your locale is properly set)\n"), err)
975 976
976 977 # Python
977 978 fm.write('pythonexe', _("checking Python executable (%s)\n"),
978 979 pycompat.sysexecutable)
979 980 fm.write('pythonver', _("checking Python version (%s)\n"),
980 981 ("%d.%d.%d" % sys.version_info[:3]))
981 982 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
982 983 os.path.dirname(pycompat.fsencode(os.__file__)))
983 984
984 985 security = set(sslutil.supportedprotocols)
985 986 if sslutil.hassni:
986 987 security.add('sni')
987 988
988 989 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
989 990 fm.formatlist(sorted(security), name='protocol',
990 991 fmt='%s', sep=','))
991 992
992 993 # These are warnings, not errors. So don't increment problem count. This
993 994 # may change in the future.
994 995 if 'tls1.2' not in security:
995 996 fm.plain(_(' TLS 1.2 not supported by Python install; '
996 997 'network connections lack modern security\n'))
997 998 if 'sni' not in security:
998 999 fm.plain(_(' SNI not supported by Python install; may have '
999 1000 'connectivity issues with some servers\n'))
1000 1001
1001 1002 # TODO print CA cert info
1002 1003
1003 1004 # hg version
1004 1005 hgver = util.version()
1005 1006 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1006 1007 hgver.split('+')[0])
1007 1008 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1008 1009 '+'.join(hgver.split('+')[1:]))
1009 1010
1010 1011 # compiled modules
1011 1012 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1012 1013 policy.policy)
1013 1014 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1014 1015 os.path.dirname(pycompat.fsencode(__file__)))
1015 1016
1016 1017 if policy.policy in ('c', 'allow'):
1017 1018 err = None
1018 1019 try:
1019 1020 from .cext import (
1020 1021 base85,
1021 1022 bdiff,
1022 1023 mpatch,
1023 1024 osutil,
1024 1025 )
1025 1026 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1026 1027 except Exception as inst:
1027 1028 err = inst
1028 1029 problems += 1
1029 1030 fm.condwrite(err, 'extensionserror', " %s\n", err)
1030 1031
1031 1032 compengines = util.compengines._engines.values()
1032 1033 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1033 1034 fm.formatlist(sorted(e.name() for e in compengines),
1034 1035 name='compengine', fmt='%s', sep=', '))
1035 1036 fm.write('compenginesavail', _('checking available compression engines '
1036 1037 '(%s)\n'),
1037 1038 fm.formatlist(sorted(e.name() for e in compengines
1038 1039 if e.available()),
1039 1040 name='compengine', fmt='%s', sep=', '))
1040 1041 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1041 1042 fm.write('compenginesserver', _('checking available compression engines '
1042 1043 'for wire protocol (%s)\n'),
1043 1044 fm.formatlist([e.name() for e in wirecompengines
1044 1045 if e.wireprotosupport()],
1045 1046 name='compengine', fmt='%s', sep=', '))
1046 1047
1047 1048 # templates
1048 1049 p = templater.templatepaths()
1049 1050 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1050 1051 fm.condwrite(not p, '', _(" no template directories found\n"))
1051 1052 if p:
1052 1053 m = templater.templatepath("map-cmdline.default")
1053 1054 if m:
1054 1055 # template found, check if it is working
1055 1056 err = None
1056 1057 try:
1057 1058 templater.templater.frommapfile(m)
1058 1059 except Exception as inst:
1059 1060 err = inst
1060 1061 p = None
1061 1062 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1062 1063 else:
1063 1064 p = None
1064 1065 fm.condwrite(p, 'defaulttemplate',
1065 1066 _("checking default template (%s)\n"), m)
1066 1067 fm.condwrite(not m, 'defaulttemplatenotfound',
1067 1068 _(" template '%s' not found\n"), "default")
1068 1069 if not p:
1069 1070 problems += 1
1070 1071 fm.condwrite(not p, '',
1071 1072 _(" (templates seem to have been installed incorrectly)\n"))
1072 1073
1073 1074 # editor
1074 1075 editor = ui.geteditor()
1075 1076 editor = util.expandpath(editor)
1076 1077 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1077 1078 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1078 1079 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1079 1080 _(" No commit editor set and can't find %s in PATH\n"
1080 1081 " (specify a commit editor in your configuration"
1081 1082 " file)\n"), not cmdpath and editor == 'vi' and editor)
1082 1083 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1083 1084 _(" Can't find editor '%s' in PATH\n"
1084 1085 " (specify a commit editor in your configuration"
1085 1086 " file)\n"), not cmdpath and editor)
1086 1087 if not cmdpath and editor != 'vi':
1087 1088 problems += 1
1088 1089
1089 1090 # check username
1090 1091 username = None
1091 1092 err = None
1092 1093 try:
1093 1094 username = ui.username()
1094 1095 except error.Abort as e:
1095 1096 err = e
1096 1097 problems += 1
1097 1098
1098 1099 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1099 1100 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1100 1101 " (specify a username in your configuration file)\n"), err)
1101 1102
1102 1103 fm.condwrite(not problems, '',
1103 1104 _("no problems detected\n"))
1104 1105 if not problems:
1105 1106 fm.data(problems=problems)
1106 1107 fm.condwrite(problems, 'problems',
1107 1108 _("%d problems detected,"
1108 1109 " please check your install!\n"), problems)
1109 1110 fm.end()
1110 1111
1111 1112 return problems
1112 1113
1113 1114 @command('debugknown', [], _('REPO ID...'), norepo=True)
1114 1115 def debugknown(ui, repopath, *ids, **opts):
1115 1116 """test whether node ids are known to a repo
1116 1117
1117 1118 Every ID must be a full-length hex node id string. Returns a list of 0s
1118 1119 and 1s indicating unknown/known.
1119 1120 """
1120 1121 repo = hg.peer(ui, opts, repopath)
1121 1122 if not repo.capable('known'):
1122 1123 raise error.Abort("known() not supported by target repository")
1123 1124 flags = repo.known([bin(s) for s in ids])
1124 1125 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1125 1126
1126 1127 @command('debuglabelcomplete', [], _('LABEL...'))
1127 1128 def debuglabelcomplete(ui, repo, *args):
1128 1129 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1129 1130 debugnamecomplete(ui, repo, *args)
1130 1131
1131 1132 @command('debuglocks',
1132 1133 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 1134 ('W', 'force-wlock', None,
1134 1135 _('free the working state lock (DANGEROUS)'))],
1135 1136 _('[OPTION]...'))
1136 1137 def debuglocks(ui, repo, **opts):
1137 1138 """show or modify state of locks
1138 1139
1139 1140 By default, this command will show which locks are held. This
1140 1141 includes the user and process holding the lock, the amount of time
1141 1142 the lock has been held, and the machine name where the process is
1142 1143 running if it's not local.
1143 1144
1144 1145 Locks protect the integrity of Mercurial's data, so should be
1145 1146 treated with care. System crashes or other interruptions may cause
1146 1147 locks to not be properly released, though Mercurial will usually
1147 1148 detect and remove such stale locks automatically.
1148 1149
1149 1150 However, detecting stale locks may not always be possible (for
1150 1151 instance, on a shared filesystem). Removing locks may also be
1151 1152 blocked by filesystem permissions.
1152 1153
1153 1154 Returns 0 if no locks are held.
1154 1155
1155 1156 """
1156 1157
1157 1158 if opts.get('force_lock'):
1158 1159 repo.svfs.unlink('lock')
1159 1160 if opts.get('force_wlock'):
1160 1161 repo.vfs.unlink('wlock')
1161 1162 if opts.get('force_lock') or opts.get('force_wlock'):
1162 1163 return 0
1163 1164
1164 1165 now = time.time()
1165 1166 held = 0
1166 1167
1167 1168 def report(vfs, name, method):
1168 1169 # this causes stale locks to get reaped for more accurate reporting
1169 1170 try:
1170 1171 l = method(False)
1171 1172 except error.LockHeld:
1172 1173 l = None
1173 1174
1174 1175 if l:
1175 1176 l.release()
1176 1177 else:
1177 1178 try:
1178 1179 stat = vfs.lstat(name)
1179 1180 age = now - stat.st_mtime
1180 1181 user = util.username(stat.st_uid)
1181 1182 locker = vfs.readlock(name)
1182 1183 if ":" in locker:
1183 1184 host, pid = locker.split(':')
1184 1185 if host == socket.gethostname():
1185 1186 locker = 'user %s, process %s' % (user, pid)
1186 1187 else:
1187 1188 locker = 'user %s, process %s, host %s' \
1188 1189 % (user, pid, host)
1189 1190 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 1191 return 1
1191 1192 except OSError as e:
1192 1193 if e.errno != errno.ENOENT:
1193 1194 raise
1194 1195
1195 1196 ui.write(("%-6s free\n") % (name + ":"))
1196 1197 return 0
1197 1198
1198 1199 held += report(repo.svfs, "lock", repo.lock)
1199 1200 held += report(repo.vfs, "wlock", repo.wlock)
1200 1201
1201 1202 return held
1202 1203
1203 1204 @command('debugmergestate', [], '')
1204 1205 def debugmergestate(ui, repo, *args):
1205 1206 """print merge state
1206 1207
1207 1208 Use --verbose to print out information about whether v1 or v2 merge state
1208 1209 was chosen."""
1209 1210 def _hashornull(h):
1210 1211 if h == nullhex:
1211 1212 return 'null'
1212 1213 else:
1213 1214 return h
1214 1215
1215 1216 def printrecords(version):
1216 1217 ui.write(('* version %s records\n') % version)
1217 1218 if version == 1:
1218 1219 records = v1records
1219 1220 else:
1220 1221 records = v2records
1221 1222
1222 1223 for rtype, record in records:
1223 1224 # pretty print some record types
1224 1225 if rtype == 'L':
1225 1226 ui.write(('local: %s\n') % record)
1226 1227 elif rtype == 'O':
1227 1228 ui.write(('other: %s\n') % record)
1228 1229 elif rtype == 'm':
1229 1230 driver, mdstate = record.split('\0', 1)
1230 1231 ui.write(('merge driver: %s (state "%s")\n')
1231 1232 % (driver, mdstate))
1232 1233 elif rtype in 'FDC':
1233 1234 r = record.split('\0')
1234 1235 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 1236 if version == 1:
1236 1237 onode = 'not stored in v1 format'
1237 1238 flags = r[7]
1238 1239 else:
1239 1240 onode, flags = r[7:9]
1240 1241 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 1242 % (f, rtype, state, _hashornull(hash)))
1242 1243 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 1244 ui.write((' ancestor path: %s (node %s)\n')
1244 1245 % (afile, _hashornull(anode)))
1245 1246 ui.write((' other path: %s (node %s)\n')
1246 1247 % (ofile, _hashornull(onode)))
1247 1248 elif rtype == 'f':
1248 1249 filename, rawextras = record.split('\0', 1)
1249 1250 extras = rawextras.split('\0')
1250 1251 i = 0
1251 1252 extrastrings = []
1252 1253 while i < len(extras):
1253 1254 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 1255 i += 2
1255 1256
1256 1257 ui.write(('file extras: %s (%s)\n')
1257 1258 % (filename, ', '.join(extrastrings)))
1258 1259 elif rtype == 'l':
1259 1260 labels = record.split('\0', 2)
1260 1261 labels = [l for l in labels if len(l) > 0]
1261 1262 ui.write(('labels:\n'))
1262 1263 ui.write((' local: %s\n' % labels[0]))
1263 1264 ui.write((' other: %s\n' % labels[1]))
1264 1265 if len(labels) > 2:
1265 1266 ui.write((' base: %s\n' % labels[2]))
1266 1267 else:
1267 1268 ui.write(('unrecognized entry: %s\t%s\n')
1268 1269 % (rtype, record.replace('\0', '\t')))
1269 1270
1270 1271 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 1272 # merge state records. We shouldn't be doing this, but this is OK since this
1272 1273 # command is pretty low-level.
1273 1274 ms = mergemod.mergestate(repo)
1274 1275
1275 1276 # sort so that reasonable information is on top
1276 1277 v1records = ms._readrecordsv1()
1277 1278 v2records = ms._readrecordsv2()
1278 1279 order = 'LOml'
1279 1280 def key(r):
1280 1281 idx = order.find(r[0])
1281 1282 if idx == -1:
1282 1283 return (1, r[1])
1283 1284 else:
1284 1285 return (0, idx)
1285 1286 v1records.sort(key=key)
1286 1287 v2records.sort(key=key)
1287 1288
1288 1289 if not v1records and not v2records:
1289 1290 ui.write(('no merge state found\n'))
1290 1291 elif not v2records:
1291 1292 ui.note(('no version 2 merge state\n'))
1292 1293 printrecords(1)
1293 1294 elif ms._v1v2match(v1records, v2records):
1294 1295 ui.note(('v1 and v2 states match: using v2\n'))
1295 1296 printrecords(2)
1296 1297 else:
1297 1298 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 1299 printrecords(1)
1299 1300 if ui.verbose:
1300 1301 printrecords(2)
1301 1302
1302 1303 @command('debugnamecomplete', [], _('NAME...'))
1303 1304 def debugnamecomplete(ui, repo, *args):
1304 1305 '''complete "names" - tags, open branch names, bookmark names'''
1305 1306
1306 1307 names = set()
1307 1308 # since we previously only listed open branches, we will handle that
1308 1309 # specially (after this for loop)
1309 1310 for name, ns in repo.names.iteritems():
1310 1311 if name != 'branches':
1311 1312 names.update(ns.listnames(repo))
1312 1313 names.update(tag for (tag, heads, tip, closed)
1313 1314 in repo.branchmap().iterbranches() if not closed)
1314 1315 completions = set()
1315 1316 if not args:
1316 1317 args = ['']
1317 1318 for a in args:
1318 1319 completions.update(n for n in names if n.startswith(a))
1319 1320 ui.write('\n'.join(sorted(completions)))
1320 1321 ui.write('\n')
1321 1322
1322 1323 @command('debugobsolete',
1323 1324 [('', 'flags', 0, _('markers flag')),
1324 1325 ('', 'record-parents', False,
1325 1326 _('record parent information for the precursor')),
1326 1327 ('r', 'rev', [], _('display markers relevant to REV')),
1327 1328 ('', 'exclusive', False, _('restrict display to markers only '
1328 1329 'relevant to REV')),
1329 1330 ('', 'index', False, _('display index of the marker')),
1330 1331 ('', 'delete', [], _('delete markers specified by indices')),
1331 1332 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1332 1333 _('[OBSOLETED [REPLACEMENT ...]]'))
1333 1334 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1334 1335 """create arbitrary obsolete marker
1335 1336
1336 1337 With no arguments, displays the list of obsolescence markers."""
1337 1338
1338 1339 def parsenodeid(s):
1339 1340 try:
1340 1341 # We do not use revsingle/revrange functions here to accept
1341 1342 # arbitrary node identifiers, possibly not present in the
1342 1343 # local repository.
1343 1344 n = bin(s)
1344 1345 if len(n) != len(nullid):
1345 1346 raise TypeError()
1346 1347 return n
1347 1348 except TypeError:
1348 1349 raise error.Abort('changeset references must be full hexadecimal '
1349 1350 'node identifiers')
1350 1351
1351 1352 if opts.get('delete'):
1352 1353 indices = []
1353 1354 for v in opts.get('delete'):
1354 1355 try:
1355 1356 indices.append(int(v))
1356 1357 except ValueError:
1357 1358 raise error.Abort(_('invalid index value: %r') % v,
1358 1359 hint=_('use integers for indices'))
1359 1360
1360 1361 if repo.currenttransaction():
1361 1362 raise error.Abort(_('cannot delete obsmarkers in the middle '
1362 1363 'of transaction.'))
1363 1364
1364 1365 with repo.lock():
1365 1366 n = repair.deleteobsmarkers(repo.obsstore, indices)
1366 1367 ui.write(_('deleted %i obsolescence markers\n') % n)
1367 1368
1368 1369 return
1369 1370
1370 1371 if precursor is not None:
1371 1372 if opts['rev']:
1372 1373 raise error.Abort('cannot select revision when creating marker')
1373 1374 metadata = {}
1374 1375 metadata['user'] = opts['user'] or ui.username()
1375 1376 succs = tuple(parsenodeid(succ) for succ in successors)
1376 1377 l = repo.lock()
1377 1378 try:
1378 1379 tr = repo.transaction('debugobsolete')
1379 1380 try:
1380 1381 date = opts.get('date')
1381 1382 if date:
1382 1383 date = util.parsedate(date)
1383 1384 else:
1384 1385 date = None
1385 1386 prec = parsenodeid(precursor)
1386 1387 parents = None
1387 1388 if opts['record_parents']:
1388 1389 if prec not in repo.unfiltered():
1389 1390 raise error.Abort('cannot used --record-parents on '
1390 1391 'unknown changesets')
1391 1392 parents = repo.unfiltered()[prec].parents()
1392 1393 parents = tuple(p.node() for p in parents)
1393 1394 repo.obsstore.create(tr, prec, succs, opts['flags'],
1394 1395 parents=parents, date=date,
1395 1396 metadata=metadata, ui=ui)
1396 1397 tr.close()
1397 1398 except ValueError as exc:
1398 1399 raise error.Abort(_('bad obsmarker input: %s') % exc)
1399 1400 finally:
1400 1401 tr.release()
1401 1402 finally:
1402 1403 l.release()
1403 1404 else:
1404 1405 if opts['rev']:
1405 1406 revs = scmutil.revrange(repo, opts['rev'])
1406 1407 nodes = [repo[r].node() for r in revs]
1407 1408 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1408 1409 exclusive=opts['exclusive']))
1409 1410 markers.sort(key=lambda x: x._data)
1410 1411 else:
1411 1412 markers = obsolete.getmarkers(repo)
1412 1413
1413 1414 markerstoiter = markers
1414 1415 isrelevant = lambda m: True
1415 1416 if opts.get('rev') and opts.get('index'):
1416 1417 markerstoiter = obsolete.getmarkers(repo)
1417 1418 markerset = set(markers)
1418 1419 isrelevant = lambda m: m in markerset
1419 1420
1420 1421 fm = ui.formatter('debugobsolete', opts)
1421 1422 for i, m in enumerate(markerstoiter):
1422 1423 if not isrelevant(m):
1423 1424 # marker can be irrelevant when we're iterating over a set
1424 1425 # of markers (markerstoiter) which is bigger than the set
1425 1426 # of markers we want to display (markers)
1426 1427 # this can happen if both --index and --rev options are
1427 1428 # provided and thus we need to iterate over all of the markers
1428 1429 # to get the correct indices, but only display the ones that
1429 1430 # are relevant to --rev value
1430 1431 continue
1431 1432 fm.startitem()
1432 1433 ind = i if opts.get('index') else None
1433 1434 cmdutil.showmarker(fm, m, index=ind)
1434 1435 fm.end()
1435 1436
1436 1437 @command('debugpathcomplete',
1437 1438 [('f', 'full', None, _('complete an entire path')),
1438 1439 ('n', 'normal', None, _('show only normal files')),
1439 1440 ('a', 'added', None, _('show only added files')),
1440 1441 ('r', 'removed', None, _('show only removed files'))],
1441 1442 _('FILESPEC...'))
1442 1443 def debugpathcomplete(ui, repo, *specs, **opts):
1443 1444 '''complete part or all of a tracked path
1444 1445
1445 1446 This command supports shells that offer path name completion. It
1446 1447 currently completes only files already known to the dirstate.
1447 1448
1448 1449 Completion extends only to the next path segment unless
1449 1450 --full is specified, in which case entire paths are used.'''
1450 1451
1451 1452 def complete(path, acceptable):
1452 1453 dirstate = repo.dirstate
1453 1454 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1454 1455 rootdir = repo.root + pycompat.ossep
1455 1456 if spec != repo.root and not spec.startswith(rootdir):
1456 1457 return [], []
1457 1458 if os.path.isdir(spec):
1458 1459 spec += '/'
1459 1460 spec = spec[len(rootdir):]
1460 1461 fixpaths = pycompat.ossep != '/'
1461 1462 if fixpaths:
1462 1463 spec = spec.replace(pycompat.ossep, '/')
1463 1464 speclen = len(spec)
1464 1465 fullpaths = opts['full']
1465 1466 files, dirs = set(), set()
1466 1467 adddir, addfile = dirs.add, files.add
1467 1468 for f, st in dirstate.iteritems():
1468 1469 if f.startswith(spec) and st[0] in acceptable:
1469 1470 if fixpaths:
1470 1471 f = f.replace('/', pycompat.ossep)
1471 1472 if fullpaths:
1472 1473 addfile(f)
1473 1474 continue
1474 1475 s = f.find(pycompat.ossep, speclen)
1475 1476 if s >= 0:
1476 1477 adddir(f[:s])
1477 1478 else:
1478 1479 addfile(f)
1479 1480 return files, dirs
1480 1481
1481 1482 acceptable = ''
1482 1483 if opts['normal']:
1483 1484 acceptable += 'nm'
1484 1485 if opts['added']:
1485 1486 acceptable += 'a'
1486 1487 if opts['removed']:
1487 1488 acceptable += 'r'
1488 1489 cwd = repo.getcwd()
1489 1490 if not specs:
1490 1491 specs = ['.']
1491 1492
1492 1493 files, dirs = set(), set()
1493 1494 for spec in specs:
1494 1495 f, d = complete(spec, acceptable or 'nmar')
1495 1496 files.update(f)
1496 1497 dirs.update(d)
1497 1498 files.update(dirs)
1498 1499 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1499 1500 ui.write('\n')
1500 1501
1501 1502 @command('debugpickmergetool',
1502 1503 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1503 1504 ('', 'changedelete', None, _('emulate merging change and delete')),
1504 1505 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1505 1506 _('[PATTERN]...'),
1506 1507 inferrepo=True)
1507 1508 def debugpickmergetool(ui, repo, *pats, **opts):
1508 1509 """examine which merge tool is chosen for specified file
1509 1510
1510 1511 As described in :hg:`help merge-tools`, Mercurial examines
1511 1512 configurations below in this order to decide which merge tool is
1512 1513 chosen for specified file.
1513 1514
1514 1515 1. ``--tool`` option
1515 1516 2. ``HGMERGE`` environment variable
1516 1517 3. configurations in ``merge-patterns`` section
1517 1518 4. configuration of ``ui.merge``
1518 1519 5. configurations in ``merge-tools`` section
1519 1520 6. ``hgmerge`` tool (for historical reason only)
1520 1521 7. default tool for fallback (``:merge`` or ``:prompt``)
1521 1522
1522 1523 This command writes out examination result in the style below::
1523 1524
1524 1525 FILE = MERGETOOL
1525 1526
1526 1527 By default, all files known in the first parent context of the
1527 1528 working directory are examined. Use file patterns and/or -I/-X
1528 1529 options to limit the target files. -r/--rev is also useful for
1529 1530 examining files in another context without actually updating to it.
1530 1531
1531 1532 With --debug, this command also shows the warning messages emitted
1532 1533 while matching against ``merge-patterns`` and so on. It is
1533 1534 recommended to use this option with explicit file patterns and/or
1534 1535 -I/-X options, because it increases the amount of output per file
1535 1536 according to the configuration in hgrc.
1536 1537
1537 1538 With -v/--verbose, this command first shows the configurations
1538 1539 below (only those that are specified).
1539 1540
1540 1541 - ``--tool`` option
1541 1542 - ``HGMERGE`` environment variable
1542 1543 - configuration of ``ui.merge``
1543 1544
1544 1545 If the merge tool is chosen before matching against
1545 1546 ``merge-patterns``, this command can't show any helpful
1546 1547 information, even with --debug. In such a case, the information
1547 1548 above is useful for understanding why a merge tool was chosen.
1548 1549 """
1549 1550 overrides = {}
1550 1551 if opts['tool']:
1551 1552 overrides[('ui', 'forcemerge')] = opts['tool']
1552 1553 ui.note(('with --tool %r\n') % (opts['tool']))
1553 1554
1554 1555 with ui.configoverride(overrides, 'debugmergepatterns'):
1555 1556 hgmerge = encoding.environ.get("HGMERGE")
1556 1557 if hgmerge is not None:
1557 1558 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1558 1559 uimerge = ui.config("ui", "merge")
1559 1560 if uimerge:
1560 1561 ui.note(('with ui.merge=%r\n') % (uimerge))
1561 1562
1562 1563 ctx = scmutil.revsingle(repo, opts.get('rev'))
1563 1564 m = scmutil.match(ctx, pats, opts)
1564 1565 changedelete = opts['changedelete']
1565 1566 for path in ctx.walk(m):
1566 1567 fctx = ctx[path]
1567 1568 try:
1568 1569 if not ui.debugflag:
1569 1570 ui.pushbuffer(error=True)
1570 1571 tool, toolpath = filemerge._picktool(repo, ui, path,
1571 1572 fctx.isbinary(),
1572 1573 'l' in fctx.flags(),
1573 1574 changedelete)
1574 1575 finally:
1575 1576 if not ui.debugflag:
1576 1577 ui.popbuffer()
1577 1578 ui.write(('%s = %s\n') % (path, tool))
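# Illustrative usage (not part of the upstream module; the file name and the
# configured tool are made up). The "FILE = MERGETOOL" lines come from the
# ui.write() call above:
#
#   $ hg debugpickmergetool --rev . 'glob:**.c'
#   foo.c = kdiff3
#
#   $ hg debugpickmergetool --changedelete foo.c
#   foo.c = :prompt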
1578 1579
1579 1580 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1580 1581 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1581 1582 '''access the pushkey key/value protocol
1582 1583
1583 1584 With two args, list the keys in the given namespace.
1584 1585
1585 1586 With five args, set a key to new if it currently is set to old.
1586 1587 Reports success or failure.
1587 1588 '''
1588 1589
1589 1590 target = hg.peer(ui, {}, repopath)
1590 1591 if keyinfo:
1591 1592 key, old, new = keyinfo
1592 1593 r = target.pushkey(namespace, key, old, new)
1593 1594 ui.status(str(r) + '\n')
1594 1595 return not r
1595 1596 else:
1596 1597 for k, v in sorted(target.listkeys(namespace).iteritems()):
1597 1598 ui.write("%s\t%s\n" % (util.escapestr(k),
1598 1599 util.escapestr(v)))
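# Illustrative usage (not part of the upstream module; the repository path,
# bookmark name and nodes are made up). "bookmarks" is one of the standard
# pushkey namespaces; the two-argument form lists keys, the five-argument
# form conditionally updates one:
#
#   $ hg debugpushkey ../remote bookmarks
#   mybookmark	<40-hex-digit node>
#
#   $ hg debugpushkey ../remote bookmarks mybookmark <old node> <new node>
#   True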
1599 1600
1600 1601 @command('debugpvec', [], _('A B'))
1601 1602 def debugpvec(ui, repo, a, b=None):
1602 1603 ca = scmutil.revsingle(repo, a)
1603 1604 cb = scmutil.revsingle(repo, b)
1604 1605 pa = pvec.ctxpvec(ca)
1605 1606 pb = pvec.ctxpvec(cb)
1606 1607 if pa == pb:
1607 1608 rel = "="
1608 1609 elif pa > pb:
1609 1610 rel = ">"
1610 1611 elif pa < pb:
1611 1612 rel = "<"
1612 1613 elif pa | pb:
1613 1614 rel = "|"
1614 1615 ui.write(_("a: %s\n") % pa)
1615 1616 ui.write(_("b: %s\n") % pb)
1616 1617 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1617 1618 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1618 1619 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1619 1620 pa.distance(pb), rel))
1620 1621
1621 1622 @command('debugrebuilddirstate|debugrebuildstate',
1622 1623 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1623 1624 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1624 1625 'the working copy parent')),
1625 1626 ],
1626 1627 _('[-r REV]'))
1627 1628 def debugrebuilddirstate(ui, repo, rev, **opts):
1628 1629 """rebuild the dirstate as it would look for the given revision
1629 1630
1630 1631 If no revision is specified, the first parent of the working directory is used.
1631 1632
1632 1633 The dirstate will be set to the files of the given revision.
1633 1634 The actual working directory content or existing dirstate
1634 1635 information such as adds or removes is not considered.
1635 1636
1636 1637 ``minimal`` will only rebuild the dirstate status for files that claim to be
1637 1638 tracked but are not in the parent manifest, or that exist in the parent
1638 1639 manifest but are not in the dirstate. It will not change adds, removes, or
1639 1640 modified files that are in the working copy parent.
1640 1641
1641 1642 One use of this command is to make the next :hg:`status` invocation
1642 1643 check the actual file content.
1643 1644 """
1644 1645 ctx = scmutil.revsingle(repo, rev)
1645 1646 with repo.wlock():
1646 1647 dirstate = repo.dirstate
1647 1648 changedfiles = None
1648 1649 # See command doc for what minimal does.
1649 1650 if opts.get('minimal'):
1650 1651 manifestfiles = set(ctx.manifest().keys())
1651 1652 dirstatefiles = set(dirstate)
1652 1653 manifestonly = manifestfiles - dirstatefiles
1653 1654 dsonly = dirstatefiles - manifestfiles
1654 1655 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1655 1656 changedfiles = manifestonly | dsnotadded
1656 1657
1657 1658 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
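# Illustrative usage (not part of the upstream module):
#
#   $ hg debugrebuilddirstate -r .       # reset the dirstate to "." entirely
#   $ hg debugrebuilddirstate --minimal  # only repair inconsistent entries
#
# A follow-up "hg status" will then re-check actual file content, as noted
# in the docstring above.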
1658 1659
1659 1660 @command('debugrebuildfncache', [], '')
1660 1661 def debugrebuildfncache(ui, repo):
1661 1662 """rebuild the fncache file"""
1662 1663 repair.rebuildfncache(ui, repo)
1663 1664
1664 1665 @command('debugrename',
1665 1666 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1666 1667 _('[-r REV] FILE'))
1667 1668 def debugrename(ui, repo, file1, *pats, **opts):
1668 1669 """dump rename information"""
1669 1670
1670 1671 ctx = scmutil.revsingle(repo, opts.get('rev'))
1671 1672 m = scmutil.match(ctx, (file1,) + pats, opts)
1672 1673 for abs in ctx.walk(m):
1673 1674 fctx = ctx[abs]
1674 1675 o = fctx.filelog().renamed(fctx.filenode())
1675 1676 rel = m.rel(abs)
1676 1677 if o:
1677 1678 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1678 1679 else:
1679 1680 ui.write(_("%s not renamed\n") % rel)
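# Illustrative usage (not part of the upstream module; the file names are
# made up). For a file copied from "original.py" to "copied.py", the two
# output forms match the ui.write() calls above:
#
#   $ hg debugrename -r . copied.py original.py
#   copied.py renamed from original.py:<filenode hex>
#   original.py not renamed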
1680 1681
1681 1682 @command('debugrevlog', cmdutil.debugrevlogopts +
1682 1683 [('d', 'dump', False, _('dump index data'))],
1683 1684 _('-c|-m|FILE'),
1684 1685 optionalrepo=True)
1685 1686 def debugrevlog(ui, repo, file_=None, **opts):
1686 1687 """show data and statistics about a revlog"""
1687 1688 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1688 1689
1689 1690 if opts.get("dump"):
1690 1691 numrevs = len(r)
1691 1692 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1692 1693 " rawsize totalsize compression heads chainlen\n"))
1693 1694 ts = 0
1694 1695 heads = set()
1695 1696
1696 1697 for rev in xrange(numrevs):
1697 1698 dbase = r.deltaparent(rev)
1698 1699 if dbase == -1:
1699 1700 dbase = rev
1700 1701 cbase = r.chainbase(rev)
1701 1702 clen = r.chainlen(rev)
1702 1703 p1, p2 = r.parentrevs(rev)
1703 1704 rs = r.rawsize(rev)
1704 1705 ts = ts + rs
1705 1706 heads -= set(r.parentrevs(rev))
1706 1707 heads.add(rev)
1707 1708 try:
1708 1709 compression = ts / r.end(rev)
1709 1710 except ZeroDivisionError:
1710 1711 compression = 0
1711 1712 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1712 1713 "%11d %5d %8d\n" %
1713 1714 (rev, p1, p2, r.start(rev), r.end(rev),
1714 1715 r.start(dbase), r.start(cbase),
1715 1716 r.start(p1), r.start(p2),
1716 1717 rs, ts, compression, len(heads), clen))
1717 1718 return 0
1718 1719
1719 1720 v = r.version
1720 1721 format = v & 0xFFFF
1721 1722 flags = []
1722 1723 gdelta = False
1723 1724 if v & revlog.FLAG_INLINE_DATA:
1724 1725 flags.append('inline')
1725 1726 if v & revlog.FLAG_GENERALDELTA:
1726 1727 gdelta = True
1727 1728 flags.append('generaldelta')
1728 1729 if not flags:
1729 1730 flags = ['(none)']
1730 1731
1731 1732 nummerges = 0
1732 1733 numfull = 0
1733 1734 numprev = 0
1734 1735 nump1 = 0
1735 1736 nump2 = 0
1736 1737 numother = 0
1737 1738 nump1prev = 0
1738 1739 nump2prev = 0
1739 1740 chainlengths = []
1740 1741
1741 1742 datasize = [None, 0, 0]
1742 1743 fullsize = [None, 0, 0]
1743 1744 deltasize = [None, 0, 0]
1744 1745 chunktypecounts = {}
1745 1746 chunktypesizes = {}
1746 1747
1747 1748 def addsize(size, l):
1748 1749 if l[0] is None or size < l[0]:
1749 1750 l[0] = size
1750 1751 if size > l[1]:
1751 1752 l[1] = size
1752 1753 l[2] += size
1753 1754
1754 1755 numrevs = len(r)
1755 1756 for rev in xrange(numrevs):
1756 1757 p1, p2 = r.parentrevs(rev)
1757 1758 delta = r.deltaparent(rev)
1758 1759 if format > 0:
1759 1760 addsize(r.rawsize(rev), datasize)
1760 1761 if p2 != nullrev:
1761 1762 nummerges += 1
1762 1763 size = r.length(rev)
1763 1764 if delta == nullrev:
1764 1765 chainlengths.append(0)
1765 1766 numfull += 1
1766 1767 addsize(size, fullsize)
1767 1768 else:
1768 1769 chainlengths.append(chainlengths[delta] + 1)
1769 1770 addsize(size, deltasize)
1770 1771 if delta == rev - 1:
1771 1772 numprev += 1
1772 1773 if delta == p1:
1773 1774 nump1prev += 1
1774 1775 elif delta == p2:
1775 1776 nump2prev += 1
1776 1777 elif delta == p1:
1777 1778 nump1 += 1
1778 1779 elif delta == p2:
1779 1780 nump2 += 1
1780 1781 elif delta != nullrev:
1781 1782 numother += 1
1782 1783
1783 1784 # Obtain data on the raw chunks in the revlog.
1784 1785 segment = r._getsegmentforrevs(rev, rev)[1]
1785 1786 if segment:
1786 1787 chunktype = segment[0]
1787 1788 else:
1788 1789 chunktype = 'empty'
1789 1790
1790 1791 if chunktype not in chunktypecounts:
1791 1792 chunktypecounts[chunktype] = 0
1792 1793 chunktypesizes[chunktype] = 0
1793 1794
1794 1795 chunktypecounts[chunktype] += 1
1795 1796 chunktypesizes[chunktype] += size
1796 1797
1797 1798 # Adjust size min value for empty cases
1798 1799 for size in (datasize, fullsize, deltasize):
1799 1800 if size[0] is None:
1800 1801 size[0] = 0
1801 1802
1802 1803 numdeltas = numrevs - numfull
1803 1804 numoprev = numprev - nump1prev - nump2prev
1804 1805 totalrawsize = datasize[2]
1805 1806 datasize[2] /= numrevs
1806 1807 fulltotal = fullsize[2]
1807 1808 fullsize[2] /= numfull
1808 1809 deltatotal = deltasize[2]
1809 1810 if numrevs - numfull > 0:
1810 1811 deltasize[2] /= numrevs - numfull
1811 1812 totalsize = fulltotal + deltatotal
1812 1813 avgchainlen = sum(chainlengths) / numrevs
1813 1814 maxchainlen = max(chainlengths)
1814 1815 compratio = 1
1815 1816 if totalsize:
1816 1817 compratio = totalrawsize / totalsize
1817 1818
1818 1819 basedfmtstr = '%%%dd\n'
1819 1820 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1820 1821
1821 1822 def dfmtstr(max):
1822 1823 return basedfmtstr % len(str(max))
1823 1824 def pcfmtstr(max, padding=0):
1824 1825 return basepcfmtstr % (len(str(max)), ' ' * padding)
1825 1826
1826 1827 def pcfmt(value, total):
1827 1828 if total:
1828 1829 return (value, 100 * float(value) / total)
1829 1830 else:
1830 1831 return value, 100.0
1831 1832
1832 1833 ui.write(('format : %d\n') % format)
1833 1834 ui.write(('flags : %s\n') % ', '.join(flags))
1834 1835
1835 1836 ui.write('\n')
1836 1837 fmt = pcfmtstr(totalsize)
1837 1838 fmt2 = dfmtstr(totalsize)
1838 1839 ui.write(('revisions : ') + fmt2 % numrevs)
1839 1840 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1840 1841 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1841 1842 ui.write(('revisions : ') + fmt2 % numrevs)
1842 1843 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1843 1844 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1844 1845 ui.write(('revision size : ') + fmt2 % totalsize)
1845 1846 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1846 1847 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1847 1848
1848 1849 def fmtchunktype(chunktype):
1849 1850 if chunktype == 'empty':
1850 1851 return ' %s : ' % chunktype
1851 1852 elif chunktype in string.ascii_letters:
1852 1853 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1853 1854 else:
1854 1855 return ' 0x%s : ' % hex(chunktype)
1855 1856
1856 1857 ui.write('\n')
1857 1858 ui.write(('chunks : ') + fmt2 % numrevs)
1858 1859 for chunktype in sorted(chunktypecounts):
1859 1860 ui.write(fmtchunktype(chunktype))
1860 1861 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1861 1862 ui.write(('chunks size : ') + fmt2 % totalsize)
1862 1863 for chunktype in sorted(chunktypecounts):
1863 1864 ui.write(fmtchunktype(chunktype))
1864 1865 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1865 1866
1866 1867 ui.write('\n')
1867 1868 fmt = dfmtstr(max(avgchainlen, compratio))
1868 1869 ui.write(('avg chain length : ') + fmt % avgchainlen)
1869 1870 ui.write(('max chain length : ') + fmt % maxchainlen)
1870 1871 ui.write(('compression ratio : ') + fmt % compratio)
1871 1872
1872 1873 if format > 0:
1873 1874 ui.write('\n')
1874 1875 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1875 1876 % tuple(datasize))
1876 1877 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1877 1878 % tuple(fullsize))
1878 1879 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1879 1880 % tuple(deltasize))
1880 1881
1881 1882 if numdeltas > 0:
1882 1883 ui.write('\n')
1883 1884 fmt = pcfmtstr(numdeltas)
1884 1885 fmt2 = pcfmtstr(numdeltas, 4)
1885 1886 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1886 1887 if numprev > 0:
1887 1888 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1888 1889 numprev))
1889 1890 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1890 1891 numprev))
1891 1892 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1892 1893 numprev))
1893 1894 if gdelta:
1894 1895 ui.write(('deltas against p1 : ')
1895 1896 + fmt % pcfmt(nump1, numdeltas))
1896 1897 ui.write(('deltas against p2 : ')
1897 1898 + fmt % pcfmt(nump2, numdeltas))
1898 1899 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1899 1900 numdeltas))
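# Illustrative usage (not part of the upstream module; the reported numbers
# depend entirely on the repository). "-c" selects the changelog and "-m"
# the manifest, as with the other revlog debug commands; "--dump" prints the
# per-revision table defined near the top of the function instead of the
# summary statistics:
#
#   $ hg debugrevlog -c
#   $ hg debugrevlog --dump -m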
1900 1901
1901 1902 @command('debugrevspec',
1902 1903 [('', 'optimize', None,
1903 1904 _('print parsed tree after optimizing (DEPRECATED)')),
1904 1905 ('', 'show-revs', True, _('print list of result revisions (default)')),
1905 1906 ('s', 'show-set', None, _('print internal representation of result set')),
1906 1907 ('p', 'show-stage', [],
1907 1908 _('print parsed tree at the given stage'), _('NAME')),
1908 1909 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1909 1910 ('', 'verify-optimized', False, _('verify optimized result')),
1910 1911 ],
1911 1912 ('REVSPEC'))
1912 1913 def debugrevspec(ui, repo, expr, **opts):
1913 1914 """parse and apply a revision specification
1914 1915
1915 1916 Use the -p/--show-stage option to print the parsed tree at the given
1916 1917 stages. Use -p all to print the tree at every stage.
1917 1918
1918 1919 Use the --no-show-revs option with -s or -p to print only the set
1919 1920 representation or the parsed tree, respectively.
1920 1921
1921 1922 Use --verify-optimized to compare the optimized result with the unoptimized
1922 1923 one. Returns 1 if the optimized result differs.
1923 1924 """
1924 1925 stages = [
1925 1926 ('parsed', lambda tree: tree),
1926 1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1927 1928 ('concatenated', revsetlang.foldconcat),
1928 1929 ('analyzed', revsetlang.analyze),
1929 1930 ('optimized', revsetlang.optimize),
1930 1931 ]
1931 1932 if opts['no_optimized']:
1932 1933 stages = stages[:-1]
1933 1934 if opts['verify_optimized'] and opts['no_optimized']:
1934 1935 raise error.Abort(_('cannot use --verify-optimized with '
1935 1936 '--no-optimized'))
1936 1937 stagenames = set(n for n, f in stages)
1937 1938
1938 1939 showalways = set()
1939 1940 showchanged = set()
1940 1941 if ui.verbose and not opts['show_stage']:
1941 1942 # show parsed tree by --verbose (deprecated)
1942 1943 showalways.add('parsed')
1943 1944 showchanged.update(['expanded', 'concatenated'])
1944 1945 if opts['optimize']:
1945 1946 showalways.add('optimized')
1946 1947 if opts['show_stage'] and opts['optimize']:
1947 1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1948 1949 if opts['show_stage'] == ['all']:
1949 1950 showalways.update(stagenames)
1950 1951 else:
1951 1952 for n in opts['show_stage']:
1952 1953 if n not in stagenames:
1953 1954 raise error.Abort(_('invalid stage name: %s') % n)
1954 1955 showalways.update(opts['show_stage'])
1955 1956
1956 1957 treebystage = {}
1957 1958 printedtree = None
1958 1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1959 1960 for n, f in stages:
1960 1961 treebystage[n] = tree = f(tree)
1961 1962 if n in showalways or (n in showchanged and tree != printedtree):
1962 1963 if opts['show_stage'] or n != 'parsed':
1963 1964 ui.write(("* %s:\n") % n)
1964 1965 ui.write(revsetlang.prettyformat(tree), "\n")
1965 1966 printedtree = tree
1966 1967
1967 1968 if opts['verify_optimized']:
1968 1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1969 1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1970 1971 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1971 1972 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1972 1973 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1973 1974 arevs = list(arevs)
1974 1975 brevs = list(brevs)
1975 1976 if arevs == brevs:
1976 1977 return 0
1977 1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1978 1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1979 1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1980 1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1981 1982 if tag in ('delete', 'replace'):
1982 1983 for c in arevs[alo:ahi]:
1983 1984 ui.write('-%s\n' % c, label='diff.deleted')
1984 1985 if tag in ('insert', 'replace'):
1985 1986 for c in brevs[blo:bhi]:
1986 1987 ui.write('+%s\n' % c, label='diff.inserted')
1987 1988 if tag == 'equal':
1988 1989 for c in arevs[alo:ahi]:
1989 1990 ui.write(' %s\n' % c)
1990 1991 return 1
1991 1992
1992 1993 func = revset.makematcher(tree)
1993 1994 revs = func(repo)
1994 1995 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1995 1996 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
1996 1997 if not opts['show_revs']:
1997 1998 return
1998 1999 for c in revs:
1999 2000 ui.write("%s\n" % c)
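# Illustrative usage (not part of the upstream module; the revsets are just
# examples). The stage names accepted by -p/--show-stage are the ones listed
# in "stages" above ("parsed", "expanded", ..., "optimized"):
#
#   $ hg debugrevspec -p all --no-show-revs 'heads(public())'
#   * parsed:
#   ...
#   * optimized:
#   ...
#
#   $ hg debugrevspec --verify-optimized 'max(author("alice"))'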
2000 2001
2001 2002 @command('debugsetparents', [], _('REV1 [REV2]'))
2002 2003 def debugsetparents(ui, repo, rev1, rev2=None):
2003 2004 """manually set the parents of the current working directory
2004 2005
2005 2006 This is useful for writing repository conversion tools, but should
2006 2007 be used with care. For example, neither the working directory nor the
2007 2008 dirstate is updated, so file status may be incorrect after running this
2008 2009 command.
2009 2010
2010 2011 Returns 0 on success.
2011 2012 """
2012 2013
2013 2014 r1 = scmutil.revsingle(repo, rev1).node()
2014 2015 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2015 2016
2016 2017 with repo.wlock():
2017 2018 repo.setparents(r1, r2)
2018 2019
2019 2020 @command('debugsub',
2020 2021 [('r', 'rev', '',
2021 2022 _('revision to check'), _('REV'))],
2022 2023 _('[-r REV] [REV]'))
2023 2024 def debugsub(ui, repo, rev=None):
2024 2025 ctx = scmutil.revsingle(repo, rev, None)
2025 2026 for k, v in sorted(ctx.substate.items()):
2026 2027 ui.write(('path %s\n') % k)
2027 2028 ui.write((' source %s\n') % v[0])
2028 2029 ui.write((' revision %s\n') % v[1])
2029 2030
2030 2031 @command('debugsuccessorssets',
2031 2032 [],
2032 2033 _('[REV]'))
2033 2034 def debugsuccessorssets(ui, repo, *revs):
2034 2035 """show set of successors for revision
2035 2036
2036 2037 A successors set of changeset A is a consistent group of revisions that
2037 2038 succeed A. It contains non-obsolete changesets only.
2038 2039
2039 2040 In most cases a changeset A has a single successors set containing a single
2040 2041 successor (changeset A replaced by A').
2041 2042
2042 2043 A changeset that is made obsolete with no successors is called "pruned".
2043 2044 Such changesets have no successors sets at all.
2044 2045
2045 2046 A changeset that has been "split" will have a successors set containing
2046 2047 more than one successor.
2047 2048
2048 2049 A changeset that has been rewritten in multiple different ways is called
2049 2050 "divergent". Such changesets have multiple successor sets (each of which
2050 2051 may also be split, i.e. have multiple successors).
2051 2052
2052 2053 Results are displayed as follows::
2053 2054
2054 2055 <rev1>
2055 2056 <successors-1A>
2056 2057 <rev2>
2057 2058 <successors-2A>
2058 2059 <successors-2B1> <successors-2B2> <successors-2B3>
2059 2060
2060 2061 Here rev2 has two possible (i.e. divergent) successors sets. The first
2061 2062 holds one element, whereas the second holds three (i.e. the changeset has
2062 2063 been split).
2063 2064 """
2064 2065 # passed to successorssets caching computation from one call to another
2065 2066 cache = {}
2066 2067 ctx2str = str
2067 2068 node2str = short
2068 2069 if ui.debug():
2069 2070 def ctx2str(ctx):
2070 2071 return ctx.hex()
2071 2072 node2str = hex
2072 2073 for rev in scmutil.revrange(repo, revs):
2073 2074 ctx = repo[rev]
2074 2075 ui.write('%s\n'% ctx2str(ctx))
2075 2076 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2076 2077 if succsset:
2077 2078 ui.write(' ')
2078 2079 ui.write(node2str(succsset[0]))
2079 2080 for node in succsset[1:]:
2080 2081 ui.write(' ')
2081 2082 ui.write(node2str(node))
2082 2083 ui.write('\n')
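# Illustrative usage (not part of the upstream module). With obsolescence
# markers enabled (e.g. after an "hg commit --amend" under the experimental
# evolution configuration), the output follows the format described in the
# docstring above:
#
#   $ hg debugsuccessorssets 'obsolete()'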
2083 2084
2084 2085 @command('debugtemplate',
2085 2086 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2086 2087 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2087 2088 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2088 2089 optionalrepo=True)
2089 2090 def debugtemplate(ui, repo, tmpl, **opts):
2090 2091 """parse and apply a template
2091 2092
2092 2093 If -r/--rev is given, the template is processed as a log template and
2093 2094 applied to the given changesets. Otherwise, it is processed as a generic
2094 2095 template.
2095 2096
2096 2097 Use --verbose to print the parsed tree.
2097 2098 """
2098 2099 revs = None
2099 2100 if opts['rev']:
2100 2101 if repo is None:
2101 2102 raise error.RepoError(_('there is no Mercurial repository here '
2102 2103 '(.hg not found)'))
2103 2104 revs = scmutil.revrange(repo, opts['rev'])
2104 2105
2105 2106 props = {}
2106 2107 for d in opts['define']:
2107 2108 try:
2108 2109 k, v = (e.strip() for e in d.split('=', 1))
2109 2110 if not k or k == 'ui':
2110 2111 raise ValueError
2111 2112 props[k] = v
2112 2113 except ValueError:
2113 2114 raise error.Abort(_('malformed keyword definition: %s') % d)
2114 2115
2115 2116 if ui.verbose:
2116 2117 aliases = ui.configitems('templatealias')
2117 2118 tree = templater.parse(tmpl)
2118 2119 ui.note(templater.prettyformat(tree), '\n')
2119 2120 newtree = templater.expandaliases(tree, aliases)
2120 2121 if newtree != tree:
2121 2122 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2122 2123
2123 2124 if revs is None:
2124 2125 t = formatter.maketemplater(ui, tmpl)
2125 2126 props['ui'] = ui
2126 2127 ui.write(t.render(props))
2127 2128 else:
2128 2129 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2129 2130 for r in revs:
2130 2131 displayer.show(repo[r], **props)
2131 2132 displayer.close()
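# Illustrative usage (not part of the upstream module; the keyword and
# templates are made up). The first form renders a generic template using a
# -D definition, the second processes a log template for a revision:
#
#   $ hg debugtemplate -D greeting=hello '{greeting} {"world"|upper}\n'
#   hello WORLD
#
#   $ hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'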
2132 2133
2133 2134 @command('debugupdatecaches', [])
2134 2135 def debugupdatecaches(ui, repo, *pats, **opts):
2135 2136 """warm all known caches in the repository"""
2136 2137 with repo.wlock():
2137 2138 with repo.lock():
2138 2139 repo.updatecaches()
2139 2140
2140 2141 @command('debugupgraderepo', [
2141 2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2142 2143 ('', 'run', False, _('performs an upgrade')),
2143 2144 ])
2144 2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2145 2146 """upgrade a repository to use different features
2146 2147
2147 2148 If no arguments are specified, the repository is evaluated for upgrade
2148 2149 and a list of problems and potential optimizations is printed.
2149 2150
2150 2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2151 2152 can be influenced via additional arguments. More details will be provided
2152 2153 by the command output when run without ``--run``.
2153 2154
2154 2155 During the upgrade, the repository will be locked and no writes will be
2155 2156 allowed.
2156 2157
2157 2158 At the end of the upgrade, the repository may not be readable while new
2158 2159 repository data is swapped in. This window will be as long as it takes to
2159 2160 rename some directories inside the ``.hg`` directory. On most machines, this
2160 2161 should complete almost instantaneously and the chances of a consumer being
2161 2162 unable to access the repository should be low.
2162 2163 """
2163 2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
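# Illustrative usage (not part of the upstream module):
#
#   $ hg debugupgraderepo                    # report only, nothing is changed
#   $ hg debugupgraderepo --run              # actually perform the upgrade
#   $ hg debugupgraderepo --run --optimize <name>
#
# Valid optimization names are listed in the report printed by the first
# form.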
2164 2165
2165 2166 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2166 2167 inferrepo=True)
2167 2168 def debugwalk(ui, repo, *pats, **opts):
2168 2169 """show how files match on given patterns"""
2169 2170 m = scmutil.match(repo[None], pats, opts)
2170 2171 ui.write(('matcher: %r\n' % m))
2171 2172 items = list(repo[None].walk(m))
2172 2173 if not items:
2173 2174 return
2174 2175 f = lambda fn: fn
2175 2176 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 2177 f = lambda fn: util.normpath(fn)
2177 2178 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 2179 max([len(abs) for abs in items]),
2179 2180 max([len(m.rel(abs)) for abs in items]))
2180 2181 for abs in items:
2181 2182 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 2183 ui.write("%s\n" % line.rstrip())
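# Illustrative usage (not part of the upstream module; the file names are
# made up). Each output line follows the "f <abs> <rel> [exact]" format
# built above; only files named literally on the command line are flagged
# "exact":
#
#   $ hg debugwalk setup.py src
#   f setup.py      setup.py      exact
#   f src/module.py src/module.py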
2183 2184
2184 2185 @command('debugwireargs',
2185 2186 [('', 'three', '', 'three'),
2186 2187 ('', 'four', '', 'four'),
2187 2188 ('', 'five', '', 'five'),
2188 2189 ] + cmdutil.remoteopts,
2189 2190 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 2191 norepo=True)
2191 2192 def debugwireargs(ui, repopath, *vals, **opts):
2192 2193 repo = hg.peer(ui, opts, repopath)
2193 2194 for opt in cmdutil.remoteopts:
2194 2195 del opts[opt[1]]
2195 2196 args = {}
2196 2197 for k, v in opts.iteritems():
2197 2198 if v:
2198 2199 args[k] = v
2199 2200 # run twice to check that we don't mess up the stream for the next command
2200 2201 res1 = repo.debugwireargs(*vals, **args)
2201 2202 res2 = repo.debugwireargs(*vals, **args)
2202 2203 ui.write("%s\n" % res1)
2203 2204 if res1 != res2:
2204 2205 ui.warn("%s\n" % res2)
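# Illustrative usage (not part of the upstream module; the URL and values are
# made up). The command sends its positional and --three/--four/--five
# arguments over the wire twice and prints the first result; missing values
# come back as None:
#
#   $ hg debugwireargs http://example.com/repo uno dos --three tres
#   uno dos tres None None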