##// END OF EJS Templates
setdiscover: allow to ignore part of the local graph...
Boris Feld -
r35305:f77121b6 default
parent child Browse files
Show More
@@ -1,286 +1,287 b''
1 # dagutil.py - dag utilities for mercurial
1 # dagutil.py - dag utilities for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 from .i18n import _
11 from .i18n import _
12 from .node import nullrev
12 from .node import nullrev
13
13
14 class basedag(object):
14 class basedag(object):
15 '''generic interface for DAGs
15 '''generic interface for DAGs
16
16
17 terms:
17 terms:
18 "ix" (short for index) identifies a nodes internally,
18 "ix" (short for index) identifies a nodes internally,
19 "id" identifies one externally.
19 "id" identifies one externally.
20
20
21 All params are ixs unless explicitly suffixed otherwise.
21 All params are ixs unless explicitly suffixed otherwise.
22 Pluralized params are lists or sets.
22 Pluralized params are lists or sets.
23 '''
23 '''
24
24
25 def __init__(self):
25 def __init__(self):
26 self._inverse = None
26 self._inverse = None
27
27
28 def nodeset(self):
28 def nodeset(self):
29 '''set of all node ixs'''
29 '''set of all node ixs'''
30 raise NotImplementedError
30 raise NotImplementedError
31
31
32 def heads(self):
32 def heads(self):
33 '''list of head ixs'''
33 '''list of head ixs'''
34 raise NotImplementedError
34 raise NotImplementedError
35
35
36 def parents(self, ix):
36 def parents(self, ix):
37 '''list of parents ixs of ix'''
37 '''list of parents ixs of ix'''
38 raise NotImplementedError
38 raise NotImplementedError
39
39
40 def inverse(self):
40 def inverse(self):
41 '''inverse DAG, where parents becomes children, etc.'''
41 '''inverse DAG, where parents becomes children, etc.'''
42 raise NotImplementedError
42 raise NotImplementedError
43
43
44 def ancestorset(self, starts, stops=None):
44 def ancestorset(self, starts, stops=None):
45 '''
45 '''
46 set of all ancestors of starts (incl), but stop walk at stops (excl)
46 set of all ancestors of starts (incl), but stop walk at stops (excl)
47 '''
47 '''
48 raise NotImplementedError
48 raise NotImplementedError
49
49
50 def descendantset(self, starts, stops=None):
50 def descendantset(self, starts, stops=None):
51 '''
51 '''
52 set of all descendants of starts (incl), but stop walk at stops (excl)
52 set of all descendants of starts (incl), but stop walk at stops (excl)
53 '''
53 '''
54 return self.inverse().ancestorset(starts, stops)
54 return self.inverse().ancestorset(starts, stops)
55
55
56 def headsetofconnecteds(self, ixs):
56 def headsetofconnecteds(self, ixs):
57 '''
57 '''
58 subset of connected list of ixs so that no node has a descendant in it
58 subset of connected list of ixs so that no node has a descendant in it
59
59
60 By "connected list" we mean that if an ancestor and a descendant are in
60 By "connected list" we mean that if an ancestor and a descendant are in
61 the list, then so is at least one path connecting them.
61 the list, then so is at least one path connecting them.
62 '''
62 '''
63 raise NotImplementedError
63 raise NotImplementedError
64
64
65 def externalize(self, ix):
65 def externalize(self, ix):
66 '''return a node id'''
66 '''return a node id'''
67 return self._externalize(ix)
67 return self._externalize(ix)
68
68
69 def externalizeall(self, ixs):
69 def externalizeall(self, ixs):
70 '''return a list of (or set if given a set) of node ids'''
70 '''return a list of (or set if given a set) of node ids'''
71 ids = self._externalizeall(ixs)
71 ids = self._externalizeall(ixs)
72 if isinstance(ixs, set):
72 if isinstance(ixs, set):
73 return set(ids)
73 return set(ids)
74 return list(ids)
74 return list(ids)
75
75
76 def internalize(self, id):
76 def internalize(self, id):
77 '''return a node ix'''
77 '''return a node ix'''
78 return self._internalize(id)
78 return self._internalize(id)
79
79
80 def internalizeall(self, ids, filterunknown=False):
80 def internalizeall(self, ids, filterunknown=False):
81 '''return a list of (or set if given a set) of node ixs'''
81 '''return a list of (or set if given a set) of node ixs'''
82 ixs = self._internalizeall(ids, filterunknown)
82 ixs = self._internalizeall(ids, filterunknown)
83 if isinstance(ids, set):
83 if isinstance(ids, set):
84 return set(ixs)
84 return set(ixs)
85 return list(ixs)
85 return list(ixs)
86
86
87
87
88 class genericdag(basedag):
88 class genericdag(basedag):
89 '''generic implementations for DAGs'''
89 '''generic implementations for DAGs'''
90
90
91 def ancestorset(self, starts, stops=None):
91 def ancestorset(self, starts, stops=None):
92 if stops:
92 if stops:
93 stops = set(stops)
93 stops = set(stops)
94 else:
94 else:
95 stops = set()
95 stops = set()
96 seen = set()
96 seen = set()
97 pending = list(starts)
97 pending = list(starts)
98 while pending:
98 while pending:
99 n = pending.pop()
99 n = pending.pop()
100 if n not in seen and n not in stops:
100 if n not in seen and n not in stops:
101 seen.add(n)
101 seen.add(n)
102 pending.extend(self.parents(n))
102 pending.extend(self.parents(n))
103 return seen
103 return seen
104
104
105 def headsetofconnecteds(self, ixs):
105 def headsetofconnecteds(self, ixs):
106 hds = set(ixs)
106 hds = set(ixs)
107 if not hds:
107 if not hds:
108 return hds
108 return hds
109 for n in ixs:
109 for n in ixs:
110 for p in self.parents(n):
110 for p in self.parents(n):
111 hds.discard(p)
111 hds.discard(p)
112 assert hds
112 assert hds
113 return hds
113 return hds
114
114
115
115
116 class revlogbaseddag(basedag):
116 class revlogbaseddag(basedag):
117 '''generic dag interface to a revlog'''
117 '''generic dag interface to a revlog'''
118
118
119 def __init__(self, revlog, nodeset):
119 def __init__(self, revlog, nodeset):
120 basedag.__init__(self)
120 basedag.__init__(self)
121 self._revlog = revlog
121 self._revlog = revlog
122 self._heads = None
122 self._heads = None
123 self._nodeset = nodeset
123 self._nodeset = nodeset
124
124
125 def nodeset(self):
125 def nodeset(self):
126 return self._nodeset
126 return self._nodeset
127
127
128 def heads(self):
128 def heads(self):
129 if self._heads is None:
129 if self._heads is None:
130 self._heads = self._getheads()
130 self._heads = self._getheads()
131 return self._heads
131 return self._heads
132
132
133 def _externalize(self, ix):
133 def _externalize(self, ix):
134 return self._revlog.index[ix][7]
134 return self._revlog.index[ix][7]
135 def _externalizeall(self, ixs):
135 def _externalizeall(self, ixs):
136 idx = self._revlog.index
136 idx = self._revlog.index
137 return [idx[i][7] for i in ixs]
137 return [idx[i][7] for i in ixs]
138
138
139 def _internalize(self, id):
139 def _internalize(self, id):
140 ix = self._revlog.rev(id)
140 ix = self._revlog.rev(id)
141 if ix == nullrev:
141 if ix == nullrev:
142 raise LookupError(id, self._revlog.indexfile, _('nullid'))
142 raise LookupError(id, self._revlog.indexfile, _('nullid'))
143 return ix
143 return ix
144 def _internalizeall(self, ids, filterunknown):
144 def _internalizeall(self, ids, filterunknown):
145 rl = self._revlog
145 rl = self._revlog
146 if filterunknown:
146 if filterunknown:
147 return [r for r in map(rl.nodemap.get, ids)
147 return [r for r in map(rl.nodemap.get, ids)
148 if (r is not None
148 if (r is not None
149 and r != nullrev
149 and r != nullrev
150 and r not in rl.filteredrevs)]
150 and r not in rl.filteredrevs)]
151 return [self._internalize(i) for i in ids]
151 return [self._internalize(i) for i in ids]
152
152
153
153
154 class revlogdag(revlogbaseddag):
154 class revlogdag(revlogbaseddag):
155 '''dag interface to a revlog'''
155 '''dag interface to a revlog'''
156
156
157 def __init__(self, revlog):
157 def __init__(self, revlog, localsubset=None):
158 revlogbaseddag.__init__(self, revlog, set(revlog))
158 revlogbaseddag.__init__(self, revlog, set(revlog))
159 self._heads = localsubset
159
160
160 def _getheads(self):
161 def _getheads(self):
161 return [r for r in self._revlog.headrevs() if r != nullrev]
162 return [r for r in self._revlog.headrevs() if r != nullrev]
162
163
163 def parents(self, ix):
164 def parents(self, ix):
164 rlog = self._revlog
165 rlog = self._revlog
165 idx = rlog.index
166 idx = rlog.index
166 revdata = idx[ix]
167 revdata = idx[ix]
167 prev = revdata[5]
168 prev = revdata[5]
168 if prev != nullrev:
169 if prev != nullrev:
169 prev2 = revdata[6]
170 prev2 = revdata[6]
170 if prev2 == nullrev:
171 if prev2 == nullrev:
171 return [prev]
172 return [prev]
172 return [prev, prev2]
173 return [prev, prev2]
173 prev2 = revdata[6]
174 prev2 = revdata[6]
174 if prev2 != nullrev:
175 if prev2 != nullrev:
175 return [prev2]
176 return [prev2]
176 return []
177 return []
177
178
178 def inverse(self):
179 def inverse(self):
179 if self._inverse is None:
180 if self._inverse is None:
180 self._inverse = inverserevlogdag(self)
181 self._inverse = inverserevlogdag(self)
181 return self._inverse
182 return self._inverse
182
183
183 def ancestorset(self, starts, stops=None):
184 def ancestorset(self, starts, stops=None):
184 rlog = self._revlog
185 rlog = self._revlog
185 idx = rlog.index
186 idx = rlog.index
186 if stops:
187 if stops:
187 stops = set(stops)
188 stops = set(stops)
188 else:
189 else:
189 stops = set()
190 stops = set()
190 seen = set()
191 seen = set()
191 pending = list(starts)
192 pending = list(starts)
192 while pending:
193 while pending:
193 rev = pending.pop()
194 rev = pending.pop()
194 if rev not in seen and rev not in stops:
195 if rev not in seen and rev not in stops:
195 seen.add(rev)
196 seen.add(rev)
196 revdata = idx[rev]
197 revdata = idx[rev]
197 for i in [5, 6]:
198 for i in [5, 6]:
198 prev = revdata[i]
199 prev = revdata[i]
199 if prev != nullrev:
200 if prev != nullrev:
200 pending.append(prev)
201 pending.append(prev)
201 return seen
202 return seen
202
203
203 def headsetofconnecteds(self, ixs):
204 def headsetofconnecteds(self, ixs):
204 if not ixs:
205 if not ixs:
205 return set()
206 return set()
206 rlog = self._revlog
207 rlog = self._revlog
207 idx = rlog.index
208 idx = rlog.index
208 headrevs = set(ixs)
209 headrevs = set(ixs)
209 for rev in ixs:
210 for rev in ixs:
210 revdata = idx[rev]
211 revdata = idx[rev]
211 for i in [5, 6]:
212 for i in [5, 6]:
212 prev = revdata[i]
213 prev = revdata[i]
213 if prev != nullrev:
214 if prev != nullrev:
214 headrevs.discard(prev)
215 headrevs.discard(prev)
215 assert headrevs
216 assert headrevs
216 return headrevs
217 return headrevs
217
218
218 def linearize(self, ixs):
219 def linearize(self, ixs):
219 '''linearize and topologically sort a list of revisions
220 '''linearize and topologically sort a list of revisions
220
221
221 The linearization process tries to create long runs of revs where
222 The linearization process tries to create long runs of revs where
222 a child rev comes immediately after its first parent. This is done by
223 a child rev comes immediately after its first parent. This is done by
223 visiting the heads of the given revs in inverse topological order,
224 visiting the heads of the given revs in inverse topological order,
224 and for each visited rev, visiting its second parent, then its first
225 and for each visited rev, visiting its second parent, then its first
225 parent, then adding the rev itself to the output list.
226 parent, then adding the rev itself to the output list.
226 '''
227 '''
227 sorted = []
228 sorted = []
228 visit = list(self.headsetofconnecteds(ixs))
229 visit = list(self.headsetofconnecteds(ixs))
229 visit.sort(reverse=True)
230 visit.sort(reverse=True)
230 finished = set()
231 finished = set()
231
232
232 while visit:
233 while visit:
233 cur = visit.pop()
234 cur = visit.pop()
234 if cur < 0:
235 if cur < 0:
235 cur = -cur - 1
236 cur = -cur - 1
236 if cur not in finished:
237 if cur not in finished:
237 sorted.append(cur)
238 sorted.append(cur)
238 finished.add(cur)
239 finished.add(cur)
239 else:
240 else:
240 visit.append(-cur - 1)
241 visit.append(-cur - 1)
241 visit += [p for p in self.parents(cur)
242 visit += [p for p in self.parents(cur)
242 if p in ixs and p not in finished]
243 if p in ixs and p not in finished]
243 assert len(sorted) == len(ixs)
244 assert len(sorted) == len(ixs)
244 return sorted
245 return sorted
245
246
246
247
247 class inverserevlogdag(revlogbaseddag, genericdag):
248 class inverserevlogdag(revlogbaseddag, genericdag):
248 '''inverse of an existing revlog dag; see revlogdag.inverse()'''
249 '''inverse of an existing revlog dag; see revlogdag.inverse()'''
249
250
250 def __init__(self, orig):
251 def __init__(self, orig):
251 revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
252 revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
252 self._orig = orig
253 self._orig = orig
253 self._children = {}
254 self._children = {}
254 self._roots = []
255 self._roots = []
255 self._walkfrom = len(self._revlog) - 1
256 self._walkfrom = len(self._revlog) - 1
256
257
257 def _walkto(self, walkto):
258 def _walkto(self, walkto):
258 rev = self._walkfrom
259 rev = self._walkfrom
259 cs = self._children
260 cs = self._children
260 roots = self._roots
261 roots = self._roots
261 idx = self._revlog.index
262 idx = self._revlog.index
262 while rev >= walkto:
263 while rev >= walkto:
263 data = idx[rev]
264 data = idx[rev]
264 isroot = True
265 isroot = True
265 for prev in [data[5], data[6]]: # parent revs
266 for prev in [data[5], data[6]]: # parent revs
266 if prev != nullrev:
267 if prev != nullrev:
267 cs.setdefault(prev, []).append(rev)
268 cs.setdefault(prev, []).append(rev)
268 isroot = False
269 isroot = False
269 if isroot:
270 if isroot:
270 roots.append(rev)
271 roots.append(rev)
271 rev -= 1
272 rev -= 1
272 self._walkfrom = rev
273 self._walkfrom = rev
273
274
274 def _getheads(self):
275 def _getheads(self):
275 self._walkto(nullrev)
276 self._walkto(nullrev)
276 return self._roots
277 return self._roots
277
278
278 def parents(self, ix):
279 def parents(self, ix):
279 if ix is None:
280 if ix is None:
280 return []
281 return []
281 if ix <= self._walkfrom:
282 if ix <= self._walkfrom:
282 self._walkto(ix)
283 self._walkto(ix)
283 return self._children.get(ix, [])
284 return self._children.get(ix, [])
284
285
285 def inverse(self):
286 def inverse(self):
286 return self._orig
287 return self._orig
@@ -1,2365 +1,2368 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import socket
17 import socket
18 import ssl
18 import ssl
19 import string
19 import string
20 import sys
20 import sys
21 import tempfile
21 import tempfile
22 import time
22 import time
23
23
24 from .i18n import _
24 from .i18n import _
25 from .node import (
25 from .node import (
26 bin,
26 bin,
27 hex,
27 hex,
28 nullhex,
28 nullhex,
29 nullid,
29 nullid,
30 nullrev,
30 nullrev,
31 short,
31 short,
32 )
32 )
33 from . import (
33 from . import (
34 bundle2,
34 bundle2,
35 changegroup,
35 changegroup,
36 cmdutil,
36 cmdutil,
37 color,
37 color,
38 context,
38 context,
39 dagparser,
39 dagparser,
40 dagutil,
40 dagutil,
41 encoding,
41 encoding,
42 error,
42 error,
43 exchange,
43 exchange,
44 extensions,
44 extensions,
45 filemerge,
45 filemerge,
46 fileset,
46 fileset,
47 formatter,
47 formatter,
48 hg,
48 hg,
49 localrepo,
49 localrepo,
50 lock as lockmod,
50 lock as lockmod,
51 merge as mergemod,
51 merge as mergemod,
52 obsolete,
52 obsolete,
53 obsutil,
53 obsutil,
54 phases,
54 phases,
55 policy,
55 policy,
56 pvec,
56 pvec,
57 pycompat,
57 pycompat,
58 registrar,
58 registrar,
59 repair,
59 repair,
60 revlog,
60 revlog,
61 revset,
61 revset,
62 revsetlang,
62 revsetlang,
63 scmutil,
63 scmutil,
64 setdiscovery,
64 setdiscovery,
65 simplemerge,
65 simplemerge,
66 smartset,
66 smartset,
67 sslutil,
67 sslutil,
68 streamclone,
68 streamclone,
69 templater,
69 templater,
70 treediscovery,
70 treediscovery,
71 upgrade,
71 upgrade,
72 util,
72 util,
73 vfs as vfsmod,
73 vfs as vfsmod,
74 )
74 )
75
75
76 release = lockmod.release
76 release = lockmod.release
77
77
78 command = registrar.command()
78 command = registrar.command()
79
79
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 def debugancestor(ui, repo, *args):
81 def debugancestor(ui, repo, *args):
82 """find the ancestor revision of two revisions in a given index"""
82 """find the ancestor revision of two revisions in a given index"""
83 if len(args) == 3:
83 if len(args) == 3:
84 index, rev1, rev2 = args
84 index, rev1, rev2 = args
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 lookup = r.lookup
86 lookup = r.lookup
87 elif len(args) == 2:
87 elif len(args) == 2:
88 if not repo:
88 if not repo:
89 raise error.Abort(_('there is no Mercurial repository here '
89 raise error.Abort(_('there is no Mercurial repository here '
90 '(.hg not found)'))
90 '(.hg not found)'))
91 rev1, rev2 = args
91 rev1, rev2 = args
92 r = repo.changelog
92 r = repo.changelog
93 lookup = repo.lookup
93 lookup = repo.lookup
94 else:
94 else:
95 raise error.Abort(_('either two or three arguments required'))
95 raise error.Abort(_('either two or three arguments required'))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98
98
99 @command('debugapplystreamclonebundle', [], 'FILE')
99 @command('debugapplystreamclonebundle', [], 'FILE')
100 def debugapplystreamclonebundle(ui, repo, fname):
100 def debugapplystreamclonebundle(ui, repo, fname):
101 """apply a stream clone bundle file"""
101 """apply a stream clone bundle file"""
102 f = hg.openpath(ui, fname)
102 f = hg.openpath(ui, fname)
103 gen = exchange.readbundle(ui, f, fname)
103 gen = exchange.readbundle(ui, f, fname)
104 gen.apply(repo)
104 gen.apply(repo)
105
105
106 @command('debugbuilddag',
106 @command('debugbuilddag',
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 ('n', 'new-file', None, _('add new file at each rev'))],
109 ('n', 'new-file', None, _('add new file at each rev'))],
110 _('[OPTION]... [TEXT]'))
110 _('[OPTION]... [TEXT]'))
111 def debugbuilddag(ui, repo, text=None,
111 def debugbuilddag(ui, repo, text=None,
112 mergeable_file=False,
112 mergeable_file=False,
113 overwritten_file=False,
113 overwritten_file=False,
114 new_file=False):
114 new_file=False):
115 """builds a repo with a given DAG from scratch in the current empty repo
115 """builds a repo with a given DAG from scratch in the current empty repo
116
116
117 The description of the DAG is read from stdin if not given on the
117 The description of the DAG is read from stdin if not given on the
118 command line.
118 command line.
119
119
120 Elements:
120 Elements:
121
121
122 - "+n" is a linear run of n nodes based on the current default parent
122 - "+n" is a linear run of n nodes based on the current default parent
123 - "." is a single node based on the current default parent
123 - "." is a single node based on the current default parent
124 - "$" resets the default parent to null (implied at the start);
124 - "$" resets the default parent to null (implied at the start);
125 otherwise the default parent is always the last node created
125 otherwise the default parent is always the last node created
126 - "<p" sets the default parent to the backref p
126 - "<p" sets the default parent to the backref p
127 - "*p" is a fork at parent p, which is a backref
127 - "*p" is a fork at parent p, which is a backref
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 - "/p2" is a merge of the preceding node and p2
129 - "/p2" is a merge of the preceding node and p2
130 - ":tag" defines a local tag for the preceding node
130 - ":tag" defines a local tag for the preceding node
131 - "@branch" sets the named branch for subsequent nodes
131 - "@branch" sets the named branch for subsequent nodes
132 - "#...\\n" is a comment up to the end of the line
132 - "#...\\n" is a comment up to the end of the line
133
133
134 Whitespace between the above elements is ignored.
134 Whitespace between the above elements is ignored.
135
135
136 A backref is either
136 A backref is either
137
137
138 - a number n, which references the node curr-n, where curr is the current
138 - a number n, which references the node curr-n, where curr is the current
139 node, or
139 node, or
140 - the name of a local tag you placed earlier using ":tag", or
140 - the name of a local tag you placed earlier using ":tag", or
141 - empty to denote the default parent.
141 - empty to denote the default parent.
142
142
143 All string valued-elements are either strictly alphanumeric, or must
143 All string valued-elements are either strictly alphanumeric, or must
144 be enclosed in double quotes ("..."), with "\\" as escape character.
144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 """
145 """
146
146
147 if text is None:
147 if text is None:
148 ui.status(_("reading DAG from stdin\n"))
148 ui.status(_("reading DAG from stdin\n"))
149 text = ui.fin.read()
149 text = ui.fin.read()
150
150
151 cl = repo.changelog
151 cl = repo.changelog
152 if len(cl) > 0:
152 if len(cl) > 0:
153 raise error.Abort(_('repository is not empty'))
153 raise error.Abort(_('repository is not empty'))
154
154
155 # determine number of revs in DAG
155 # determine number of revs in DAG
156 total = 0
156 total = 0
157 for type, data in dagparser.parsedag(text):
157 for type, data in dagparser.parsedag(text):
158 if type == 'n':
158 if type == 'n':
159 total += 1
159 total += 1
160
160
161 if mergeable_file:
161 if mergeable_file:
162 linesperrev = 2
162 linesperrev = 2
163 # make a file with k lines per rev
163 # make a file with k lines per rev
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 initialmergedlines.append("")
165 initialmergedlines.append("")
166
166
167 tags = []
167 tags = []
168
168
169 wlock = lock = tr = None
169 wlock = lock = tr = None
170 try:
170 try:
171 wlock = repo.wlock()
171 wlock = repo.wlock()
172 lock = repo.lock()
172 lock = repo.lock()
173 tr = repo.transaction("builddag")
173 tr = repo.transaction("builddag")
174
174
175 at = -1
175 at = -1
176 atbranch = 'default'
176 atbranch = 'default'
177 nodeids = []
177 nodeids = []
178 id = 0
178 id = 0
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 for type, data in dagparser.parsedag(text):
180 for type, data in dagparser.parsedag(text):
181 if type == 'n':
181 if type == 'n':
182 ui.note(('node %s\n' % str(data)))
182 ui.note(('node %s\n' % str(data)))
183 id, ps = data
183 id, ps = data
184
184
185 files = []
185 files = []
186 fctxs = {}
186 fctxs = {}
187
187
188 p2 = None
188 p2 = None
189 if mergeable_file:
189 if mergeable_file:
190 fn = "mf"
190 fn = "mf"
191 p1 = repo[ps[0]]
191 p1 = repo[ps[0]]
192 if len(ps) > 1:
192 if len(ps) > 1:
193 p2 = repo[ps[1]]
193 p2 = repo[ps[1]]
194 pa = p1.ancestor(p2)
194 pa = p1.ancestor(p2)
195 base, local, other = [x[fn].data() for x in (pa, p1,
195 base, local, other = [x[fn].data() for x in (pa, p1,
196 p2)]
196 p2)]
197 m3 = simplemerge.Merge3Text(base, local, other)
197 m3 = simplemerge.Merge3Text(base, local, other)
198 ml = [l.strip() for l in m3.merge_lines()]
198 ml = [l.strip() for l in m3.merge_lines()]
199 ml.append("")
199 ml.append("")
200 elif at > 0:
200 elif at > 0:
201 ml = p1[fn].data().split("\n")
201 ml = p1[fn].data().split("\n")
202 else:
202 else:
203 ml = initialmergedlines
203 ml = initialmergedlines
204 ml[id * linesperrev] += " r%i" % id
204 ml[id * linesperrev] += " r%i" % id
205 mergedtext = "\n".join(ml)
205 mergedtext = "\n".join(ml)
206 files.append(fn)
206 files.append(fn)
207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208
208
209 if overwritten_file:
209 if overwritten_file:
210 fn = "of"
210 fn = "of"
211 files.append(fn)
211 files.append(fn)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213
213
214 if new_file:
214 if new_file:
215 fn = "nf%i" % id
215 fn = "nf%i" % id
216 files.append(fn)
216 files.append(fn)
217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 if len(ps) > 1:
218 if len(ps) > 1:
219 if not p2:
219 if not p2:
220 p2 = repo[ps[1]]
220 p2 = repo[ps[1]]
221 for fn in p2:
221 for fn in p2:
222 if fn.startswith("nf"):
222 if fn.startswith("nf"):
223 files.append(fn)
223 files.append(fn)
224 fctxs[fn] = p2[fn]
224 fctxs[fn] = p2[fn]
225
225
226 def fctxfn(repo, cx, path):
226 def fctxfn(repo, cx, path):
227 return fctxs.get(path)
227 return fctxs.get(path)
228
228
229 if len(ps) == 0 or ps[0] < 0:
229 if len(ps) == 0 or ps[0] < 0:
230 pars = [None, None]
230 pars = [None, None]
231 elif len(ps) == 1:
231 elif len(ps) == 1:
232 pars = [nodeids[ps[0]], None]
232 pars = [nodeids[ps[0]], None]
233 else:
233 else:
234 pars = [nodeids[p] for p in ps]
234 pars = [nodeids[p] for p in ps]
235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 date=(id, 0),
236 date=(id, 0),
237 user="debugbuilddag",
237 user="debugbuilddag",
238 extra={'branch': atbranch})
238 extra={'branch': atbranch})
239 nodeid = repo.commitctx(cx)
239 nodeid = repo.commitctx(cx)
240 nodeids.append(nodeid)
240 nodeids.append(nodeid)
241 at = id
241 at = id
242 elif type == 'l':
242 elif type == 'l':
243 id, name = data
243 id, name = data
244 ui.note(('tag %s\n' % name))
244 ui.note(('tag %s\n' % name))
245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 elif type == 'a':
246 elif type == 'a':
247 ui.note(('branch %s\n' % data))
247 ui.note(('branch %s\n' % data))
248 atbranch = data
248 atbranch = data
249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 tr.close()
250 tr.close()
251
251
252 if tags:
252 if tags:
253 repo.vfs.write("localtags", "".join(tags))
253 repo.vfs.write("localtags", "".join(tags))
254 finally:
254 finally:
255 ui.progress(_('building'), None)
255 ui.progress(_('building'), None)
256 release(tr, lock, wlock)
256 release(tr, lock, wlock)
257
257
258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 indent_string = ' ' * indent
259 indent_string = ' ' * indent
260 if all:
260 if all:
261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 % indent_string)
262 % indent_string)
263
263
264 def showchunks(named):
264 def showchunks(named):
265 ui.write("\n%s%s\n" % (indent_string, named))
265 ui.write("\n%s%s\n" % (indent_string, named))
266 for deltadata in gen.deltaiter():
266 for deltadata in gen.deltaiter():
267 node, p1, p2, cs, deltabase, delta, flags = deltadata
267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 ui.write("%s%s %s %s %s %s %s\n" %
268 ui.write("%s%s %s %s %s %s %s\n" %
269 (indent_string, hex(node), hex(p1), hex(p2),
269 (indent_string, hex(node), hex(p1), hex(p2),
270 hex(cs), hex(deltabase), len(delta)))
270 hex(cs), hex(deltabase), len(delta)))
271
271
272 chunkdata = gen.changelogheader()
272 chunkdata = gen.changelogheader()
273 showchunks("changelog")
273 showchunks("changelog")
274 chunkdata = gen.manifestheader()
274 chunkdata = gen.manifestheader()
275 showchunks("manifest")
275 showchunks("manifest")
276 for chunkdata in iter(gen.filelogheader, {}):
276 for chunkdata in iter(gen.filelogheader, {}):
277 fname = chunkdata['filename']
277 fname = chunkdata['filename']
278 showchunks(fname)
278 showchunks(fname)
279 else:
279 else:
280 if isinstance(gen, bundle2.unbundle20):
280 if isinstance(gen, bundle2.unbundle20):
281 raise error.Abort(_('use debugbundle2 for this file'))
281 raise error.Abort(_('use debugbundle2 for this file'))
282 chunkdata = gen.changelogheader()
282 chunkdata = gen.changelogheader()
283 for deltadata in gen.deltaiter():
283 for deltadata in gen.deltaiter():
284 node, p1, p2, cs, deltabase, delta, flags = deltadata
284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 ui.write("%s%s\n" % (indent_string, hex(node)))
285 ui.write("%s%s\n" % (indent_string, hex(node)))
286
286
287 def _debugobsmarkers(ui, part, indent=0, **opts):
287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 """display version and markers contained in 'data'"""
288 """display version and markers contained in 'data'"""
289 opts = pycompat.byteskwargs(opts)
289 opts = pycompat.byteskwargs(opts)
290 data = part.read()
290 data = part.read()
291 indent_string = ' ' * indent
291 indent_string = ' ' * indent
292 try:
292 try:
293 version, markers = obsolete._readmarkers(data)
293 version, markers = obsolete._readmarkers(data)
294 except error.UnknownVersion as exc:
294 except error.UnknownVersion as exc:
295 msg = "%sunsupported version: %s (%d bytes)\n"
295 msg = "%sunsupported version: %s (%d bytes)\n"
296 msg %= indent_string, exc.version, len(data)
296 msg %= indent_string, exc.version, len(data)
297 ui.write(msg)
297 ui.write(msg)
298 else:
298 else:
299 msg = "%sversion: %d (%d bytes)\n"
299 msg = "%sversion: %d (%d bytes)\n"
300 msg %= indent_string, version, len(data)
300 msg %= indent_string, version, len(data)
301 ui.write(msg)
301 ui.write(msg)
302 fm = ui.formatter('debugobsolete', opts)
302 fm = ui.formatter('debugobsolete', opts)
303 for rawmarker in sorted(markers):
303 for rawmarker in sorted(markers):
304 m = obsutil.marker(None, rawmarker)
304 m = obsutil.marker(None, rawmarker)
305 fm.startitem()
305 fm.startitem()
306 fm.plain(indent_string)
306 fm.plain(indent_string)
307 cmdutil.showmarker(fm, m)
307 cmdutil.showmarker(fm, m)
308 fm.end()
308 fm.end()
309
309
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    'data' is a binary phase-heads stream (as carried by a bundle2
    'phase-heads' part); each head is printed as '<hex node> <phase name>'
    prefixed with 'indent' spaces.
    """
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
def _quasirepr(thing):
    """return a stable, repr-like bytes rendering of 'thing'

    Mapping types are rendered with sorted keys so the output is
    deterministic across runs (plain dicts have no guaranteed order on
    this Python version); everything else falls back to repr().
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2

    Iterates the parts of the unbundler 'gen', printing each part's type
    and parameters, and recursing into the payload of changegroup,
    obsmarkers and phase-heads parts. If opts['part_type'] is non-empty,
    only parts of those types are shown.
    """
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only identify the bundle format; do not unpack its contents
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        # NOTE(review): leading spaces below restore upstream's two/four
        # space indentation, which the diff rendering collapsed -- confirm
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal/'r'emoved entries must exist in the first parent manifest
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded entries must NOT already exist in the first parent
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged entries must come from at least one parent
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # use a name distinct from the 'error' module: the original bound
        # the message to 'error', shadowing the module and making
        # error.Abort an AttributeError on the very next line
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
def _debugdisplaycolor(ui):
    """print every color/effect name, each rendered in its own effect"""
    # work on a copy so the caller's ui styles are not clobbered
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, user-defined color./terminfo. entries are
        # also valid effects
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, aligned"""
    ui.write(_('available style:\n'))
    # width of the longest label, used to align the effect columns
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # stand-alone revlog index given on the command line
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # emit 'n'ode events (rev, parents) and 'l'abel events for
            # the revisions the user asked to highlight
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # 'a'nnotation event whenever the branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # with -c/-m/--dir the revlog is implied, so the sole positional
        # argument is the revision, not a file
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    # NOTE(review): norepo and optionalrepo are both set here, matching the
    # original -- confirm against the command registration rules
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # classify the delta base of 'rev' and compute its chain sizes
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # base may be any earlier revision; name the common cases
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta the base is always the previous rev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios: empty revisions have uncomp == 0, and a chain
        # of empty revisions can have chainsize == 0, which previously
        # raised ZeroDivisionError
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize
        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            for revschunk in revlog._slicechunk(r, chain):
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize != 0:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity',
                     ' %10d %10d %9.5f',
                     readsize, largestblock, readdensity,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity)

        fm.plain('\n')

    fm.end()
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # NOTE(review): the padding below restores upstream's fixed-width
        # column (same width as the strftime output) that the diff
        # rendering collapsed -- confirm
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
733 @command('debugdiscovery',
733 @command('debugdiscovery',
734 [('', 'old', None, _('use old-style discovery')),
734 [('', 'old', None, _('use old-style discovery')),
735 ('', 'nonheads', None,
735 ('', 'nonheads', None,
736 _('use old-style discovery with non-heads included')),
736 _('use old-style discovery with non-heads included')),
737 ('', 'rev', [], 'restrict discovery to this set of revs'),
737 ] + cmdutil.remoteopts,
738 ] + cmdutil.remoteopts,
738 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
739 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
739 def debugdiscovery(ui, repo, remoteurl="default", **opts):
740 def debugdiscovery(ui, repo, remoteurl="default", **opts):
740 """runs the changeset discovery protocol in isolation"""
741 """runs the changeset discovery protocol in isolation"""
741 opts = pycompat.byteskwargs(opts)
742 opts = pycompat.byteskwargs(opts)
742 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
743 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
743 opts.get('branch'))
744 opts.get('branch'))
744 remote = hg.peer(repo, opts, remoteurl)
745 remote = hg.peer(repo, opts, remoteurl)
745 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
746 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
746
747
747 # make sure tests are repeatable
748 # make sure tests are repeatable
748 random.seed(12323)
749 random.seed(12323)
749
750
750 def doit(localheads, remoteheads, remote=remote):
751 def doit(pushedrevs, remoteheads, remote=remote):
751 if opts.get('old'):
752 if opts.get('old'):
752 if localheads:
753 raise error.Abort('cannot use localheads with old style '
754 'discovery')
755 if not util.safehasattr(remote, 'branches'):
753 if not util.safehasattr(remote, 'branches'):
756 # enable in-client legacy support
754 # enable in-client legacy support
757 remote = localrepo.locallegacypeer(remote.local())
755 remote = localrepo.locallegacypeer(remote.local())
758 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
756 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
759 force=True)
757 force=True)
760 common = set(common)
758 common = set(common)
761 if not opts.get('nonheads'):
759 if not opts.get('nonheads'):
762 ui.write(("unpruned common: %s\n") %
760 ui.write(("unpruned common: %s\n") %
763 " ".join(sorted(short(n) for n in common)))
761 " ".join(sorted(short(n) for n in common)))
764 dag = dagutil.revlogdag(repo.changelog)
762 dag = dagutil.revlogdag(repo.changelog)
765 all = dag.ancestorset(dag.internalizeall(common))
763 all = dag.ancestorset(dag.internalizeall(common))
766 common = dag.externalizeall(dag.headsetofconnecteds(all))
764 common = dag.externalizeall(dag.headsetofconnecteds(all))
767 else:
765 else:
768 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
766 nodes = None
767 if pushedrevs:
768 revs = scmutil.revrange(repo, pushedrevs)
769 nodes = [repo[r].node() for r in revs]
770 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
771 ancestorsof=nodes)
769 common = set(common)
772 common = set(common)
770 rheads = set(hds)
773 rheads = set(hds)
771 lheads = set(repo.heads())
774 lheads = set(repo.heads())
772 ui.write(("common heads: %s\n") %
775 ui.write(("common heads: %s\n") %
773 " ".join(sorted(short(n) for n in common)))
776 " ".join(sorted(short(n) for n in common)))
774 if lheads <= common:
777 if lheads <= common:
775 ui.write(("local is subset\n"))
778 ui.write(("local is subset\n"))
776 elif rheads <= common:
779 elif rheads <= common:
777 ui.write(("remote is subset\n"))
780 ui.write(("remote is subset\n"))
778
781
779 serverlogs = opts.get('serverlog')
782 serverlogs = opts.get('serverlog')
780 if serverlogs:
783 if serverlogs:
781 for filename in serverlogs:
784 for filename in serverlogs:
782 with open(filename, 'r') as logfile:
785 with open(filename, 'r') as logfile:
783 line = logfile.readline()
786 line = logfile.readline()
784 while line:
787 while line:
785 parts = line.strip().split(';')
788 parts = line.strip().split(';')
786 op = parts[1]
789 op = parts[1]
787 if op == 'cg':
790 if op == 'cg':
788 pass
791 pass
789 elif op == 'cgss':
792 elif op == 'cgss':
790 doit(parts[2].split(' '), parts[3].split(' '))
793 doit(parts[2].split(' '), parts[3].split(' '))
791 elif op == 'unb':
794 elif op == 'unb':
792 doit(parts[3].split(' '), parts[2].split(' '))
795 doit(parts[3].split(' '), parts[2].split(' '))
793 line = logfile.readline()
796 line = logfile.readline()
794 else:
797 else:
795 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
798 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
796 opts.get('remote_head'))
799 opts.get('remote_head'))
797 localrevs = opts.get('local_head')
800 localrevs = opts.get('rev')
798 doit(localrevs, remoterevs)
801 doit(localrevs, remoterevs)
799
802
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    # iterate extensions sorted by name for deterministic output
    for extname, extmod in sorted(loaded, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate the name with a compatibility marker in normal mode
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                fm.plain(' (%s!)\n' % exttestedwith[-1])

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
845
848
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        # show the parsed tree before evaluating it
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
858
861
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % ('yes' if util.checkexec(path) else 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % ('yes' if util.checklink(path) else 'no'))
    ui.write(('hardlink: %s\n') % ('yes' if util.checknlink(path) else 'no'))
    # probe case sensitivity with a throwaway file; the probe may fail on
    # read-only or otherwise restricted paths, in which case stay unknown
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = 'yes' if util.fscasesensitive(f.name) else 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
873
876
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # build the wire-protocol arguments from the command-line node ids
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # map user-facing compression names to on-disk bundle type identifiers
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
908
911
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
        return
    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise look for an ignored ancestor directory
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
950
953
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    # 'fmt' avoids shadowing the builtin 'format'
    fmt = opts.get('format', 0)
    if fmt not in (0, 1):
        raise error.Abort(_("unknown format %d") % fmt)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    # the base column shows the delta parent with generaldelta, otherwise
    # the chain base
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if fmt == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif fmt == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if fmt == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # damaged entries still get a row, with null parents
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif fmt == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1007
1010
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        pp = r.parents(r.node(i))
        # first parent always gets an edge; second only when it exists
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
1022
1025
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # write 'contents' to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1190
1193
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    # render each boolean as a single '1'/'0' character
    ui.write("%s\n" % ("".join(["1" if f else "0" for f in flags])))
1204
1207
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; all logic lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1209
1212
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: the original tested force_lock twice, so `-W` alone fell
    # through to the reporting code below after already unlinking wlock.
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file and return 1 if it is held."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so nobody else holds it: release
            # immediately and report it as free below via the except path
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1281
1284
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal word 'null' for readability
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        """Pretty-print the v1 or v2 merge-state records."""
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                # extras is a flat [key, value, key, value, ...] list
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'LOml' order; unknown ones
        # sort after, by their payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1380
1383
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        # no prefix given: every name is a completion candidate
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1400
1403
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """Parse a full hex node id, aborting on anything else."""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # --delete mode: remove markers by index and return early
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1516
1519
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        """Return (files, dirs) completing *path* for dirstate states in
        *acceptable* (a string of one-letter state codes)."""
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # path escapes the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/', so normalize the prefix on
        # platforms with a different separator
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # truncate at the next path segment unless --full
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        # no state filter given means accept every state
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1581
1584
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # silence _picktool's own output unless --debug was given
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1660
1663
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        # set mode: conditional update of one key
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        # list mode: dump every key/value in the namespace
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))
1681
1684
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent-vector) encodings of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): if none of the branches above matched, `rel` would be
    # unbound and the final write would raise NameError; presumably pvec
    # comparisons are exhaustive -- confirm against the pvec module.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1702
1705
1703 @command('debugrebuilddirstate|debugrebuildstate',
1706 @command('debugrebuilddirstate|debugrebuildstate',
1704 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1707 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1705 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1708 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1706 'the working copy parent')),
1709 'the working copy parent')),
1707 ],
1710 ],
1708 _('[-r REV]'))
1711 _('[-r REV]'))
1709 def debugrebuilddirstate(ui, repo, rev, **opts):
1712 def debugrebuilddirstate(ui, repo, rev, **opts):
1710 """rebuild the dirstate as it would look like for the given revision
1713 """rebuild the dirstate as it would look like for the given revision
1711
1714
1712 If no revision is specified the first current parent will be used.
1715 If no revision is specified the first current parent will be used.
1713
1716
1714 The dirstate will be set to the files of the given revision.
1717 The dirstate will be set to the files of the given revision.
1715 The actual working directory content or existing dirstate
1718 The actual working directory content or existing dirstate
1716 information such as adds or removes is not considered.
1719 information such as adds or removes is not considered.
1717
1720
1718 ``minimal`` will only rebuild the dirstate status for files that claim to be
1721 ``minimal`` will only rebuild the dirstate status for files that claim to be
1719 tracked but are not in the parent manifest, or that exist in the parent
1722 tracked but are not in the parent manifest, or that exist in the parent
1720 manifest but are not in the dirstate. It will not change adds, removes, or
1723 manifest but are not in the dirstate. It will not change adds, removes, or
1721 modified files that are in the working copy parent.
1724 modified files that are in the working copy parent.
1722
1725
1723 One use of this command is to make the next :hg:`status` invocation
1726 One use of this command is to make the next :hg:`status` invocation
1724 check the actual file content.
1727 check the actual file content.
1725 """
1728 """
1726 ctx = scmutil.revsingle(repo, rev)
1729 ctx = scmutil.revsingle(repo, rev)
1727 with repo.wlock():
1730 with repo.wlock():
1728 dirstate = repo.dirstate
1731 dirstate = repo.dirstate
1729 changedfiles = None
1732 changedfiles = None
1730 # See command doc for what minimal does.
1733 # See command doc for what minimal does.
1731 if opts.get(r'minimal'):
1734 if opts.get(r'minimal'):
1732 manifestfiles = set(ctx.manifest().keys())
1735 manifestfiles = set(ctx.manifest().keys())
1733 dirstatefiles = set(dirstate)
1736 dirstatefiles = set(dirstate)
1734 manifestonly = manifestfiles - dirstatefiles
1737 manifestonly = manifestfiles - dirstatefiles
1735 dsonly = dirstatefiles - manifestfiles
1738 dsonly = dirstatefiles - manifestfiles
1736 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1739 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1737 changedfiles = manifestonly | dsnotadded
1740 changedfiles = manifestonly | dsnotadded
1738
1741
1739 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1742 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1740
1743
1741 @command('debugrebuildfncache', [], '')
1744 @command('debugrebuildfncache', [], '')
1742 def debugrebuildfncache(ui, repo):
1745 def debugrebuildfncache(ui, repo):
1743 """rebuild the fncache file"""
1746 """rebuild the fncache file"""
1744 repair.rebuildfncache(ui, repo)
1747 repair.rebuildfncache(ui, repo)
1745
1748
1746 @command('debugrename',
1749 @command('debugrename',
1747 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1750 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1748 _('[-r REV] FILE'))
1751 _('[-r REV] FILE'))
1749 def debugrename(ui, repo, file1, *pats, **opts):
1752 def debugrename(ui, repo, file1, *pats, **opts):
1750 """dump rename information"""
1753 """dump rename information"""
1751
1754
1752 opts = pycompat.byteskwargs(opts)
1755 opts = pycompat.byteskwargs(opts)
1753 ctx = scmutil.revsingle(repo, opts.get('rev'))
1756 ctx = scmutil.revsingle(repo, opts.get('rev'))
1754 m = scmutil.match(ctx, (file1,) + pats, opts)
1757 m = scmutil.match(ctx, (file1,) + pats, opts)
1755 for abs in ctx.walk(m):
1758 for abs in ctx.walk(m):
1756 fctx = ctx[abs]
1759 fctx = ctx[abs]
1757 o = fctx.filelog().renamed(fctx.filenode())
1760 o = fctx.filelog().renamed(fctx.filenode())
1758 rel = m.rel(abs)
1761 rel = m.rel(abs)
1759 if o:
1762 if o:
1760 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1763 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1761 else:
1764 else:
1762 ui.write(_("%s not renamed\n") % rel)
1765 ui.write(_("%s not renamed\n") % rel)
1763
1766
1764 @command('debugrevlog', cmdutil.debugrevlogopts +
1767 @command('debugrevlog', cmdutil.debugrevlogopts +
1765 [('d', 'dump', False, _('dump index data'))],
1768 [('d', 'dump', False, _('dump index data'))],
1766 _('-c|-m|FILE'),
1769 _('-c|-m|FILE'),
1767 optionalrepo=True)
1770 optionalrepo=True)
1768 def debugrevlog(ui, repo, file_=None, **opts):
1771 def debugrevlog(ui, repo, file_=None, **opts):
1769 """show data and statistics about a revlog"""
1772 """show data and statistics about a revlog"""
1770 opts = pycompat.byteskwargs(opts)
1773 opts = pycompat.byteskwargs(opts)
1771 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1774 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1772
1775
1773 if opts.get("dump"):
1776 if opts.get("dump"):
1774 numrevs = len(r)
1777 numrevs = len(r)
1775 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1778 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1776 " rawsize totalsize compression heads chainlen\n"))
1779 " rawsize totalsize compression heads chainlen\n"))
1777 ts = 0
1780 ts = 0
1778 heads = set()
1781 heads = set()
1779
1782
1780 for rev in xrange(numrevs):
1783 for rev in xrange(numrevs):
1781 dbase = r.deltaparent(rev)
1784 dbase = r.deltaparent(rev)
1782 if dbase == -1:
1785 if dbase == -1:
1783 dbase = rev
1786 dbase = rev
1784 cbase = r.chainbase(rev)
1787 cbase = r.chainbase(rev)
1785 clen = r.chainlen(rev)
1788 clen = r.chainlen(rev)
1786 p1, p2 = r.parentrevs(rev)
1789 p1, p2 = r.parentrevs(rev)
1787 rs = r.rawsize(rev)
1790 rs = r.rawsize(rev)
1788 ts = ts + rs
1791 ts = ts + rs
1789 heads -= set(r.parentrevs(rev))
1792 heads -= set(r.parentrevs(rev))
1790 heads.add(rev)
1793 heads.add(rev)
1791 try:
1794 try:
1792 compression = ts / r.end(rev)
1795 compression = ts / r.end(rev)
1793 except ZeroDivisionError:
1796 except ZeroDivisionError:
1794 compression = 0
1797 compression = 0
1795 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1798 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1796 "%11d %5d %8d\n" %
1799 "%11d %5d %8d\n" %
1797 (rev, p1, p2, r.start(rev), r.end(rev),
1800 (rev, p1, p2, r.start(rev), r.end(rev),
1798 r.start(dbase), r.start(cbase),
1801 r.start(dbase), r.start(cbase),
1799 r.start(p1), r.start(p2),
1802 r.start(p1), r.start(p2),
1800 rs, ts, compression, len(heads), clen))
1803 rs, ts, compression, len(heads), clen))
1801 return 0
1804 return 0
1802
1805
1803 v = r.version
1806 v = r.version
1804 format = v & 0xFFFF
1807 format = v & 0xFFFF
1805 flags = []
1808 flags = []
1806 gdelta = False
1809 gdelta = False
1807 if v & revlog.FLAG_INLINE_DATA:
1810 if v & revlog.FLAG_INLINE_DATA:
1808 flags.append('inline')
1811 flags.append('inline')
1809 if v & revlog.FLAG_GENERALDELTA:
1812 if v & revlog.FLAG_GENERALDELTA:
1810 gdelta = True
1813 gdelta = True
1811 flags.append('generaldelta')
1814 flags.append('generaldelta')
1812 if not flags:
1815 if not flags:
1813 flags = ['(none)']
1816 flags = ['(none)']
1814
1817
1815 nummerges = 0
1818 nummerges = 0
1816 numfull = 0
1819 numfull = 0
1817 numprev = 0
1820 numprev = 0
1818 nump1 = 0
1821 nump1 = 0
1819 nump2 = 0
1822 nump2 = 0
1820 numother = 0
1823 numother = 0
1821 nump1prev = 0
1824 nump1prev = 0
1822 nump2prev = 0
1825 nump2prev = 0
1823 chainlengths = []
1826 chainlengths = []
1824 chainbases = []
1827 chainbases = []
1825 chainspans = []
1828 chainspans = []
1826
1829
1827 datasize = [None, 0, 0]
1830 datasize = [None, 0, 0]
1828 fullsize = [None, 0, 0]
1831 fullsize = [None, 0, 0]
1829 deltasize = [None, 0, 0]
1832 deltasize = [None, 0, 0]
1830 chunktypecounts = {}
1833 chunktypecounts = {}
1831 chunktypesizes = {}
1834 chunktypesizes = {}
1832
1835
1833 def addsize(size, l):
1836 def addsize(size, l):
1834 if l[0] is None or size < l[0]:
1837 if l[0] is None or size < l[0]:
1835 l[0] = size
1838 l[0] = size
1836 if size > l[1]:
1839 if size > l[1]:
1837 l[1] = size
1840 l[1] = size
1838 l[2] += size
1841 l[2] += size
1839
1842
1840 numrevs = len(r)
1843 numrevs = len(r)
1841 for rev in xrange(numrevs):
1844 for rev in xrange(numrevs):
1842 p1, p2 = r.parentrevs(rev)
1845 p1, p2 = r.parentrevs(rev)
1843 delta = r.deltaparent(rev)
1846 delta = r.deltaparent(rev)
1844 if format > 0:
1847 if format > 0:
1845 addsize(r.rawsize(rev), datasize)
1848 addsize(r.rawsize(rev), datasize)
1846 if p2 != nullrev:
1849 if p2 != nullrev:
1847 nummerges += 1
1850 nummerges += 1
1848 size = r.length(rev)
1851 size = r.length(rev)
1849 if delta == nullrev:
1852 if delta == nullrev:
1850 chainlengths.append(0)
1853 chainlengths.append(0)
1851 chainbases.append(r.start(rev))
1854 chainbases.append(r.start(rev))
1852 chainspans.append(size)
1855 chainspans.append(size)
1853 numfull += 1
1856 numfull += 1
1854 addsize(size, fullsize)
1857 addsize(size, fullsize)
1855 else:
1858 else:
1856 chainlengths.append(chainlengths[delta] + 1)
1859 chainlengths.append(chainlengths[delta] + 1)
1857 baseaddr = chainbases[delta]
1860 baseaddr = chainbases[delta]
1858 revaddr = r.start(rev)
1861 revaddr = r.start(rev)
1859 chainbases.append(baseaddr)
1862 chainbases.append(baseaddr)
1860 chainspans.append((revaddr - baseaddr) + size)
1863 chainspans.append((revaddr - baseaddr) + size)
1861 addsize(size, deltasize)
1864 addsize(size, deltasize)
1862 if delta == rev - 1:
1865 if delta == rev - 1:
1863 numprev += 1
1866 numprev += 1
1864 if delta == p1:
1867 if delta == p1:
1865 nump1prev += 1
1868 nump1prev += 1
1866 elif delta == p2:
1869 elif delta == p2:
1867 nump2prev += 1
1870 nump2prev += 1
1868 elif delta == p1:
1871 elif delta == p1:
1869 nump1 += 1
1872 nump1 += 1
1870 elif delta == p2:
1873 elif delta == p2:
1871 nump2 += 1
1874 nump2 += 1
1872 elif delta != nullrev:
1875 elif delta != nullrev:
1873 numother += 1
1876 numother += 1
1874
1877
1875 # Obtain data on the raw chunks in the revlog.
1878 # Obtain data on the raw chunks in the revlog.
1876 segment = r._getsegmentforrevs(rev, rev)[1]
1879 segment = r._getsegmentforrevs(rev, rev)[1]
1877 if segment:
1880 if segment:
1878 chunktype = bytes(segment[0:1])
1881 chunktype = bytes(segment[0:1])
1879 else:
1882 else:
1880 chunktype = 'empty'
1883 chunktype = 'empty'
1881
1884
1882 if chunktype not in chunktypecounts:
1885 if chunktype not in chunktypecounts:
1883 chunktypecounts[chunktype] = 0
1886 chunktypecounts[chunktype] = 0
1884 chunktypesizes[chunktype] = 0
1887 chunktypesizes[chunktype] = 0
1885
1888
1886 chunktypecounts[chunktype] += 1
1889 chunktypecounts[chunktype] += 1
1887 chunktypesizes[chunktype] += size
1890 chunktypesizes[chunktype] += size
1888
1891
1889 # Adjust size min value for empty cases
1892 # Adjust size min value for empty cases
1890 for size in (datasize, fullsize, deltasize):
1893 for size in (datasize, fullsize, deltasize):
1891 if size[0] is None:
1894 if size[0] is None:
1892 size[0] = 0
1895 size[0] = 0
1893
1896
1894 numdeltas = numrevs - numfull
1897 numdeltas = numrevs - numfull
1895 numoprev = numprev - nump1prev - nump2prev
1898 numoprev = numprev - nump1prev - nump2prev
1896 totalrawsize = datasize[2]
1899 totalrawsize = datasize[2]
1897 datasize[2] /= numrevs
1900 datasize[2] /= numrevs
1898 fulltotal = fullsize[2]
1901 fulltotal = fullsize[2]
1899 fullsize[2] /= numfull
1902 fullsize[2] /= numfull
1900 deltatotal = deltasize[2]
1903 deltatotal = deltasize[2]
1901 if numrevs - numfull > 0:
1904 if numrevs - numfull > 0:
1902 deltasize[2] /= numrevs - numfull
1905 deltasize[2] /= numrevs - numfull
1903 totalsize = fulltotal + deltatotal
1906 totalsize = fulltotal + deltatotal
1904 avgchainlen = sum(chainlengths) / numrevs
1907 avgchainlen = sum(chainlengths) / numrevs
1905 maxchainlen = max(chainlengths)
1908 maxchainlen = max(chainlengths)
1906 maxchainspan = max(chainspans)
1909 maxchainspan = max(chainspans)
1907 compratio = 1
1910 compratio = 1
1908 if totalsize:
1911 if totalsize:
1909 compratio = totalrawsize / totalsize
1912 compratio = totalrawsize / totalsize
1910
1913
1911 basedfmtstr = '%%%dd\n'
1914 basedfmtstr = '%%%dd\n'
1912 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1915 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1913
1916
1914 def dfmtstr(max):
1917 def dfmtstr(max):
1915 return basedfmtstr % len(str(max))
1918 return basedfmtstr % len(str(max))
1916 def pcfmtstr(max, padding=0):
1919 def pcfmtstr(max, padding=0):
1917 return basepcfmtstr % (len(str(max)), ' ' * padding)
1920 return basepcfmtstr % (len(str(max)), ' ' * padding)
1918
1921
1919 def pcfmt(value, total):
1922 def pcfmt(value, total):
1920 if total:
1923 if total:
1921 return (value, 100 * float(value) / total)
1924 return (value, 100 * float(value) / total)
1922 else:
1925 else:
1923 return value, 100.0
1926 return value, 100.0
1924
1927
1925 ui.write(('format : %d\n') % format)
1928 ui.write(('format : %d\n') % format)
1926 ui.write(('flags : %s\n') % ', '.join(flags))
1929 ui.write(('flags : %s\n') % ', '.join(flags))
1927
1930
1928 ui.write('\n')
1931 ui.write('\n')
1929 fmt = pcfmtstr(totalsize)
1932 fmt = pcfmtstr(totalsize)
1930 fmt2 = dfmtstr(totalsize)
1933 fmt2 = dfmtstr(totalsize)
1931 ui.write(('revisions : ') + fmt2 % numrevs)
1934 ui.write(('revisions : ') + fmt2 % numrevs)
1932 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1935 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1933 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1936 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1934 ui.write(('revisions : ') + fmt2 % numrevs)
1937 ui.write(('revisions : ') + fmt2 % numrevs)
1935 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1938 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1936 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1939 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1937 ui.write(('revision size : ') + fmt2 % totalsize)
1940 ui.write(('revision size : ') + fmt2 % totalsize)
1938 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1941 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1939 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1942 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1940
1943
1941 def fmtchunktype(chunktype):
1944 def fmtchunktype(chunktype):
1942 if chunktype == 'empty':
1945 if chunktype == 'empty':
1943 return ' %s : ' % chunktype
1946 return ' %s : ' % chunktype
1944 elif chunktype in pycompat.bytestr(string.ascii_letters):
1947 elif chunktype in pycompat.bytestr(string.ascii_letters):
1945 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1948 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1946 else:
1949 else:
1947 return ' 0x%s : ' % hex(chunktype)
1950 return ' 0x%s : ' % hex(chunktype)
1948
1951
1949 ui.write('\n')
1952 ui.write('\n')
1950 ui.write(('chunks : ') + fmt2 % numrevs)
1953 ui.write(('chunks : ') + fmt2 % numrevs)
1951 for chunktype in sorted(chunktypecounts):
1954 for chunktype in sorted(chunktypecounts):
1952 ui.write(fmtchunktype(chunktype))
1955 ui.write(fmtchunktype(chunktype))
1953 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1956 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1954 ui.write(('chunks size : ') + fmt2 % totalsize)
1957 ui.write(('chunks size : ') + fmt2 % totalsize)
1955 for chunktype in sorted(chunktypecounts):
1958 for chunktype in sorted(chunktypecounts):
1956 ui.write(fmtchunktype(chunktype))
1959 ui.write(fmtchunktype(chunktype))
1957 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1960 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1958
1961
1959 ui.write('\n')
1962 ui.write('\n')
1960 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1963 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1961 ui.write(('avg chain length : ') + fmt % avgchainlen)
1964 ui.write(('avg chain length : ') + fmt % avgchainlen)
1962 ui.write(('max chain length : ') + fmt % maxchainlen)
1965 ui.write(('max chain length : ') + fmt % maxchainlen)
1963 ui.write(('max chain reach : ') + fmt % maxchainspan)
1966 ui.write(('max chain reach : ') + fmt % maxchainspan)
1964 ui.write(('compression ratio : ') + fmt % compratio)
1967 ui.write(('compression ratio : ') + fmt % compratio)
1965
1968
1966 if format > 0:
1969 if format > 0:
1967 ui.write('\n')
1970 ui.write('\n')
1968 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1971 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1969 % tuple(datasize))
1972 % tuple(datasize))
1970 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1973 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1971 % tuple(fullsize))
1974 % tuple(fullsize))
1972 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1975 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1973 % tuple(deltasize))
1976 % tuple(deltasize))
1974
1977
1975 if numdeltas > 0:
1978 if numdeltas > 0:
1976 ui.write('\n')
1979 ui.write('\n')
1977 fmt = pcfmtstr(numdeltas)
1980 fmt = pcfmtstr(numdeltas)
1978 fmt2 = pcfmtstr(numdeltas, 4)
1981 fmt2 = pcfmtstr(numdeltas, 4)
1979 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1982 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1980 if numprev > 0:
1983 if numprev > 0:
1981 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1984 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1982 numprev))
1985 numprev))
1983 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1986 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1984 numprev))
1987 numprev))
1985 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1988 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1986 numprev))
1989 numprev))
1987 if gdelta:
1990 if gdelta:
1988 ui.write(('deltas against p1 : ')
1991 ui.write(('deltas against p1 : ')
1989 + fmt % pcfmt(nump1, numdeltas))
1992 + fmt % pcfmt(nump1, numdeltas))
1990 ui.write(('deltas against p2 : ')
1993 ui.write(('deltas against p2 : ')
1991 + fmt % pcfmt(nump2, numdeltas))
1994 + fmt % pcfmt(nump2, numdeltas))
1992 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1995 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1993 numdeltas))
1996 numdeltas))
1994
1997
1995 @command('debugrevspec',
1998 @command('debugrevspec',
1996 [('', 'optimize', None,
1999 [('', 'optimize', None,
1997 _('print parsed tree after optimizing (DEPRECATED)')),
2000 _('print parsed tree after optimizing (DEPRECATED)')),
1998 ('', 'show-revs', True, _('print list of result revisions (default)')),
2001 ('', 'show-revs', True, _('print list of result revisions (default)')),
1999 ('s', 'show-set', None, _('print internal representation of result set')),
2002 ('s', 'show-set', None, _('print internal representation of result set')),
2000 ('p', 'show-stage', [],
2003 ('p', 'show-stage', [],
2001 _('print parsed tree at the given stage'), _('NAME')),
2004 _('print parsed tree at the given stage'), _('NAME')),
2002 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2005 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2003 ('', 'verify-optimized', False, _('verify optimized result')),
2006 ('', 'verify-optimized', False, _('verify optimized result')),
2004 ],
2007 ],
2005 ('REVSPEC'))
2008 ('REVSPEC'))
2006 def debugrevspec(ui, repo, expr, **opts):
2009 def debugrevspec(ui, repo, expr, **opts):
2007 """parse and apply a revision specification
2010 """parse and apply a revision specification
2008
2011
2009 Use -p/--show-stage option to print the parsed tree at the given stages.
2012 Use -p/--show-stage option to print the parsed tree at the given stages.
2010 Use -p all to print tree at every stage.
2013 Use -p all to print tree at every stage.
2011
2014
2012 Use --no-show-revs option with -s or -p to print only the set
2015 Use --no-show-revs option with -s or -p to print only the set
2013 representation or the parsed tree respectively.
2016 representation or the parsed tree respectively.
2014
2017
2015 Use --verify-optimized to compare the optimized result with the unoptimized
2018 Use --verify-optimized to compare the optimized result with the unoptimized
2016 one. Returns 1 if the optimized result differs.
2019 one. Returns 1 if the optimized result differs.
2017 """
2020 """
2018 opts = pycompat.byteskwargs(opts)
2021 opts = pycompat.byteskwargs(opts)
2019 aliases = ui.configitems('revsetalias')
2022 aliases = ui.configitems('revsetalias')
2020 stages = [
2023 stages = [
2021 ('parsed', lambda tree: tree),
2024 ('parsed', lambda tree: tree),
2022 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2025 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2023 ui.warn)),
2026 ui.warn)),
2024 ('concatenated', revsetlang.foldconcat),
2027 ('concatenated', revsetlang.foldconcat),
2025 ('analyzed', revsetlang.analyze),
2028 ('analyzed', revsetlang.analyze),
2026 ('optimized', revsetlang.optimize),
2029 ('optimized', revsetlang.optimize),
2027 ]
2030 ]
2028 if opts['no_optimized']:
2031 if opts['no_optimized']:
2029 stages = stages[:-1]
2032 stages = stages[:-1]
2030 if opts['verify_optimized'] and opts['no_optimized']:
2033 if opts['verify_optimized'] and opts['no_optimized']:
2031 raise error.Abort(_('cannot use --verify-optimized with '
2034 raise error.Abort(_('cannot use --verify-optimized with '
2032 '--no-optimized'))
2035 '--no-optimized'))
2033 stagenames = set(n for n, f in stages)
2036 stagenames = set(n for n, f in stages)
2034
2037
2035 showalways = set()
2038 showalways = set()
2036 showchanged = set()
2039 showchanged = set()
2037 if ui.verbose and not opts['show_stage']:
2040 if ui.verbose and not opts['show_stage']:
2038 # show parsed tree by --verbose (deprecated)
2041 # show parsed tree by --verbose (deprecated)
2039 showalways.add('parsed')
2042 showalways.add('parsed')
2040 showchanged.update(['expanded', 'concatenated'])
2043 showchanged.update(['expanded', 'concatenated'])
2041 if opts['optimize']:
2044 if opts['optimize']:
2042 showalways.add('optimized')
2045 showalways.add('optimized')
2043 if opts['show_stage'] and opts['optimize']:
2046 if opts['show_stage'] and opts['optimize']:
2044 raise error.Abort(_('cannot use --optimize with --show-stage'))
2047 raise error.Abort(_('cannot use --optimize with --show-stage'))
2045 if opts['show_stage'] == ['all']:
2048 if opts['show_stage'] == ['all']:
2046 showalways.update(stagenames)
2049 showalways.update(stagenames)
2047 else:
2050 else:
2048 for n in opts['show_stage']:
2051 for n in opts['show_stage']:
2049 if n not in stagenames:
2052 if n not in stagenames:
2050 raise error.Abort(_('invalid stage name: %s') % n)
2053 raise error.Abort(_('invalid stage name: %s') % n)
2051 showalways.update(opts['show_stage'])
2054 showalways.update(opts['show_stage'])
2052
2055
2053 treebystage = {}
2056 treebystage = {}
2054 printedtree = None
2057 printedtree = None
2055 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2058 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2056 for n, f in stages:
2059 for n, f in stages:
2057 treebystage[n] = tree = f(tree)
2060 treebystage[n] = tree = f(tree)
2058 if n in showalways or (n in showchanged and tree != printedtree):
2061 if n in showalways or (n in showchanged and tree != printedtree):
2059 if opts['show_stage'] or n != 'parsed':
2062 if opts['show_stage'] or n != 'parsed':
2060 ui.write(("* %s:\n") % n)
2063 ui.write(("* %s:\n") % n)
2061 ui.write(revsetlang.prettyformat(tree), "\n")
2064 ui.write(revsetlang.prettyformat(tree), "\n")
2062 printedtree = tree
2065 printedtree = tree
2063
2066
2064 if opts['verify_optimized']:
2067 if opts['verify_optimized']:
2065 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2068 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2066 brevs = revset.makematcher(treebystage['optimized'])(repo)
2069 brevs = revset.makematcher(treebystage['optimized'])(repo)
2067 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2070 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2068 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2071 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2069 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2072 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2070 arevs = list(arevs)
2073 arevs = list(arevs)
2071 brevs = list(brevs)
2074 brevs = list(brevs)
2072 if arevs == brevs:
2075 if arevs == brevs:
2073 return 0
2076 return 0
2074 ui.write(('--- analyzed\n'), label='diff.file_a')
2077 ui.write(('--- analyzed\n'), label='diff.file_a')
2075 ui.write(('+++ optimized\n'), label='diff.file_b')
2078 ui.write(('+++ optimized\n'), label='diff.file_b')
2076 sm = difflib.SequenceMatcher(None, arevs, brevs)
2079 sm = difflib.SequenceMatcher(None, arevs, brevs)
2077 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2080 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2078 if tag in ('delete', 'replace'):
2081 if tag in ('delete', 'replace'):
2079 for c in arevs[alo:ahi]:
2082 for c in arevs[alo:ahi]:
2080 ui.write('-%s\n' % c, label='diff.deleted')
2083 ui.write('-%s\n' % c, label='diff.deleted')
2081 if tag in ('insert', 'replace'):
2084 if tag in ('insert', 'replace'):
2082 for c in brevs[blo:bhi]:
2085 for c in brevs[blo:bhi]:
2083 ui.write('+%s\n' % c, label='diff.inserted')
2086 ui.write('+%s\n' % c, label='diff.inserted')
2084 if tag == 'equal':
2087 if tag == 'equal':
2085 for c in arevs[alo:ahi]:
2088 for c in arevs[alo:ahi]:
2086 ui.write(' %s\n' % c)
2089 ui.write(' %s\n' % c)
2087 return 1
2090 return 1
2088
2091
2089 func = revset.makematcher(tree)
2092 func = revset.makematcher(tree)
2090 revs = func(repo)
2093 revs = func(repo)
2091 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2094 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2092 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2095 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2093 if not opts['show_revs']:
2096 if not opts['show_revs']:
2094 return
2097 return
2095 for c in revs:
2098 for c in revs:
2096 ui.write("%s\n" % c)
2099 ui.write("%s\n" % c)
2097
2100
2098 @command('debugsetparents', [], _('REV1 [REV2]'))
2101 @command('debugsetparents', [], _('REV1 [REV2]'))
2099 def debugsetparents(ui, repo, rev1, rev2=None):
2102 def debugsetparents(ui, repo, rev1, rev2=None):
2100 """manually set the parents of the current working directory
2103 """manually set the parents of the current working directory
2101
2104
2102 This is useful for writing repository conversion tools, but should
2105 This is useful for writing repository conversion tools, but should
2103 be used with care. For example, neither the working directory nor the
2106 be used with care. For example, neither the working directory nor the
2104 dirstate is updated, so file status may be incorrect after running this
2107 dirstate is updated, so file status may be incorrect after running this
2105 command.
2108 command.
2106
2109
2107 Returns 0 on success.
2110 Returns 0 on success.
2108 """
2111 """
2109
2112
2110 r1 = scmutil.revsingle(repo, rev1).node()
2113 r1 = scmutil.revsingle(repo, rev1).node()
2111 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2114 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2112
2115
2113 with repo.wlock():
2116 with repo.wlock():
2114 repo.setparents(r1, r2)
2117 repo.setparents(r1, r2)
2115
2118
2116 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2119 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2117 def debugssl(ui, repo, source=None, **opts):
2120 def debugssl(ui, repo, source=None, **opts):
2118 '''test a secure connection to a server
2121 '''test a secure connection to a server
2119
2122
2120 This builds the certificate chain for the server on Windows, installing the
2123 This builds the certificate chain for the server on Windows, installing the
2121 missing intermediates and trusted root via Windows Update if necessary. It
2124 missing intermediates and trusted root via Windows Update if necessary. It
2122 does nothing on other platforms.
2125 does nothing on other platforms.
2123
2126
2124 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2127 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2125 that server is used. See :hg:`help urls` for more information.
2128 that server is used. See :hg:`help urls` for more information.
2126
2129
2127 If the update succeeds, retry the original operation. Otherwise, the cause
2130 If the update succeeds, retry the original operation. Otherwise, the cause
2128 of the SSL error is likely another issue.
2131 of the SSL error is likely another issue.
2129 '''
2132 '''
2130 if not pycompat.iswindows:
2133 if not pycompat.iswindows:
2131 raise error.Abort(_('certificate chain building is only possible on '
2134 raise error.Abort(_('certificate chain building is only possible on '
2132 'Windows'))
2135 'Windows'))
2133
2136
2134 if not source:
2137 if not source:
2135 if not repo:
2138 if not repo:
2136 raise error.Abort(_("there is no Mercurial repository here, and no "
2139 raise error.Abort(_("there is no Mercurial repository here, and no "
2137 "server specified"))
2140 "server specified"))
2138 source = "default"
2141 source = "default"
2139
2142
2140 source, branches = hg.parseurl(ui.expandpath(source))
2143 source, branches = hg.parseurl(ui.expandpath(source))
2141 url = util.url(source)
2144 url = util.url(source)
2142 addr = None
2145 addr = None
2143
2146
2144 if url.scheme == 'https':
2147 if url.scheme == 'https':
2145 addr = (url.host, url.port or 443)
2148 addr = (url.host, url.port or 443)
2146 elif url.scheme == 'ssh':
2149 elif url.scheme == 'ssh':
2147 addr = (url.host, url.port or 22)
2150 addr = (url.host, url.port or 22)
2148 else:
2151 else:
2149 raise error.Abort(_("only https and ssh connections are supported"))
2152 raise error.Abort(_("only https and ssh connections are supported"))
2150
2153
2151 from . import win32
2154 from . import win32
2152
2155
2153 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2156 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2154 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2157 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2155
2158
2156 try:
2159 try:
2157 s.connect(addr)
2160 s.connect(addr)
2158 cert = s.getpeercert(True)
2161 cert = s.getpeercert(True)
2159
2162
2160 ui.status(_('checking the certificate chain for %s\n') % url.host)
2163 ui.status(_('checking the certificate chain for %s\n') % url.host)
2161
2164
2162 complete = win32.checkcertificatechain(cert, build=False)
2165 complete = win32.checkcertificatechain(cert, build=False)
2163
2166
2164 if not complete:
2167 if not complete:
2165 ui.status(_('certificate chain is incomplete, updating... '))
2168 ui.status(_('certificate chain is incomplete, updating... '))
2166
2169
2167 if not win32.checkcertificatechain(cert):
2170 if not win32.checkcertificatechain(cert):
2168 ui.status(_('failed.\n'))
2171 ui.status(_('failed.\n'))
2169 else:
2172 else:
2170 ui.status(_('done.\n'))
2173 ui.status(_('done.\n'))
2171 else:
2174 else:
2172 ui.status(_('full certificate chain is available\n'))
2175 ui.status(_('full certificate chain is available\n'))
2173 finally:
2176 finally:
2174 s.close()
2177 s.close()
2175
2178
2176 @command('debugsub',
2179 @command('debugsub',
2177 [('r', 'rev', '',
2180 [('r', 'rev', '',
2178 _('revision to check'), _('REV'))],
2181 _('revision to check'), _('REV'))],
2179 _('[-r REV] [REV]'))
2182 _('[-r REV] [REV]'))
2180 def debugsub(ui, repo, rev=None):
2183 def debugsub(ui, repo, rev=None):
2181 ctx = scmutil.revsingle(repo, rev, None)
2184 ctx = scmutil.revsingle(repo, rev, None)
2182 for k, v in sorted(ctx.substate.items()):
2185 for k, v in sorted(ctx.substate.items()):
2183 ui.write(('path %s\n') % k)
2186 ui.write(('path %s\n') % k)
2184 ui.write((' source %s\n') % v[0])
2187 ui.write((' source %s\n') % v[0])
2185 ui.write((' revision %s\n') % v[1])
2188 ui.write((' revision %s\n') % v[1])
2186
2189
2187 @command('debugsuccessorssets',
2190 @command('debugsuccessorssets',
2188 [('', 'closest', False, _('return closest successors sets only'))],
2191 [('', 'closest', False, _('return closest successors sets only'))],
2189 _('[REV]'))
2192 _('[REV]'))
2190 def debugsuccessorssets(ui, repo, *revs, **opts):
2193 def debugsuccessorssets(ui, repo, *revs, **opts):
2191 """show set of successors for revision
2194 """show set of successors for revision
2192
2195
2193 A successors set of changeset A is a consistent group of revisions that
2196 A successors set of changeset A is a consistent group of revisions that
2194 succeed A. It contains non-obsolete changesets only unless closests
2197 succeed A. It contains non-obsolete changesets only unless closests
2195 successors set is set.
2198 successors set is set.
2196
2199
2197 In most cases a changeset A has a single successors set containing a single
2200 In most cases a changeset A has a single successors set containing a single
2198 successor (changeset A replaced by A').
2201 successor (changeset A replaced by A').
2199
2202
2200 A changeset that is made obsolete with no successors are called "pruned".
2203 A changeset that is made obsolete with no successors are called "pruned".
2201 Such changesets have no successors sets at all.
2204 Such changesets have no successors sets at all.
2202
2205
2203 A changeset that has been "split" will have a successors set containing
2206 A changeset that has been "split" will have a successors set containing
2204 more than one successor.
2207 more than one successor.
2205
2208
2206 A changeset that has been rewritten in multiple different ways is called
2209 A changeset that has been rewritten in multiple different ways is called
2207 "divergent". Such changesets have multiple successor sets (each of which
2210 "divergent". Such changesets have multiple successor sets (each of which
2208 may also be split, i.e. have multiple successors).
2211 may also be split, i.e. have multiple successors).
2209
2212
2210 Results are displayed as follows::
2213 Results are displayed as follows::
2211
2214
2212 <rev1>
2215 <rev1>
2213 <successors-1A>
2216 <successors-1A>
2214 <rev2>
2217 <rev2>
2215 <successors-2A>
2218 <successors-2A>
2216 <successors-2B1> <successors-2B2> <successors-2B3>
2219 <successors-2B1> <successors-2B2> <successors-2B3>
2217
2220
2218 Here rev2 has two possible (i.e. divergent) successors sets. The first
2221 Here rev2 has two possible (i.e. divergent) successors sets. The first
2219 holds one element, whereas the second holds three (i.e. the changeset has
2222 holds one element, whereas the second holds three (i.e. the changeset has
2220 been split).
2223 been split).
2221 """
2224 """
2222 # passed to successorssets caching computation from one call to another
2225 # passed to successorssets caching computation from one call to another
2223 cache = {}
2226 cache = {}
2224 ctx2str = str
2227 ctx2str = str
2225 node2str = short
2228 node2str = short
2226 if ui.debug():
2229 if ui.debug():
2227 def ctx2str(ctx):
2230 def ctx2str(ctx):
2228 return ctx.hex()
2231 return ctx.hex()
2229 node2str = hex
2232 node2str = hex
2230 for rev in scmutil.revrange(repo, revs):
2233 for rev in scmutil.revrange(repo, revs):
2231 ctx = repo[rev]
2234 ctx = repo[rev]
2232 ui.write('%s\n'% ctx2str(ctx))
2235 ui.write('%s\n'% ctx2str(ctx))
2233 for succsset in obsutil.successorssets(repo, ctx.node(),
2236 for succsset in obsutil.successorssets(repo, ctx.node(),
2234 closest=opts['closest'],
2237 closest=opts['closest'],
2235 cache=cache):
2238 cache=cache):
2236 if succsset:
2239 if succsset:
2237 ui.write(' ')
2240 ui.write(' ')
2238 ui.write(node2str(succsset[0]))
2241 ui.write(node2str(succsset[0]))
2239 for node in succsset[1:]:
2242 for node in succsset[1:]:
2240 ui.write(' ')
2243 ui.write(' ')
2241 ui.write(node2str(node))
2244 ui.write(node2str(node))
2242 ui.write('\n')
2245 ui.write('\n')
2243
2246
2244 @command('debugtemplate',
2247 @command('debugtemplate',
2245 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2248 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2246 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2249 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2247 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2250 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2248 optionalrepo=True)
2251 optionalrepo=True)
2249 def debugtemplate(ui, repo, tmpl, **opts):
2252 def debugtemplate(ui, repo, tmpl, **opts):
2250 """parse and apply a template
2253 """parse and apply a template
2251
2254
2252 If -r/--rev is given, the template is processed as a log template and
2255 If -r/--rev is given, the template is processed as a log template and
2253 applied to the given changesets. Otherwise, it is processed as a generic
2256 applied to the given changesets. Otherwise, it is processed as a generic
2254 template.
2257 template.
2255
2258
2256 Use --verbose to print the parsed tree.
2259 Use --verbose to print the parsed tree.
2257 """
2260 """
2258 revs = None
2261 revs = None
2259 if opts[r'rev']:
2262 if opts[r'rev']:
2260 if repo is None:
2263 if repo is None:
2261 raise error.RepoError(_('there is no Mercurial repository here '
2264 raise error.RepoError(_('there is no Mercurial repository here '
2262 '(.hg not found)'))
2265 '(.hg not found)'))
2263 revs = scmutil.revrange(repo, opts[r'rev'])
2266 revs = scmutil.revrange(repo, opts[r'rev'])
2264
2267
2265 props = {}
2268 props = {}
2266 for d in opts[r'define']:
2269 for d in opts[r'define']:
2267 try:
2270 try:
2268 k, v = (e.strip() for e in d.split('=', 1))
2271 k, v = (e.strip() for e in d.split('=', 1))
2269 if not k or k == 'ui':
2272 if not k or k == 'ui':
2270 raise ValueError
2273 raise ValueError
2271 props[k] = v
2274 props[k] = v
2272 except ValueError:
2275 except ValueError:
2273 raise error.Abort(_('malformed keyword definition: %s') % d)
2276 raise error.Abort(_('malformed keyword definition: %s') % d)
2274
2277
2275 if ui.verbose:
2278 if ui.verbose:
2276 aliases = ui.configitems('templatealias')
2279 aliases = ui.configitems('templatealias')
2277 tree = templater.parse(tmpl)
2280 tree = templater.parse(tmpl)
2278 ui.note(templater.prettyformat(tree), '\n')
2281 ui.note(templater.prettyformat(tree), '\n')
2279 newtree = templater.expandaliases(tree, aliases)
2282 newtree = templater.expandaliases(tree, aliases)
2280 if newtree != tree:
2283 if newtree != tree:
2281 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2284 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2282
2285
2283 if revs is None:
2286 if revs is None:
2284 t = formatter.maketemplater(ui, tmpl)
2287 t = formatter.maketemplater(ui, tmpl)
2285 props['ui'] = ui
2288 props['ui'] = ui
2286 ui.write(t.render(props))
2289 ui.write(t.render(props))
2287 else:
2290 else:
2288 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2291 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2289 for r in revs:
2292 for r in revs:
2290 displayer.show(repo[r], **pycompat.strkwargs(props))
2293 displayer.show(repo[r], **pycompat.strkwargs(props))
2291 displayer.close()
2294 displayer.close()
2292
2295
2293 @command('debugupdatecaches', [])
2296 @command('debugupdatecaches', [])
2294 def debugupdatecaches(ui, repo, *pats, **opts):
2297 def debugupdatecaches(ui, repo, *pats, **opts):
2295 """warm all known caches in the repository"""
2298 """warm all known caches in the repository"""
2296 with repo.wlock(), repo.lock():
2299 with repo.wlock(), repo.lock():
2297 repo.updatecaches()
2300 repo.updatecaches()
2298
2301
2299 @command('debugupgraderepo', [
2302 @command('debugupgraderepo', [
2300 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2303 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2301 ('', 'run', False, _('performs an upgrade')),
2304 ('', 'run', False, _('performs an upgrade')),
2302 ])
2305 ])
2303 def debugupgraderepo(ui, repo, run=False, optimize=None):
2306 def debugupgraderepo(ui, repo, run=False, optimize=None):
2304 """upgrade a repository to use different features
2307 """upgrade a repository to use different features
2305
2308
2306 If no arguments are specified, the repository is evaluated for upgrade
2309 If no arguments are specified, the repository is evaluated for upgrade
2307 and a list of problems and potential optimizations is printed.
2310 and a list of problems and potential optimizations is printed.
2308
2311
2309 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2312 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2310 can be influenced via additional arguments. More details will be provided
2313 can be influenced via additional arguments. More details will be provided
2311 by the command output when run without ``--run``.
2314 by the command output when run without ``--run``.
2312
2315
2313 During the upgrade, the repository will be locked and no writes will be
2316 During the upgrade, the repository will be locked and no writes will be
2314 allowed.
2317 allowed.
2315
2318
2316 At the end of the upgrade, the repository may not be readable while new
2319 At the end of the upgrade, the repository may not be readable while new
2317 repository data is swapped in. This window will be as long as it takes to
2320 repository data is swapped in. This window will be as long as it takes to
2318 rename some directories inside the ``.hg`` directory. On most machines, this
2321 rename some directories inside the ``.hg`` directory. On most machines, this
2319 should complete almost instantaneously and the chances of a consumer being
2322 should complete almost instantaneously and the chances of a consumer being
2320 unable to access the repository should be low.
2323 unable to access the repository should be low.
2321 """
2324 """
2322 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2325 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2323
2326
2324 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2327 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2325 inferrepo=True)
2328 inferrepo=True)
2326 def debugwalk(ui, repo, *pats, **opts):
2329 def debugwalk(ui, repo, *pats, **opts):
2327 """show how files match on given patterns"""
2330 """show how files match on given patterns"""
2328 opts = pycompat.byteskwargs(opts)
2331 opts = pycompat.byteskwargs(opts)
2329 m = scmutil.match(repo[None], pats, opts)
2332 m = scmutil.match(repo[None], pats, opts)
2330 ui.write(('matcher: %r\n' % m))
2333 ui.write(('matcher: %r\n' % m))
2331 items = list(repo[None].walk(m))
2334 items = list(repo[None].walk(m))
2332 if not items:
2335 if not items:
2333 return
2336 return
2334 f = lambda fn: fn
2337 f = lambda fn: fn
2335 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2338 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2336 f = lambda fn: util.normpath(fn)
2339 f = lambda fn: util.normpath(fn)
2337 fmt = 'f %%-%ds %%-%ds %%s' % (
2340 fmt = 'f %%-%ds %%-%ds %%s' % (
2338 max([len(abs) for abs in items]),
2341 max([len(abs) for abs in items]),
2339 max([len(m.rel(abs)) for abs in items]))
2342 max([len(m.rel(abs)) for abs in items]))
2340 for abs in items:
2343 for abs in items:
2341 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2344 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2342 ui.write("%s\n" % line.rstrip())
2345 ui.write("%s\n" % line.rstrip())
2343
2346
2344 @command('debugwireargs',
2347 @command('debugwireargs',
2345 [('', 'three', '', 'three'),
2348 [('', 'three', '', 'three'),
2346 ('', 'four', '', 'four'),
2349 ('', 'four', '', 'four'),
2347 ('', 'five', '', 'five'),
2350 ('', 'five', '', 'five'),
2348 ] + cmdutil.remoteopts,
2351 ] + cmdutil.remoteopts,
2349 _('REPO [OPTIONS]... [ONE [TWO]]'),
2352 _('REPO [OPTIONS]... [ONE [TWO]]'),
2350 norepo=True)
2353 norepo=True)
2351 def debugwireargs(ui, repopath, *vals, **opts):
2354 def debugwireargs(ui, repopath, *vals, **opts):
2352 opts = pycompat.byteskwargs(opts)
2355 opts = pycompat.byteskwargs(opts)
2353 repo = hg.peer(ui, opts, repopath)
2356 repo = hg.peer(ui, opts, repopath)
2354 for opt in cmdutil.remoteopts:
2357 for opt in cmdutil.remoteopts:
2355 del opts[opt[1]]
2358 del opts[opt[1]]
2356 args = {}
2359 args = {}
2357 for k, v in opts.iteritems():
2360 for k, v in opts.iteritems():
2358 if v:
2361 if v:
2359 args[k] = v
2362 args[k] = v
2360 # run twice to check that we don't mess up the stream for the next command
2363 # run twice to check that we don't mess up the stream for the next command
2361 res1 = repo.debugwireargs(*vals, **args)
2364 res1 = repo.debugwireargs(*vals, **args)
2362 res2 = repo.debugwireargs(*vals, **args)
2365 res2 = repo.debugwireargs(*vals, **args)
2363 ui.write("%s\n" % res1)
2366 ui.write("%s\n" % res1)
2364 if res1 != res2:
2367 if res1 != res2:
2365 ui.warn("%s\n" % res2)
2368 ui.warn("%s\n" % res2)
@@ -1,258 +1,263 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """
8 """
9 Algorithm works in the following way. You have two repository: local and
9 Algorithm works in the following way. You have two repository: local and
10 remote. They both contains a DAG of changelists.
10 remote. They both contains a DAG of changelists.
11
11
12 The goal of the discovery protocol is to find one set of node *common*,
12 The goal of the discovery protocol is to find one set of node *common*,
13 the set of nodes shared by local and remote.
13 the set of nodes shared by local and remote.
14
14
15 One of the issue with the original protocol was latency, it could
15 One of the issue with the original protocol was latency, it could
16 potentially require lots of roundtrips to discover that the local repo was a
16 potentially require lots of roundtrips to discover that the local repo was a
17 subset of remote (which is a very common case, you usually have few changes
17 subset of remote (which is a very common case, you usually have few changes
18 compared to upstream, while upstream probably had lots of development).
18 compared to upstream, while upstream probably had lots of development).
19
19
20 The new protocol only requires one interface for the remote repo: `known()`,
20 The new protocol only requires one interface for the remote repo: `known()`,
21 which given a set of changelists tells you if they are present in the DAG.
21 which given a set of changelists tells you if they are present in the DAG.
22
22
23 The algorithm then works as follow:
23 The algorithm then works as follow:
24
24
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 all nodes are in `unknown`.
26 all nodes are in `unknown`.
27 - Take a sample from `unknown`, call `remote.known(sample)`
27 - Take a sample from `unknown`, call `remote.known(sample)`
28 - For each node that remote knows, move it and all its ancestors to `common`
28 - For each node that remote knows, move it and all its ancestors to `common`
29 - For each node that remote doesn't know, move it and all its descendants
29 - For each node that remote doesn't know, move it and all its descendants
30 to `missing`
30 to `missing`
31 - Iterate until `unknown` is empty
31 - Iterate until `unknown` is empty
32
32
33 There are a couple optimizations, first is instead of starting with a random
33 There are a couple optimizations, first is instead of starting with a random
34 sample of missing, start by sending all heads, in the case where the local
34 sample of missing, start by sending all heads, in the case where the local
35 repo is a subset, you computed the answer in one round trip.
35 repo is a subset, you computed the answer in one round trip.
36
36
37 Then you can do something similar to the bisecting strategy used when
37 Then you can do something similar to the bisecting strategy used when
38 finding faulty changesets. Instead of random samples, you can try picking
38 finding faulty changesets. Instead of random samples, you can try picking
39 nodes that will maximize the number of nodes that will be
39 nodes that will maximize the number of nodes that will be
40 classified with it (since all ancestors or descendants will be marked as well).
40 classified with it (since all ancestors or descendants will be marked as well).
41 """
41 """
42
42
43 from __future__ import absolute_import
43 from __future__ import absolute_import
44
44
45 import collections
45 import collections
46 import random
46 import random
47
47
48 from .i18n import _
48 from .i18n import _
49 from .node import (
49 from .node import (
50 nullid,
50 nullid,
51 nullrev,
51 nullrev,
52 )
52 )
53 from . import (
53 from . import (
54 dagutil,
54 dagutil,
55 error,
55 error,
56 util,
56 util,
57 )
57 )
58
58
59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 """update an existing sample to match the expected size
60 """update an existing sample to match the expected size
61
61
62 The sample is updated with nodes exponentially distant from each head of the
62 The sample is updated with nodes exponentially distant from each head of the
63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64
64
65 If a target size is specified, the sampling will stop once this size is
65 If a target size is specified, the sampling will stop once this size is
66 reached. Otherwise sampling will happen until roots of the <nodes> set are
66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 reached.
67 reached.
68
68
69 :dag: a dag object from dagutil
69 :dag: a dag object from dagutil
70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 :sample: a sample to update
71 :sample: a sample to update
72 :quicksamplesize: optional target size of the sample"""
72 :quicksamplesize: optional target size of the sample"""
73 # if nodes is empty we scan the entire graph
73 # if nodes is empty we scan the entire graph
74 if nodes:
74 if nodes:
75 heads = dag.headsetofconnecteds(nodes)
75 heads = dag.headsetofconnecteds(nodes)
76 else:
76 else:
77 heads = dag.heads()
77 heads = dag.heads()
78 dist = {}
78 dist = {}
79 visit = collections.deque(heads)
79 visit = collections.deque(heads)
80 seen = set()
80 seen = set()
81 factor = 1
81 factor = 1
82 while visit:
82 while visit:
83 curr = visit.popleft()
83 curr = visit.popleft()
84 if curr in seen:
84 if curr in seen:
85 continue
85 continue
86 d = dist.setdefault(curr, 1)
86 d = dist.setdefault(curr, 1)
87 if d > factor:
87 if d > factor:
88 factor *= 2
88 factor *= 2
89 if d == factor:
89 if d == factor:
90 sample.add(curr)
90 sample.add(curr)
91 if quicksamplesize and (len(sample) >= quicksamplesize):
91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 return
92 return
93 seen.add(curr)
93 seen.add(curr)
94 for p in dag.parents(curr):
94 for p in dag.parents(curr):
95 if not nodes or p in nodes:
95 if not nodes or p in nodes:
96 dist.setdefault(p, d + 1)
96 dist.setdefault(p, d + 1)
97 visit.append(p)
97 visit.append(p)
98
98
99 def _takequicksample(dag, nodes, size):
99 def _takequicksample(dag, nodes, size):
100 """takes a quick sample of size <size>
100 """takes a quick sample of size <size>
101
101
102 It is meant for initial sampling and focuses on querying heads and close
102 It is meant for initial sampling and focuses on querying heads and close
103 ancestors of heads.
103 ancestors of heads.
104
104
105 :dag: a dag object
105 :dag: a dag object
106 :nodes: set of nodes to discover
106 :nodes: set of nodes to discover
107 :size: the maximum size of the sample"""
107 :size: the maximum size of the sample"""
108 sample = dag.headsetofconnecteds(nodes)
108 sample = dag.headsetofconnecteds(nodes)
109 if size <= len(sample):
109 if size <= len(sample):
110 return _limitsample(sample, size)
110 return _limitsample(sample, size)
111 _updatesample(dag, None, sample, quicksamplesize=size)
111 _updatesample(dag, None, sample, quicksamplesize=size)
112 return sample
112 return sample
113
113
114 def _takefullsample(dag, nodes, size):
114 def _takefullsample(dag, nodes, size):
115 sample = dag.headsetofconnecteds(nodes)
115 sample = dag.headsetofconnecteds(nodes)
116 # update from heads
116 # update from heads
117 _updatesample(dag, nodes, sample)
117 _updatesample(dag, nodes, sample)
118 # update from roots
118 # update from roots
119 _updatesample(dag.inverse(), nodes, sample)
119 _updatesample(dag.inverse(), nodes, sample)
120 assert sample
120 assert sample
121 sample = _limitsample(sample, size)
121 sample = _limitsample(sample, size)
122 if len(sample) < size:
122 if len(sample) < size:
123 more = size - len(sample)
123 more = size - len(sample)
124 sample.update(random.sample(list(nodes - sample), more))
124 sample.update(random.sample(list(nodes - sample), more))
125 return sample
125 return sample
126
126
127 def _limitsample(sample, desiredlen):
127 def _limitsample(sample, desiredlen):
128 """return a random subset of sample of at most desiredlen item"""
128 """return a random subset of sample of at most desiredlen item"""
129 if len(sample) > desiredlen:
129 if len(sample) > desiredlen:
130 sample = set(random.sample(sample, desiredlen))
130 sample = set(random.sample(sample, desiredlen))
131 return sample
131 return sample
132
132
133 def findcommonheads(ui, local, remote,
133 def findcommonheads(ui, local, remote,
134 initialsamplesize=100,
134 initialsamplesize=100,
135 fullsamplesize=200,
135 fullsamplesize=200,
136 abortwhenunrelated=True):
136 abortwhenunrelated=True,
137 ancestorsof=None):
137 '''Return a tuple (common, anyincoming, remoteheads) used to identify
138 '''Return a tuple (common, anyincoming, remoteheads) used to identify
138 missing nodes from or in remote.
139 missing nodes from or in remote.
139 '''
140 '''
140 start = util.timer()
141 start = util.timer()
141
142
142 roundtrips = 0
143 roundtrips = 0
143 cl = local.changelog
144 cl = local.changelog
144 dag = dagutil.revlogdag(cl)
145 localsubset = None
146 if ancestorsof is not None:
147 rev = local.changelog.rev
148 localsubset = [rev(n) for n in ancestorsof]
149 dag = dagutil.revlogdag(cl, localsubset=localsubset)
145
150
146 # early exit if we know all the specified remote heads already
151 # early exit if we know all the specified remote heads already
147 ui.debug("query 1; heads\n")
152 ui.debug("query 1; heads\n")
148 roundtrips += 1
153 roundtrips += 1
149 ownheads = dag.heads()
154 ownheads = dag.heads()
150 sample = _limitsample(ownheads, initialsamplesize)
155 sample = _limitsample(ownheads, initialsamplesize)
151 # indices between sample and externalized version must match
156 # indices between sample and externalized version must match
152 sample = list(sample)
157 sample = list(sample)
153 batch = remote.iterbatch()
158 batch = remote.iterbatch()
154 batch.heads()
159 batch.heads()
155 batch.known(dag.externalizeall(sample))
160 batch.known(dag.externalizeall(sample))
156 batch.submit()
161 batch.submit()
157 srvheadhashes, yesno = batch.results()
162 srvheadhashes, yesno = batch.results()
158
163
159 if cl.tip() == nullid:
164 if cl.tip() == nullid:
160 if srvheadhashes != [nullid]:
165 if srvheadhashes != [nullid]:
161 return [nullid], True, srvheadhashes
166 return [nullid], True, srvheadhashes
162 return [nullid], False, []
167 return [nullid], False, []
163
168
164 # start actual discovery (we note this before the next "if" for
169 # start actual discovery (we note this before the next "if" for
165 # compatibility reasons)
170 # compatibility reasons)
166 ui.status(_("searching for changes\n"))
171 ui.status(_("searching for changes\n"))
167
172
168 srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
173 srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
169 if len(srvheads) == len(srvheadhashes):
174 if len(srvheads) == len(srvheadhashes):
170 ui.debug("all remote heads known locally\n")
175 ui.debug("all remote heads known locally\n")
171 return (srvheadhashes, False, srvheadhashes,)
176 return (srvheadhashes, False, srvheadhashes,)
172
177
173 if sample and len(ownheads) <= initialsamplesize and all(yesno):
178 if sample and len(ownheads) <= initialsamplesize and all(yesno):
174 ui.note(_("all local heads known remotely\n"))
179 ui.note(_("all local heads known remotely\n"))
175 ownheadhashes = dag.externalizeall(ownheads)
180 ownheadhashes = dag.externalizeall(ownheads)
176 return (ownheadhashes, True, srvheadhashes,)
181 return (ownheadhashes, True, srvheadhashes,)
177
182
178 # full blown discovery
183 # full blown discovery
179
184
180 # own nodes I know we both know
185 # own nodes I know we both know
181 # treat remote heads (and maybe own heads) as a first implicit sample
186 # treat remote heads (and maybe own heads) as a first implicit sample
182 # response
187 # response
183 common = cl.incrementalmissingrevs(srvheads)
188 common = cl.incrementalmissingrevs(srvheads)
184 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
189 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
185 common.addbases(commoninsample)
190 common.addbases(commoninsample)
186 # own nodes where I don't know if remote knows them
191 # own nodes where I don't know if remote knows them
187 undecided = set(common.missingancestors(ownheads))
192 undecided = set(common.missingancestors(ownheads))
188 # own nodes I know remote lacks
193 # own nodes I know remote lacks
189 missing = set()
194 missing = set()
190
195
191 full = False
196 full = False
192 while undecided:
197 while undecided:
193
198
194 if sample:
199 if sample:
195 missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
200 missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
196 missing.update(dag.descendantset(missinginsample, missing))
201 missing.update(dag.descendantset(missinginsample, missing))
197
202
198 undecided.difference_update(missing)
203 undecided.difference_update(missing)
199
204
200 if not undecided:
205 if not undecided:
201 break
206 break
202
207
203 if full or common.hasbases():
208 if full or common.hasbases():
204 if full:
209 if full:
205 ui.note(_("sampling from both directions\n"))
210 ui.note(_("sampling from both directions\n"))
206 else:
211 else:
207 ui.debug("taking initial sample\n")
212 ui.debug("taking initial sample\n")
208 samplefunc = _takefullsample
213 samplefunc = _takefullsample
209 targetsize = fullsamplesize
214 targetsize = fullsamplesize
210 else:
215 else:
211 # use even cheaper initial sample
216 # use even cheaper initial sample
212 ui.debug("taking quick initial sample\n")
217 ui.debug("taking quick initial sample\n")
213 samplefunc = _takequicksample
218 samplefunc = _takequicksample
214 targetsize = initialsamplesize
219 targetsize = initialsamplesize
215 if len(undecided) < targetsize:
220 if len(undecided) < targetsize:
216 sample = list(undecided)
221 sample = list(undecided)
217 else:
222 else:
218 sample = samplefunc(dag, undecided, targetsize)
223 sample = samplefunc(dag, undecided, targetsize)
219 sample = _limitsample(sample, targetsize)
224 sample = _limitsample(sample, targetsize)
220
225
221 roundtrips += 1
226 roundtrips += 1
222 ui.progress(_('searching'), roundtrips, unit=_('queries'))
227 ui.progress(_('searching'), roundtrips, unit=_('queries'))
223 ui.debug("query %i; still undecided: %i, sample size is: %i\n"
228 ui.debug("query %i; still undecided: %i, sample size is: %i\n"
224 % (roundtrips, len(undecided), len(sample)))
229 % (roundtrips, len(undecided), len(sample)))
225 # indices between sample and externalized version must match
230 # indices between sample and externalized version must match
226 sample = list(sample)
231 sample = list(sample)
227 yesno = remote.known(dag.externalizeall(sample))
232 yesno = remote.known(dag.externalizeall(sample))
228 full = True
233 full = True
229
234
230 if sample:
235 if sample:
231 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
236 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
232 common.addbases(commoninsample)
237 common.addbases(commoninsample)
233 common.removeancestorsfrom(undecided)
238 common.removeancestorsfrom(undecided)
234
239
235 # heads(common) == heads(common.bases) since common represents common.bases
240 # heads(common) == heads(common.bases) since common represents common.bases
236 # and all its ancestors
241 # and all its ancestors
237 result = dag.headsetofconnecteds(common.bases)
242 result = dag.headsetofconnecteds(common.bases)
238 # common.bases can include nullrev, but our contract requires us to not
243 # common.bases can include nullrev, but our contract requires us to not
239 # return any heads in that case, so discard that
244 # return any heads in that case, so discard that
240 result.discard(nullrev)
245 result.discard(nullrev)
241 elapsed = util.timer() - start
246 elapsed = util.timer() - start
242 ui.progress(_('searching'), None)
247 ui.progress(_('searching'), None)
243 ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
248 ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
244 msg = ('found %d common and %d unknown server heads,'
249 msg = ('found %d common and %d unknown server heads,'
245 ' %d roundtrips in %.4fs\n')
250 ' %d roundtrips in %.4fs\n')
246 missing = set(result) - set(srvheads)
251 missing = set(result) - set(srvheads)
247 ui.log('discovery', msg, len(result), len(missing), roundtrips,
252 ui.log('discovery', msg, len(result), len(missing), roundtrips,
248 elapsed)
253 elapsed)
249
254
250 if not result and srvheadhashes != [nullid]:
255 if not result and srvheadhashes != [nullid]:
251 if abortwhenunrelated:
256 if abortwhenunrelated:
252 raise error.Abort(_("repository is unrelated"))
257 raise error.Abort(_("repository is unrelated"))
253 else:
258 else:
254 ui.warn(_("warning: repository is unrelated\n"))
259 ui.warn(_("warning: repository is unrelated\n"))
255 return ({nullid}, True, srvheadhashes,)
260 return ({nullid}, True, srvheadhashes,)
256
261
257 anyincoming = (srvheadhashes != [nullid])
262 anyincoming = (srvheadhashes != [nullid])
258 return dag.externalizeall(result), anyincoming, srvheadhashes
263 return dag.externalizeall(result), anyincoming, srvheadhashes
@@ -1,385 +1,385 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 add
3 add
4 addremove
4 addremove
5 annotate
5 annotate
6 archive
6 archive
7 backout
7 backout
8 bisect
8 bisect
9 bookmarks
9 bookmarks
10 branch
10 branch
11 branches
11 branches
12 bundle
12 bundle
13 cat
13 cat
14 clone
14 clone
15 commit
15 commit
16 config
16 config
17 copy
17 copy
18 diff
18 diff
19 export
19 export
20 files
20 files
21 forget
21 forget
22 graft
22 graft
23 grep
23 grep
24 heads
24 heads
25 help
25 help
26 identify
26 identify
27 import
27 import
28 incoming
28 incoming
29 init
29 init
30 locate
30 locate
31 log
31 log
32 manifest
32 manifest
33 merge
33 merge
34 outgoing
34 outgoing
35 parents
35 parents
36 paths
36 paths
37 phase
37 phase
38 pull
38 pull
39 push
39 push
40 recover
40 recover
41 remove
41 remove
42 rename
42 rename
43 resolve
43 resolve
44 revert
44 revert
45 rollback
45 rollback
46 root
46 root
47 serve
47 serve
48 status
48 status
49 summary
49 summary
50 tag
50 tag
51 tags
51 tags
52 tip
52 tip
53 unbundle
53 unbundle
54 update
54 update
55 verify
55 verify
56 version
56 version
57
57
58 Show all commands that start with "a"
58 Show all commands that start with "a"
59 $ hg debugcomplete a
59 $ hg debugcomplete a
60 add
60 add
61 addremove
61 addremove
62 annotate
62 annotate
63 archive
63 archive
64
64
65 Do not show debug commands if there are other candidates
65 Do not show debug commands if there are other candidates
66 $ hg debugcomplete d
66 $ hg debugcomplete d
67 diff
67 diff
68
68
69 Show debug commands if there are no other candidates
69 Show debug commands if there are no other candidates
70 $ hg debugcomplete debug
70 $ hg debugcomplete debug
71 debugancestor
71 debugancestor
72 debugapplystreamclonebundle
72 debugapplystreamclonebundle
73 debugbuilddag
73 debugbuilddag
74 debugbundle
74 debugbundle
75 debugcapabilities
75 debugcapabilities
76 debugcheckstate
76 debugcheckstate
77 debugcolor
77 debugcolor
78 debugcommands
78 debugcommands
79 debugcomplete
79 debugcomplete
80 debugconfig
80 debugconfig
81 debugcreatestreamclonebundle
81 debugcreatestreamclonebundle
82 debugdag
82 debugdag
83 debugdata
83 debugdata
84 debugdate
84 debugdate
85 debugdeltachain
85 debugdeltachain
86 debugdirstate
86 debugdirstate
87 debugdiscovery
87 debugdiscovery
88 debugextensions
88 debugextensions
89 debugfileset
89 debugfileset
90 debugfsinfo
90 debugfsinfo
91 debuggetbundle
91 debuggetbundle
92 debugignore
92 debugignore
93 debugindex
93 debugindex
94 debugindexdot
94 debugindexdot
95 debuginstall
95 debuginstall
96 debugknown
96 debugknown
97 debuglabelcomplete
97 debuglabelcomplete
98 debuglocks
98 debuglocks
99 debugmergestate
99 debugmergestate
100 debugnamecomplete
100 debugnamecomplete
101 debugobsolete
101 debugobsolete
102 debugpathcomplete
102 debugpathcomplete
103 debugpickmergetool
103 debugpickmergetool
104 debugpushkey
104 debugpushkey
105 debugpvec
105 debugpvec
106 debugrebuilddirstate
106 debugrebuilddirstate
107 debugrebuildfncache
107 debugrebuildfncache
108 debugrename
108 debugrename
109 debugrevlog
109 debugrevlog
110 debugrevspec
110 debugrevspec
111 debugsetparents
111 debugsetparents
112 debugssl
112 debugssl
113 debugsub
113 debugsub
114 debugsuccessorssets
114 debugsuccessorssets
115 debugtemplate
115 debugtemplate
116 debugupdatecaches
116 debugupdatecaches
117 debugupgraderepo
117 debugupgraderepo
118 debugwalk
118 debugwalk
119 debugwireargs
119 debugwireargs
120
120
121 Do not show the alias of a debug command if there are other candidates
121 Do not show the alias of a debug command if there are other candidates
122 (this should hide rawcommit)
122 (this should hide rawcommit)
123 $ hg debugcomplete r
123 $ hg debugcomplete r
124 recover
124 recover
125 remove
125 remove
126 rename
126 rename
127 resolve
127 resolve
128 revert
128 revert
129 rollback
129 rollback
130 root
130 root
131 Show the alias of a debug command if there are no other candidates
131 Show the alias of a debug command if there are no other candidates
132 $ hg debugcomplete rawc
132 $ hg debugcomplete rawc
133
133
134
134
135 Show the global options
135 Show the global options
136 $ hg debugcomplete --options | sort
136 $ hg debugcomplete --options | sort
137 --color
137 --color
138 --config
138 --config
139 --cwd
139 --cwd
140 --debug
140 --debug
141 --debugger
141 --debugger
142 --encoding
142 --encoding
143 --encodingmode
143 --encodingmode
144 --help
144 --help
145 --hidden
145 --hidden
146 --noninteractive
146 --noninteractive
147 --pager
147 --pager
148 --profile
148 --profile
149 --quiet
149 --quiet
150 --repository
150 --repository
151 --time
151 --time
152 --traceback
152 --traceback
153 --verbose
153 --verbose
154 --version
154 --version
155 -R
155 -R
156 -h
156 -h
157 -q
157 -q
158 -v
158 -v
159 -y
159 -y
160
160
161 Show the options for the "serve" command
161 Show the options for the "serve" command
162 $ hg debugcomplete --options serve | sort
162 $ hg debugcomplete --options serve | sort
163 --accesslog
163 --accesslog
164 --address
164 --address
165 --certificate
165 --certificate
166 --cmdserver
166 --cmdserver
167 --color
167 --color
168 --config
168 --config
169 --cwd
169 --cwd
170 --daemon
170 --daemon
171 --daemon-postexec
171 --daemon-postexec
172 --debug
172 --debug
173 --debugger
173 --debugger
174 --encoding
174 --encoding
175 --encodingmode
175 --encodingmode
176 --errorlog
176 --errorlog
177 --help
177 --help
178 --hidden
178 --hidden
179 --ipv6
179 --ipv6
180 --name
180 --name
181 --noninteractive
181 --noninteractive
182 --pager
182 --pager
183 --pid-file
183 --pid-file
184 --port
184 --port
185 --prefix
185 --prefix
186 --profile
186 --profile
187 --quiet
187 --quiet
188 --repository
188 --repository
189 --stdio
189 --stdio
190 --style
190 --style
191 --subrepos
191 --subrepos
192 --templates
192 --templates
193 --time
193 --time
194 --traceback
194 --traceback
195 --verbose
195 --verbose
196 --version
196 --version
197 --web-conf
197 --web-conf
198 -6
198 -6
199 -A
199 -A
200 -E
200 -E
201 -R
201 -R
202 -S
202 -S
203 -a
203 -a
204 -d
204 -d
205 -h
205 -h
206 -n
206 -n
207 -p
207 -p
208 -q
208 -q
209 -t
209 -t
210 -v
210 -v
211 -y
211 -y
212
212
213 Show an error if we use --options with an ambiguous abbreviation
213 Show an error if we use --options with an ambiguous abbreviation
214 $ hg debugcomplete --options s
214 $ hg debugcomplete --options s
215 hg: command 's' is ambiguous:
215 hg: command 's' is ambiguous:
216 serve showconfig status summary
216 serve showconfig status summary
217 [255]
217 [255]
218
218
219 Show all commands + options
219 Show all commands + options
220 $ hg debugcommands
220 $ hg debugcommands
221 add: include, exclude, subrepos, dry-run
221 add: include, exclude, subrepos, dry-run
222 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
222 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
223 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
223 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
224 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
224 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
225 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
225 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
226 export: output, switch-parent, rev, text, git, binary, nodates
226 export: output, switch-parent, rev, text, git, binary, nodates
227 forget: include, exclude
227 forget: include, exclude
228 init: ssh, remotecmd, insecure
228 init: ssh, remotecmd, insecure
229 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
229 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
230 merge: force, rev, preview, tool
230 merge: force, rev, preview, tool
231 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
231 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
232 push: force, rev, bookmark, branch, new-branch, pushvars, ssh, remotecmd, insecure
232 push: force, rev, bookmark, branch, new-branch, pushvars, ssh, remotecmd, insecure
233 remove: after, force, subrepos, include, exclude
233 remove: after, force, subrepos, include, exclude
234 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
234 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
235 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
235 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
236 summary: remote
236 summary: remote
237 update: clean, check, merge, date, rev, tool
237 update: clean, check, merge, date, rev, tool
238 addremove: similarity, subrepos, include, exclude, dry-run
238 addremove: similarity, subrepos, include, exclude, dry-run
239 archive: no-decode, prefix, rev, type, subrepos, include, exclude
239 archive: no-decode, prefix, rev, type, subrepos, include, exclude
240 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
240 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
241 bisect: reset, good, bad, skip, extend, command, noupdate
241 bisect: reset, good, bad, skip, extend, command, noupdate
242 bookmarks: force, rev, delete, rename, inactive, template
242 bookmarks: force, rev, delete, rename, inactive, template
243 branch: force, clean
243 branch: force, clean
244 branches: active, closed, template
244 branches: active, closed, template
245 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
245 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
246 cat: output, rev, decode, include, exclude, template
246 cat: output, rev, decode, include, exclude, template
247 config: untrusted, edit, local, global, template
247 config: untrusted, edit, local, global, template
248 copy: after, force, include, exclude, dry-run
248 copy: after, force, include, exclude, dry-run
249 debugancestor:
249 debugancestor:
250 debugapplystreamclonebundle:
250 debugapplystreamclonebundle:
251 debugbuilddag: mergeable-file, overwritten-file, new-file
251 debugbuilddag: mergeable-file, overwritten-file, new-file
252 debugbundle: all, part-type, spec
252 debugbundle: all, part-type, spec
253 debugcapabilities:
253 debugcapabilities:
254 debugcheckstate:
254 debugcheckstate:
255 debugcolor: style
255 debugcolor: style
256 debugcommands:
256 debugcommands:
257 debugcomplete: options
257 debugcomplete: options
258 debugcreatestreamclonebundle:
258 debugcreatestreamclonebundle:
259 debugdag: tags, branches, dots, spaces
259 debugdag: tags, branches, dots, spaces
260 debugdata: changelog, manifest, dir
260 debugdata: changelog, manifest, dir
261 debugdate: extended
261 debugdate: extended
262 debugdeltachain: changelog, manifest, dir, template
262 debugdeltachain: changelog, manifest, dir, template
263 debugdirstate: nodates, datesort
263 debugdirstate: nodates, datesort
264 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
264 debugdiscovery: old, nonheads, rev, ssh, remotecmd, insecure
265 debugextensions: template
265 debugextensions: template
266 debugfileset: rev
266 debugfileset: rev
267 debugfsinfo:
267 debugfsinfo:
268 debuggetbundle: head, common, type
268 debuggetbundle: head, common, type
269 debugignore:
269 debugignore:
270 debugindex: changelog, manifest, dir, format
270 debugindex: changelog, manifest, dir, format
271 debugindexdot: changelog, manifest, dir
271 debugindexdot: changelog, manifest, dir
272 debuginstall: template
272 debuginstall: template
273 debugknown:
273 debugknown:
274 debuglabelcomplete:
274 debuglabelcomplete:
275 debuglocks: force-lock, force-wlock
275 debuglocks: force-lock, force-wlock
276 debugmergestate:
276 debugmergestate:
277 debugnamecomplete:
277 debugnamecomplete:
278 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
278 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
279 debugpathcomplete: full, normal, added, removed
279 debugpathcomplete: full, normal, added, removed
280 debugpickmergetool: rev, changedelete, include, exclude, tool
280 debugpickmergetool: rev, changedelete, include, exclude, tool
281 debugpushkey:
281 debugpushkey:
282 debugpvec:
282 debugpvec:
283 debugrebuilddirstate: rev, minimal
283 debugrebuilddirstate: rev, minimal
284 debugrebuildfncache:
284 debugrebuildfncache:
285 debugrename: rev
285 debugrename: rev
286 debugrevlog: changelog, manifest, dir, dump
286 debugrevlog: changelog, manifest, dir, dump
287 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
287 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
288 debugsetparents:
288 debugsetparents:
289 debugssl:
289 debugssl:
290 debugsub: rev
290 debugsub: rev
291 debugsuccessorssets: closest
291 debugsuccessorssets: closest
292 debugtemplate: rev, define
292 debugtemplate: rev, define
293 debugupdatecaches:
293 debugupdatecaches:
294 debugupgraderepo: optimize, run
294 debugupgraderepo: optimize, run
295 debugwalk: include, exclude
295 debugwalk: include, exclude
296 debugwireargs: three, four, five, ssh, remotecmd, insecure
296 debugwireargs: three, four, five, ssh, remotecmd, insecure
297 files: rev, print0, include, exclude, template, subrepos
297 files: rev, print0, include, exclude, template, subrepos
298 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
298 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
299 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
299 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
300 heads: rev, topo, active, closed, style, template
300 heads: rev, topo, active, closed, style, template
301 help: extension, command, keyword, system
301 help: extension, command, keyword, system
302 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
302 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
303 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
303 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
304 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
304 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
305 locate: rev, print0, fullpath, include, exclude
305 locate: rev, print0, fullpath, include, exclude
306 manifest: rev, all, template
306 manifest: rev, all, template
307 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
307 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
308 parents: rev, style, template
308 parents: rev, style, template
309 paths: template
309 paths: template
310 phase: public, draft, secret, force, rev
310 phase: public, draft, secret, force, rev
311 recover:
311 recover:
312 rename: after, force, include, exclude, dry-run
312 rename: after, force, include, exclude, dry-run
313 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
313 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
314 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
314 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
315 rollback: dry-run, force
315 rollback: dry-run, force
316 root:
316 root:
317 tag: force, local, rev, remove, edit, message, date, user
317 tag: force, local, rev, remove, edit, message, date, user
318 tags: template
318 tags: template
319 tip: patch, git, style, template
319 tip: patch, git, style, template
320 unbundle: update
320 unbundle: update
321 verify:
321 verify:
322 version: template
322 version: template
323
323
324 $ hg init a
324 $ hg init a
325 $ cd a
325 $ cd a
326 $ echo fee > fee
326 $ echo fee > fee
327 $ hg ci -q -Amfee
327 $ hg ci -q -Amfee
328 $ hg tag fee
328 $ hg tag fee
329 $ mkdir fie
329 $ mkdir fie
330 $ echo dead > fie/dead
330 $ echo dead > fie/dead
331 $ echo live > fie/live
331 $ echo live > fie/live
332 $ hg bookmark fo
332 $ hg bookmark fo
333 $ hg branch -q fie
333 $ hg branch -q fie
334 $ hg ci -q -Amfie
334 $ hg ci -q -Amfie
335 $ echo fo > fo
335 $ echo fo > fo
336 $ hg branch -qf default
336 $ hg branch -qf default
337 $ hg ci -q -Amfo
337 $ hg ci -q -Amfo
338 $ echo Fum > Fum
338 $ echo Fum > Fum
339 $ hg ci -q -AmFum
339 $ hg ci -q -AmFum
340 $ hg bookmark Fum
340 $ hg bookmark Fum
341
341
342 Test debugpathcomplete
342 Test debugpathcomplete
343
343
344 $ hg debugpathcomplete f
344 $ hg debugpathcomplete f
345 fee
345 fee
346 fie
346 fie
347 fo
347 fo
348 $ hg debugpathcomplete -f f
348 $ hg debugpathcomplete -f f
349 fee
349 fee
350 fie/dead
350 fie/dead
351 fie/live
351 fie/live
352 fo
352 fo
353
353
354 $ hg rm Fum
354 $ hg rm Fum
355 $ hg debugpathcomplete -r F
355 $ hg debugpathcomplete -r F
356 Fum
356 Fum
357
357
358 Test debugnamecomplete
358 Test debugnamecomplete
359
359
360 $ hg debugnamecomplete
360 $ hg debugnamecomplete
361 Fum
361 Fum
362 default
362 default
363 fee
363 fee
364 fie
364 fie
365 fo
365 fo
366 tip
366 tip
367 $ hg debugnamecomplete f
367 $ hg debugnamecomplete f
368 fee
368 fee
369 fie
369 fie
370 fo
370 fo
371
371
372 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
372 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
373 used for completions in some shells.
373 used for completions in some shells.
374
374
375 $ hg debuglabelcomplete
375 $ hg debuglabelcomplete
376 Fum
376 Fum
377 default
377 default
378 fee
378 fee
379 fie
379 fie
380 fo
380 fo
381 tip
381 tip
382 $ hg debuglabelcomplete f
382 $ hg debuglabelcomplete f
383 fee
383 fee
384 fie
384 fie
385 fo
385 fo
@@ -1,413 +1,552 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
19 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
20 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
21 > echo
24 > echo
22 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
23 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
24 > cd ..
30 > cd ..
25 > }
31 > }
26
32
27
33
28 Small superset:
34 Small superset:
29
35
30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
32 > <f +4 :a2
38 > <f +4 :a2
33 > +5 :b2
39 > +5 :b2
34 > <f +3 :b3'
40 > <f +3 :b3'
35
41
36 % -- a -> b tree
42 % -- a -> b tree
37 comparing with b
43 comparing with b
38 searching for changes
44 searching for changes
39 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
40 common heads: 01241442b3c2 b5714e113bc0
46 common heads: 01241442b3c2 b5714e113bc0
41 local is subset
47 local is subset
42
48
43 % -- a -> b set
49 % -- a -> b set
44 comparing with b
50 comparing with b
45 query 1; heads
51 query 1; heads
46 searching for changes
52 searching for changes
47 all local heads known remotely
53 all local heads known remotely
48 common heads: 01241442b3c2 b5714e113bc0
54 common heads: 01241442b3c2 b5714e113bc0
49 local is subset
55 local is subset
50
56
57 % -- a -> b set (tip only)
58 comparing with b
59 query 1; heads
60 searching for changes
61 all local heads known remotely
62 common heads: b5714e113bc0
63
51 % -- b -> a tree
64 % -- b -> a tree
52 comparing with a
65 comparing with a
53 searching for changes
66 searching for changes
54 unpruned common: 01241442b3c2 b5714e113bc0
67 unpruned common: 01241442b3c2 b5714e113bc0
55 common heads: 01241442b3c2 b5714e113bc0
68 common heads: 01241442b3c2 b5714e113bc0
56 remote is subset
69 remote is subset
57
70
58 % -- b -> a set
71 % -- b -> a set
59 comparing with a
72 comparing with a
60 query 1; heads
73 query 1; heads
61 searching for changes
74 searching for changes
62 all remote heads known locally
75 all remote heads known locally
63 common heads: 01241442b3c2 b5714e113bc0
76 common heads: 01241442b3c2 b5714e113bc0
64 remote is subset
77 remote is subset
78
79 % -- b -> a set (tip only)
80 comparing with a
81 query 1; heads
82 searching for changes
83 all remote heads known locally
84 common heads: 01241442b3c2 b5714e113bc0
85 remote is subset
65
86
66
87
67 Many new:
88 Many new:
68
89
69 $ testdesc '-ra1 -ra2' '-rb' '
90 $ testdesc '-ra1 -ra2' '-rb' '
70 > +2:f +3:a1 +3:b
91 > +2:f +3:a1 +3:b
71 > <f +30 :a2'
92 > <f +30 :a2'
72
93
73 % -- a -> b tree
94 % -- a -> b tree
74 comparing with b
95 comparing with b
75 searching for changes
96 searching for changes
76 unpruned common: bebd167eb94d
97 unpruned common: bebd167eb94d
77 common heads: bebd167eb94d
98 common heads: bebd167eb94d
78
99
79 % -- a -> b set
100 % -- a -> b set
80 comparing with b
101 comparing with b
81 query 1; heads
102 query 1; heads
82 searching for changes
103 searching for changes
83 taking initial sample
104 taking initial sample
84 searching: 2 queries
105 searching: 2 queries
85 query 2; still undecided: 29, sample size is: 29
106 query 2; still undecided: 29, sample size is: 29
86 2 total queries in *.????s (glob)
107 2 total queries in *.????s (glob)
87 common heads: bebd167eb94d
108 common heads: bebd167eb94d
88
109
110 % -- a -> b set (tip only)
111 comparing with b
112 query 1; heads
113 searching for changes
114 taking quick initial sample
115 searching: 2 queries
116 query 2; still undecided: 31, sample size is: 31
117 2 total queries in *.????s (glob)
118 common heads: 66f7d451a68b
119
89 % -- b -> a tree
120 % -- b -> a tree
90 comparing with a
121 comparing with a
91 searching for changes
122 searching for changes
92 unpruned common: 66f7d451a68b bebd167eb94d
123 unpruned common: 66f7d451a68b bebd167eb94d
93 common heads: bebd167eb94d
124 common heads: bebd167eb94d
94
125
95 % -- b -> a set
126 % -- b -> a set
96 comparing with a
127 comparing with a
97 query 1; heads
128 query 1; heads
98 searching for changes
129 searching for changes
99 taking initial sample
130 taking initial sample
100 searching: 2 queries
131 searching: 2 queries
101 query 2; still undecided: 2, sample size is: 2
132 query 2; still undecided: 2, sample size is: 2
102 2 total queries in *.????s (glob)
133 2 total queries in *.????s (glob)
103 common heads: bebd167eb94d
134 common heads: bebd167eb94d
135
136 % -- b -> a set (tip only)
137 comparing with a
138 query 1; heads
139 searching for changes
140 taking initial sample
141 searching: 2 queries
142 query 2; still undecided: 2, sample size is: 2
143 2 total queries in *.????s (glob)
144 common heads: bebd167eb94d
104
145
105 Both sides many new with stub:
146 Both sides many new with stub:
106
147
107 $ testdesc '-ra1 -ra2' '-rb' '
148 $ testdesc '-ra1 -ra2' '-rb' '
108 > +2:f +2:a1 +30 :b
149 > +2:f +2:a1 +30 :b
109 > <f +30 :a2'
150 > <f +30 :a2'
110
151
111 % -- a -> b tree
152 % -- a -> b tree
112 comparing with b
153 comparing with b
113 searching for changes
154 searching for changes
114 unpruned common: 2dc09a01254d
155 unpruned common: 2dc09a01254d
115 common heads: 2dc09a01254d
156 common heads: 2dc09a01254d
116
157
117 % -- a -> b set
158 % -- a -> b set
118 comparing with b
159 comparing with b
119 query 1; heads
160 query 1; heads
120 searching for changes
161 searching for changes
121 taking initial sample
162 taking initial sample
122 searching: 2 queries
163 searching: 2 queries
123 query 2; still undecided: 29, sample size is: 29
164 query 2; still undecided: 29, sample size is: 29
124 2 total queries in *.????s (glob)
165 2 total queries in *.????s (glob)
125 common heads: 2dc09a01254d
166 common heads: 2dc09a01254d
126
167
168 % -- a -> b set (tip only)
169 comparing with b
170 query 1; heads
171 searching for changes
172 taking quick initial sample
173 searching: 2 queries
174 query 2; still undecided: 31, sample size is: 31
175 2 total queries in *.????s (glob)
176 common heads: 66f7d451a68b
177
127 % -- b -> a tree
178 % -- b -> a tree
128 comparing with a
179 comparing with a
129 searching for changes
180 searching for changes
130 unpruned common: 2dc09a01254d 66f7d451a68b
181 unpruned common: 2dc09a01254d 66f7d451a68b
131 common heads: 2dc09a01254d
182 common heads: 2dc09a01254d
132
183
133 % -- b -> a set
184 % -- b -> a set
134 comparing with a
185 comparing with a
135 query 1; heads
186 query 1; heads
136 searching for changes
187 searching for changes
137 taking initial sample
188 taking initial sample
138 searching: 2 queries
189 searching: 2 queries
139 query 2; still undecided: 29, sample size is: 29
190 query 2; still undecided: 29, sample size is: 29
140 2 total queries in *.????s (glob)
191 2 total queries in *.????s (glob)
141 common heads: 2dc09a01254d
192 common heads: 2dc09a01254d
193
194 % -- b -> a set (tip only)
195 comparing with a
196 query 1; heads
197 searching for changes
198 taking initial sample
199 searching: 2 queries
200 query 2; still undecided: 29, sample size is: 29
201 2 total queries in *.????s (glob)
202 common heads: 2dc09a01254d
142
203
143
204
144 Both many new:
205 Both many new:
145
206
146 $ testdesc '-ra' '-rb' '
207 $ testdesc '-ra' '-rb' '
147 > +2:f +30 :b
208 > +2:f +30 :b
148 > <f +30 :a'
209 > <f +30 :a'
149
210
150 % -- a -> b tree
211 % -- a -> b tree
151 comparing with b
212 comparing with b
152 searching for changes
213 searching for changes
153 unpruned common: 66f7d451a68b
214 unpruned common: 66f7d451a68b
154 common heads: 66f7d451a68b
215 common heads: 66f7d451a68b
155
216
156 % -- a -> b set
217 % -- a -> b set
157 comparing with b
218 comparing with b
158 query 1; heads
219 query 1; heads
159 searching for changes
220 searching for changes
160 taking quick initial sample
221 taking quick initial sample
161 searching: 2 queries
222 searching: 2 queries
162 query 2; still undecided: 31, sample size is: 31
223 query 2; still undecided: 31, sample size is: 31
163 2 total queries in *.????s (glob)
224 2 total queries in *.????s (glob)
164 common heads: 66f7d451a68b
225 common heads: 66f7d451a68b
165
226
227 % -- a -> b set (tip only)
228 comparing with b
229 query 1; heads
230 searching for changes
231 taking quick initial sample
232 searching: 2 queries
233 query 2; still undecided: 31, sample size is: 31
234 2 total queries in *.????s (glob)
235 common heads: 66f7d451a68b
236
166 % -- b -> a tree
237 % -- b -> a tree
167 comparing with a
238 comparing with a
168 searching for changes
239 searching for changes
169 unpruned common: 66f7d451a68b
240 unpruned common: 66f7d451a68b
170 common heads: 66f7d451a68b
241 common heads: 66f7d451a68b
171
242
172 % -- b -> a set
243 % -- b -> a set
173 comparing with a
244 comparing with a
174 query 1; heads
245 query 1; heads
175 searching for changes
246 searching for changes
176 taking quick initial sample
247 taking quick initial sample
177 searching: 2 queries
248 searching: 2 queries
178 query 2; still undecided: 31, sample size is: 31
249 query 2; still undecided: 31, sample size is: 31
179 2 total queries in *.????s (glob)
250 2 total queries in *.????s (glob)
180 common heads: 66f7d451a68b
251 common heads: 66f7d451a68b
252
253 % -- b -> a set (tip only)
254 comparing with a
255 query 1; heads
256 searching for changes
257 taking quick initial sample
258 searching: 2 queries
259 query 2; still undecided: 31, sample size is: 31
260 2 total queries in *.????s (glob)
261 common heads: 66f7d451a68b
181
262
182
263
183 Both many new skewed:
264 Both many new skewed:
184
265
185 $ testdesc '-ra' '-rb' '
266 $ testdesc '-ra' '-rb' '
186 > +2:f +30 :b
267 > +2:f +30 :b
187 > <f +50 :a'
268 > <f +50 :a'
188
269
189 % -- a -> b tree
270 % -- a -> b tree
190 comparing with b
271 comparing with b
191 searching for changes
272 searching for changes
192 unpruned common: 66f7d451a68b
273 unpruned common: 66f7d451a68b
193 common heads: 66f7d451a68b
274 common heads: 66f7d451a68b
194
275
195 % -- a -> b set
276 % -- a -> b set
196 comparing with b
277 comparing with b
197 query 1; heads
278 query 1; heads
198 searching for changes
279 searching for changes
199 taking quick initial sample
280 taking quick initial sample
200 searching: 2 queries
281 searching: 2 queries
201 query 2; still undecided: 51, sample size is: 51
282 query 2; still undecided: 51, sample size is: 51
202 2 total queries in *.????s (glob)
283 2 total queries in *.????s (glob)
203 common heads: 66f7d451a68b
284 common heads: 66f7d451a68b
204
285
286 % -- a -> b set (tip only)
287 comparing with b
288 query 1; heads
289 searching for changes
290 taking quick initial sample
291 searching: 2 queries
292 query 2; still undecided: 51, sample size is: 51
293 2 total queries in *.????s (glob)
294 common heads: 66f7d451a68b
295
205 % -- b -> a tree
296 % -- b -> a tree
206 comparing with a
297 comparing with a
207 searching for changes
298 searching for changes
208 unpruned common: 66f7d451a68b
299 unpruned common: 66f7d451a68b
209 common heads: 66f7d451a68b
300 common heads: 66f7d451a68b
210
301
211 % -- b -> a set
302 % -- b -> a set
212 comparing with a
303 comparing with a
213 query 1; heads
304 query 1; heads
214 searching for changes
305 searching for changes
215 taking quick initial sample
306 taking quick initial sample
216 searching: 2 queries
307 searching: 2 queries
217 query 2; still undecided: 31, sample size is: 31
308 query 2; still undecided: 31, sample size is: 31
218 2 total queries in *.????s (glob)
309 2 total queries in *.????s (glob)
219 common heads: 66f7d451a68b
310 common heads: 66f7d451a68b
311
312 % -- b -> a set (tip only)
313 comparing with a
314 query 1; heads
315 searching for changes
316 taking quick initial sample
317 searching: 2 queries
318 query 2; still undecided: 31, sample size is: 31
319 2 total queries in *.????s (glob)
320 common heads: 66f7d451a68b
220
321
221
322
222 Both many new on top of long history:
323 Both many new on top of long history:
223
324
224 $ testdesc '-ra' '-rb' '
325 $ testdesc '-ra' '-rb' '
225 > +1000:f +30 :b
326 > +1000:f +30 :b
226 > <f +50 :a'
327 > <f +50 :a'
227
328
228 % -- a -> b tree
329 % -- a -> b tree
229 comparing with b
330 comparing with b
230 searching for changes
331 searching for changes
231 unpruned common: 7ead0cba2838
332 unpruned common: 7ead0cba2838
232 common heads: 7ead0cba2838
333 common heads: 7ead0cba2838
233
334
234 % -- a -> b set
335 % -- a -> b set
235 comparing with b
336 comparing with b
236 query 1; heads
337 query 1; heads
237 searching for changes
338 searching for changes
238 taking quick initial sample
339 taking quick initial sample
239 searching: 2 queries
340 searching: 2 queries
240 query 2; still undecided: 1049, sample size is: 11
341 query 2; still undecided: 1049, sample size is: 11
241 sampling from both directions
342 sampling from both directions
242 searching: 3 queries
343 searching: 3 queries
243 query 3; still undecided: 31, sample size is: 31
344 query 3; still undecided: 31, sample size is: 31
244 3 total queries in *.????s (glob)
345 3 total queries in *.????s (glob)
245 common heads: 7ead0cba2838
346 common heads: 7ead0cba2838
246
347
348 % -- a -> b set (tip only)
349 comparing with b
350 query 1; heads
351 searching for changes
352 taking quick initial sample
353 searching: 2 queries
354 query 2; still undecided: 1049, sample size is: 11
355 sampling from both directions
356 searching: 3 queries
357 query 3; still undecided: 31, sample size is: 31
358 3 total queries in *.????s (glob)
359 common heads: 7ead0cba2838
360
247 % -- b -> a tree
361 % -- b -> a tree
248 comparing with a
362 comparing with a
249 searching for changes
363 searching for changes
250 unpruned common: 7ead0cba2838
364 unpruned common: 7ead0cba2838
251 common heads: 7ead0cba2838
365 common heads: 7ead0cba2838
252
366
253 % -- b -> a set
367 % -- b -> a set
254 comparing with a
368 comparing with a
255 query 1; heads
369 query 1; heads
256 searching for changes
370 searching for changes
257 taking quick initial sample
371 taking quick initial sample
258 searching: 2 queries
372 searching: 2 queries
259 query 2; still undecided: 1029, sample size is: 11
373 query 2; still undecided: 1029, sample size is: 11
260 sampling from both directions
374 sampling from both directions
261 searching: 3 queries
375 searching: 3 queries
262 query 3; still undecided: 15, sample size is: 15
376 query 3; still undecided: 15, sample size is: 15
263 3 total queries in *.????s (glob)
377 3 total queries in *.????s (glob)
264 common heads: 7ead0cba2838
378 common heads: 7ead0cba2838
379
380 % -- b -> a set (tip only)
381 comparing with a
382 query 1; heads
383 searching for changes
384 taking quick initial sample
385 searching: 2 queries
386 query 2; still undecided: 1029, sample size is: 11
387 sampling from both directions
388 searching: 3 queries
389 query 3; still undecided: 15, sample size is: 15
390 3 total queries in *.????s (glob)
391 common heads: 7ead0cba2838
265
392
266
393
267 One with >200 heads, which used to use up all of the sample:
394 One with >200 heads, which used to use up all of the sample:
268
395
269 $ hg init manyheads
396 $ hg init manyheads
270 $ cd manyheads
397 $ cd manyheads
271 $ echo "+300:r @a" >dagdesc
398 $ echo "+300:r @a" >dagdesc
272 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
399 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
273 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
400 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
401 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
402 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
403 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
404 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
405 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
406 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
407 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
408 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
409 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
410 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
411 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 $ echo "@b *r+3" >>dagdesc # one more head
412 $ echo "@b *r+3" >>dagdesc # one more head
286 $ hg debugbuilddag <dagdesc
413 $ hg debugbuilddag <dagdesc
287 reading DAG from stdin
414 reading DAG from stdin
288
415
289 $ hg heads -t --template . | wc -c
416 $ hg heads -t --template . | wc -c
290 \s*261 (re)
417 \s*261 (re)
291
418
292 $ hg clone -b a . a
419 $ hg clone -b a . a
293 adding changesets
420 adding changesets
294 adding manifests
421 adding manifests
295 adding file changes
422 adding file changes
296 added 1340 changesets with 0 changes to 0 files (+259 heads)
423 added 1340 changesets with 0 changes to 0 files (+259 heads)
297 new changesets 1ea73414a91b:1c51e2c80832
424 new changesets 1ea73414a91b:1c51e2c80832
298 updating to branch a
425 updating to branch a
299 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
426 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
300 $ hg clone -b b . b
427 $ hg clone -b b . b
301 adding changesets
428 adding changesets
302 adding manifests
429 adding manifests
303 adding file changes
430 adding file changes
304 added 304 changesets with 0 changes to 0 files
431 added 304 changesets with 0 changes to 0 files
305 new changesets 1ea73414a91b:513314ca8b3a
432 new changesets 1ea73414a91b:513314ca8b3a
306 updating to branch b
433 updating to branch b
307 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
434 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
308
435
309 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
436 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
310 comparing with b
437 comparing with b
311 query 1; heads
438 query 1; heads
312 searching for changes
439 searching for changes
313 taking quick initial sample
440 taking quick initial sample
314 searching: 2 queries
441 searching: 2 queries
315 query 2; still undecided: 1240, sample size is: 100
442 query 2; still undecided: 1240, sample size is: 100
316 sampling from both directions
443 sampling from both directions
317 searching: 3 queries
444 searching: 3 queries
318 query 3; still undecided: 1140, sample size is: 200
445 query 3; still undecided: 1140, sample size is: 200
319 sampling from both directions
446 sampling from both directions
320 searching: 4 queries
447 searching: 4 queries
321 query 4; still undecided: \d+, sample size is: 200 (re)
448 query 4; still undecided: \d+, sample size is: 200 (re)
322 sampling from both directions
449 sampling from both directions
323 searching: 5 queries
450 searching: 5 queries
324 query 5; still undecided: \d+, sample size is: 200 (re)
451 query 5; still undecided: \d+, sample size is: 200 (re)
325 sampling from both directions
452 sampling from both directions
326 searching: 6 queries
453 searching: 6 queries
327 query 6; still undecided: \d+, sample size is: \d+ (re)
454 query 6; still undecided: \d+, sample size is: \d+ (re)
328 6 total queries in *.????s (glob)
455 6 total queries in *.????s (glob)
329 common heads: 3ee37d65064a
456 common heads: 3ee37d65064a
457 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
458 comparing with b
459 query 1; heads
460 searching for changes
461 taking quick initial sample
462 searching: 2 queries
463 query 2; still undecided: 303, sample size is: 9
464 sampling from both directions
465 searching: 3 queries
466 query 3; still undecided: 3, sample size is: 3
467 3 total queries in *.????s (glob)
468 common heads: 3ee37d65064a
330
469
331 Test actual protocol when pulling one new head in addition to common heads
470 Test actual protocol when pulling one new head in addition to common heads
332
471
333 $ hg clone -U b c
472 $ hg clone -U b c
334 $ hg -R c id -ir tip
473 $ hg -R c id -ir tip
335 513314ca8b3a
474 513314ca8b3a
336 $ hg -R c up -qr default
475 $ hg -R c up -qr default
337 $ touch c/f
476 $ touch c/f
338 $ hg -R c ci -Aqm "extra head"
477 $ hg -R c ci -Aqm "extra head"
339 $ hg -R c id -i
478 $ hg -R c id -i
340 e64a39e7da8b
479 e64a39e7da8b
341
480
342 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
481 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
343 $ cat hg.pid >> $DAEMON_PIDS
482 $ cat hg.pid >> $DAEMON_PIDS
344
483
345 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
484 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
346 comparing with http://localhost:$HGPORT/
485 comparing with http://localhost:$HGPORT/
347 searching for changes
486 searching for changes
348 e64a39e7da8b
487 e64a39e7da8b
349
488
350 $ killdaemons.py
489 $ killdaemons.py
351 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
490 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
352 "GET /?cmd=capabilities HTTP/1.1" 200 -
491 "GET /?cmd=capabilities HTTP/1.1" 200 -
353 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
492 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
354 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
493 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
355 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
494 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
356 $ cat errors.log
495 $ cat errors.log
357
496
358 $ cd ..
497 $ cd ..
359
498
360
499
361 Issue 4438 - test coverage for 3ef893520a85 issues.
500 Issue 4438 - test coverage for 3ef893520a85 issues.
362
501
363 $ mkdir issue4438
502 $ mkdir issue4438
364 $ cd issue4438
503 $ cd issue4438
365 #if false
504 #if false
366 generate new bundles:
505 generate new bundles:
367 $ hg init r1
506 $ hg init r1
368 $ for i in `$PYTHON $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
507 $ for i in `$PYTHON $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
369 $ hg clone -q r1 r2
508 $ hg clone -q r1 r2
370 $ for i in `$PYTHON $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
509 $ for i in `$PYTHON $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
371 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
510 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
372 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
511 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
373 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
512 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
374 #else
513 #else
375 use existing bundles:
514 use existing bundles:
376 $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1
515 $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1
377 $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2
516 $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2
378 #endif
517 #endif
379
518
380 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
519 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
381
520
382 $ hg -R r1 outgoing r2 -T'{rev} '
521 $ hg -R r1 outgoing r2 -T'{rev} '
383 comparing with r2
522 comparing with r2
384 searching for changes
523 searching for changes
385 101 102 103 104 105 106 107 108 109 110 (no-eol)
524 101 102 103 104 105 106 107 108 109 110 (no-eol)
386
525
387 The case where all the 'initialsamplesize' samples already were common would
526 The case where all the 'initialsamplesize' samples already were common would
388 give 'all remote heads known locally' without checking the remaining heads -
527 give 'all remote heads known locally' without checking the remaining heads -
389 fixed in 86c35b7ae300:
528 fixed in 86c35b7ae300:
390
529
391 $ cat >> $TESTTMP/unrandomsample.py << EOF
530 $ cat >> $TESTTMP/unrandomsample.py << EOF
392 > import random
531 > import random
393 > def sample(population, k):
532 > def sample(population, k):
394 > return sorted(population)[:k]
533 > return sorted(population)[:k]
395 > random.sample = sample
534 > random.sample = sample
396 > EOF
535 > EOF
397
536
398 $ cat >> r1/.hg/hgrc << EOF
537 $ cat >> r1/.hg/hgrc << EOF
399 > [extensions]
538 > [extensions]
400 > unrandomsample = $TESTTMP/unrandomsample.py
539 > unrandomsample = $TESTTMP/unrandomsample.py
401 > EOF
540 > EOF
402
541
403 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox=
542 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox=
404 comparing with r2
543 comparing with r2
405 searching for changes
544 searching for changes
406 101 102 103 104 105 106 107 108 109 110 (no-eol)
545 101 102 103 104 105 106 107 108 109 110 (no-eol)
407 $ hg -R r1 --config extensions.blackbox= blackbox
546 $ hg -R r1 --config extensions.blackbox= blackbox
408 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
547 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
409 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
548 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
410 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
549 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
411 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
550 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
412 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 --config *extensions.blackbox=* blackbox (glob)
551 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 --config *extensions.blackbox=* blackbox (glob)
413 $ cd ..
552 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now