##// END OF EJS Templates
discovery: add new set-based discovery...
Peter Arrenbrecht -
r14164:cb98fed5 default
parent child Browse files
Show More
@@ -0,0 +1,242 b''
1 # dagutil.py - dag utilities for mercurial
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
from node import nullrev
from i18n import _
10
11
class basedag(object):
    '''generic interface for DAGs

    terms:
    "ix" (short for index) identifies a node internally,
    "id" identifies one externally.

    All params are ixs unless explicitly suffixed otherwise.
    Pluralized params are lists or sets.
    '''

    def __init__(self):
        # cached inverse dag, built on demand by inverse()
        self._inverse = None

    def nodeset(self):
        '''set of all node ixs'''
        raise NotImplementedError()

    def heads(self):
        '''list of head ixs'''
        raise NotImplementedError()

    def parents(self, ix):
        '''list of parents ixs of ix'''
        raise NotImplementedError()

    def inverse(self):
        '''inverse DAG, where parents becomes children, etc.'''
        raise NotImplementedError()

    def ancestorset(self, starts, stops=None):
        '''set of all ancestors of starts (incl), but stop walk at stops (excl)'''
        raise NotImplementedError()

    def descendantset(self, starts, stops=None):
        '''set of all descendants of starts (incl), but stop walk at stops (excl)'''
        return self.inverse().ancestorset(starts, stops)

    def headsetofconnecteds(self, ixs):
        '''subset of connected list of ixs so that no node has a descendant in it

        By "connected list" we mean that if an ancestor and a descendant are in
        the list, then so is at least one path connecting them.'''
        raise NotImplementedError()

    def externalize(self, ix):
        '''return the node id for a single node ix'''
        return self._externalize(ix)

    def externalizeall(self, ixs):
        '''return a list of (or set if given a set) of node ids'''
        ids = self._externalizeall(ixs)
        if isinstance(ixs, set):
            return set(ids)
        return list(ids)

    def internalize(self, id):
        '''return the node ix for a single node id'''
        return self._internalize(id)

    def internalizeall(self, ids, filterunknown=False):
        '''return a list of (or set if given a set) of node ixs'''
        ixs = self._internalizeall(ids, filterunknown)
        if isinstance(ids, set):
            return set(ixs)
        return list(ixs)
78
79
class genericdag(basedag):
    '''generic implementations for DAGs'''

    def ancestorset(self, starts, stops=None):
        '''walk parent links from starts, never entering a node in stops'''
        blocked = set(stops) if stops else set()
        visited = set()
        stack = list(starts)
        while stack:
            node = stack.pop()
            if node in visited or node in blocked:
                continue
            visited.add(node)
            stack.extend(self.parents(node))
        return visited

    def headsetofconnecteds(self, ixs):
        '''drop every member of ixs that is a parent of another member'''
        heads = set(ixs)
        if heads:
            for node in ixs:
                for parent in self.parents(node):
                    heads.discard(parent)
            # a non-empty connected set always has at least one head
            assert heads
        return heads
103
104
class revlogbaseddag(basedag):
    '''generic dag interface to a revlog'''

    def __init__(self, revlog, nodeset):
        basedag.__init__(self)
        self._revlog = revlog
        # head ixs, computed lazily by heads() via the subclass' _getheads()
        self._heads = None
        self._nodeset = nodeset

    def nodeset(self):
        '''set of all node ixs'''
        return self._nodeset

    def heads(self):
        '''list of head ixs, cached; _getheads() is supplied by subclasses'''
        if self._heads is None:
            self._heads = self._getheads()
        return self._heads

    def _externalize(self, ix):
        # slot 7 of a revlog index entry holds the node hash
        return self._revlog.index[ix][7]
    def _externalizeall(self, ixs):
        idx = self._revlog.index
        return [idx[i][7] for i in ixs]

    def _internalize(self, id):
        ix = self._revlog.rev(id)
        if ix == nullrev:
            # the null id is never a usable ix; _() marks the message for i18n
            raise LookupError(id, self._revlog.indexfile, _('nullid'))
        return ix
    def _internalizeall(self, ids, filterunknown):
        rl = self._revlog
        if filterunknown:
            # silently drop ids the revlog does not know (and the null rev)
            return [r for r in map(rl.nodemap.get, ids)
                    if r is not None and r != nullrev]
        return map(self._internalize, ids)
139
140
class revlogdag(revlogbaseddag):
    '''dag interface to a revlog'''

    def __init__(self, revlog):
        revlogbaseddag.__init__(self, revlog, set(xrange(len(revlog))))

    def _getheads(self):
        return [rev for rev in self._revlog.headrevs() if rev != nullrev]

    def parents(self, ix):
        '''list of parent ixs of ix, null parents omitted'''
        entry = self._revlog.index[ix]
        result = []
        for parent in (entry[5], entry[6]):
            if parent != nullrev:
                result.append(parent)
        return result

    def inverse(self):
        '''inverse DAG (children instead of parents), built once and cached'''
        if self._inverse is None:
            self._inverse = inverserevlogdag(self)
        return self._inverse

    def ancestorset(self, starts, stops=None):
        '''set of all ancestors of starts (incl), stopping at stops (excl)'''
        index = self._revlog.index
        blocked = set(stops) if stops else set()
        seen = set()
        stack = list(starts)
        while stack:
            rev = stack.pop()
            if rev in seen or rev in blocked:
                continue
            seen.add(rev)
            entry = index[rev]
            # slots 5 and 6 of an index entry are the parent revs
            for parent in (entry[5], entry[6]):
                if parent != nullrev:
                    stack.append(parent)
        return seen

    def headsetofconnecteds(self, ixs):
        '''members of ixs that are not a parent of another member'''
        if not ixs:
            return set()
        index = self._revlog.index
        heads = set(ixs)
        for rev in ixs:
            entry = index[rev]
            for parent in (entry[5], entry[6]):
                if parent != nullrev:
                    heads.discard(parent)
        # a non-empty connected set always keeps at least one head
        assert heads
        return heads
201
202
class inverserevlogdag(revlogbaseddag, genericdag):
    '''inverse of an existing revlog dag; see revlogdag.inverse()'''

    def __init__(self, orig):
        revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
        self._orig = orig
        # rev -> [child revs], filled in lazily by _walkto()
        self._children = {}
        # revs with no parents seen so far; these become the inverse heads
        self._roots = []
        # highest rev not yet scanned; scanning proceeds from tip downwards
        self._walkfrom = len(self._revlog) - 1

    def _walkto(self, walkto):
        # scan revs from the current position down to walkto (inclusive),
        # recording child links and roots as we go
        rev = self._walkfrom
        cs = self._children
        roots = self._roots
        idx = self._revlog.index
        while rev >= walkto:
            data = idx[rev]
            isroot = True
            for prev in [data[5], data[6]]: # parent revs
                if prev != nullrev:
                    cs.setdefault(prev, []).append(rev)
                    isroot = False
            if isroot:
                roots.append(rev)
            rev -= 1
        # NOTE(review): after the loop rev == walkto - 1, so this stores
        # walkto - 2; a subsequent _walkto() resumes at that rev and appears
        # to skip rev walkto - 1 entirely -- verify against the
        # `ix <= self._walkfrom` guard in parents()
        self._walkfrom = rev - 1

    def _getheads(self):
        # NOTE(review): walking to nullrev (-1) makes the loop above also
        # visit idx[-1] (the revlog's sentinel entry) -- confirm intended
        self._walkto(nullrev)
        return self._roots

    def parents(self, ix):
        if ix is None:
            return []
        # extend the lazy walk far enough that ix's children are all known
        if ix <= self._walkfrom:
            self._walkto(ix)
        return self._children.get(ix, [])

    def inverse(self):
        # the inverse of the inverse is the original dag
        return self._orig
@@ -0,0 +1,178 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 from node import nullid
10 from i18n import _
11 import random, collections, util, dagutil
12
13 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
14 # if nodes is empty we scan the entire graph
15 if nodes:
16 heads = dag.headsetofconnecteds(nodes)
17 else:
18 heads = dag.heads()
19 dist = {}
20 visit = collections.deque(heads)
21 seen = set()
22 factor = 1
23 while visit:
24 curr = visit.popleft()
25 if curr in seen:
26 continue
27 d = dist.setdefault(curr, 1)
28 if d > factor:
29 factor *= 2
30 if d == factor:
31 if curr not in always: # need this check for the early exit below
32 sample.add(curr)
33 if quicksamplesize and (len(sample) >= quicksamplesize):
34 return
35 seen.add(curr)
36 for p in dag.parents(curr):
37 if not nodes or p in nodes:
38 dist.setdefault(p, d + 1)
39 visit.append(p)
40
41 def _setupsample(dag, nodes, size):
42 if len(nodes) <= size:
43 return set(nodes), None, 0
44 always = set(dag.heads())
45 desiredlen = size - len(always)
46 if desiredlen <= 0:
47 # This could be bad if there are very many heads, all unknown to the
48 # server. We're counting on long request support here.
49 return always, None, desiredlen
50 return always, set(), desiredlen
51
def _takequicksample(dag, nodes, size, initial):
    '''take a cheap sample of at most `size` nodes

    Grows the sample with a single bounded head-walk; on the `initial`
    query the walk covers the whole graph rather than just `nodes`.
    '''
    always, sample, wanted = _setupsample(dag, nodes, size)
    if sample is None:
        return always
    fromset = None if initial else nodes
    _updatesample(dag, fromset, sample, always, quicksamplesize=wanted)
    sample.update(always)
    return sample
63
def _takefullsample(dag, nodes, size):
    '''take a thorough sample of at most `size` nodes

    Grows the sample from the heads and from the roots (via the inverse
    dag), then trims or pads it randomly to the desired length.
    '''
    always, sample, wanted = _setupsample(dag, nodes, size)
    if sample is None:
        return always
    # sample from both directions so the walk converges quickly
    _updatesample(dag, nodes, sample, always)
    _updatesample(dag.inverse(), nodes, sample, always)
    assert sample
    excess = len(sample) - wanted
    if excess > 0:
        sample = set(random.sample(sample, wanted))
    elif excess < 0:
        # pad with random so-far-unsampled nodes
        sample.update(random.sample(list(nodes - sample - always), -excess))
    sample.update(always)
    return sample
80
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    "common" is a list of node hashes of the heads of the common subset,
    "anyincoming" is testable as a boolean indicating whether any nodes
    may be missing locally, and "remoteheads" is the list of the remote's
    head hashes.
    '''
    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)
    nodes = dag.nodeset()

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    srvheadhashes = remote.heads()

    ## TODO We might want to request an additional random sample of the server's
    ## nodes batched with the heads query here.

    if cl.tip() == nullid:
        # local repo is empty: anything the server has is incoming
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.note("all remote heads known locally\n")
        return (srvheadhashes, False, srvheadhashes,)

    # full blown discovery

    # copy the node set: dag.nodeset() hands out its internal set and we
    # mutate undecided in place below
    undecided = set(nodes) # own nodes where I don't know if the server knows them
    common = set() # own nodes I know we both know
    missing = set() # own nodes I know the server lacks

    # treat remote heads as a first implicit sample response
    common.update(dag.ancestorset(srvheads))
    undecided.difference_update(common)
    # use cheapish initial sample
    if common:
        ui.debug("taking initial sample\n")
        sample = _takefullsample(dag, undecided, size=fullsamplesize)
    else:
        ui.debug("taking quick initial sample\n")
        sample = _takequicksample(dag, nodes, size=initialsamplesize,
                                  initial=True)

    roundtrips += 1
    ui.progress(_('searching'), roundtrips, unit=_('queries'))
    ui.debug("query %i; still undecided: %i, sample size is: %i\n"
             % (roundtrips, len(undecided), len(sample)))
    # indices between sample and externalized version must match
    sample = list(sample)
    yesno = remote.known(dag.externalizeall(sample))

    while undecided:
        # each response partitions the sample: ancestors of known nodes are
        # common, descendants of unknown nodes are missing
        commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
        common.update(dag.ancestorset(commoninsample, common))

        missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
        missing.update(dag.descendantset(missinginsample, missing))

        undecided.difference_update(missing)
        undecided.difference_update(common)

        if not undecided:
            break

        ui.note("sampling from both directions\n")
        sample = _takefullsample(dag, undecided, size=fullsamplesize)

        roundtrips += 1
        ui.progress(_('searching'), roundtrips, unit=_('queries'))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))

    result = dag.headsetofconnecteds(common)
    ui.progress(_('searching'), None)
    ui.debug("%d total queries\n" % roundtrips)

    if not result and srvheadhashes != [nullid]:
        # no common ancestry at all
        if abortwhenunrelated:
            raise util.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return (set([nullid]), True, srvheadhashes,)

    return (dag.externalizeall(result), True, srvheadhashes,)
@@ -0,0 +1,271 b''
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -e foo ]; then rm -rf foo; fi
7 > hg init foo
8 > cd foo
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
12 > echo
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug
18 > echo
19 > echo "% -- b -> a tree"
20 > hg -R b debugdiscovery a --verbose --old
21 > echo
22 > echo "% -- b -> a set"
23 > hg -R b debugdiscovery a --verbose --debug
24 > cd ..
25 > }
26
27
28 Small superset:
29
30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 > +2:f +1:a1:b1
32 > <f +4 :a2
33 > +5 :b2
34 > <f +3 :b3'
35
36 % -- a -> b tree
37 comparing with b
38 searching for changes
39 unpruned common: b5714e113bc0 66f7d451a68b 01241442b3c2
40 common heads: b5714e113bc0 01241442b3c2
41 local is subset
42
43 % -- a -> b set
44 comparing with b
45 query 1; heads
46 searching for changes
47 taking initial sample
48 searching: 2 queries
49 query 2; still undecided: 4, sample size is: 4
50 2 total queries
51 common heads: b5714e113bc0 01241442b3c2
52 local is subset
53
54 % -- b -> a tree
55 comparing with a
56 searching for changes
57 unpruned common: b5714e113bc0 01241442b3c2
58 common heads: b5714e113bc0 01241442b3c2
59 remote is subset
60
61 % -- b -> a set
62 comparing with a
63 query 1; heads
64 searching for changes
65 all remote heads known locally
66 common heads: b5714e113bc0 01241442b3c2
67 remote is subset
68
69
70 Many new:
71
72 $ testdesc '-ra1 -ra2' '-rb' '
73 > +2:f +3:a1 +3:b
74 > <f +30 :a2'
75
76 % -- a -> b tree
77 comparing with b
78 searching for changes
79 unpruned common: bebd167eb94d
80 common heads: bebd167eb94d
81
82 % -- a -> b set
83 comparing with b
84 query 1; heads
85 searching for changes
86 taking quick initial sample
87 searching: 2 queries
88 query 2; still undecided: 35, sample size is: 35
89 2 total queries
90 common heads: bebd167eb94d
91
92 % -- b -> a tree
93 comparing with a
94 searching for changes
95 unpruned common: bebd167eb94d 66f7d451a68b
96 common heads: bebd167eb94d
97
98 % -- b -> a set
99 comparing with a
100 query 1; heads
101 searching for changes
102 taking initial sample
103 searching: 2 queries
104 query 2; still undecided: 3, sample size is: 3
105 2 total queries
106 common heads: bebd167eb94d
107
108
109 Both sides many new with stub:
110
111 $ testdesc '-ra1 -ra2' '-rb' '
112 > +2:f +2:a1 +30 :b
113 > <f +30 :a2'
114
115 % -- a -> b tree
116 comparing with b
117 searching for changes
118 unpruned common: 2dc09a01254d
119 common heads: 2dc09a01254d
120
121 % -- a -> b set
122 comparing with b
123 query 1; heads
124 searching for changes
125 taking quick initial sample
126 searching: 2 queries
127 query 2; still undecided: 34, sample size is: 34
128 2 total queries
129 common heads: 2dc09a01254d
130
131 % -- b -> a tree
132 comparing with a
133 searching for changes
134 unpruned common: 66f7d451a68b 2dc09a01254d
135 common heads: 2dc09a01254d
136
137 % -- b -> a set
138 comparing with a
139 query 1; heads
140 searching for changes
141 taking initial sample
142 searching: 2 queries
143 query 2; still undecided: 30, sample size is: 30
144 2 total queries
145 common heads: 2dc09a01254d
146
147
148 Both many new:
149
150 $ testdesc '-ra' '-rb' '
151 > +2:f +30 :b
152 > <f +30 :a'
153
154 % -- a -> b tree
155 comparing with b
156 searching for changes
157 unpruned common: 66f7d451a68b
158 common heads: 66f7d451a68b
159
160 % -- a -> b set
161 comparing with b
162 query 1; heads
163 searching for changes
164 taking quick initial sample
165 searching: 2 queries
166 query 2; still undecided: 32, sample size is: 32
167 2 total queries
168 common heads: 66f7d451a68b
169
170 % -- b -> a tree
171 comparing with a
172 searching for changes
173 unpruned common: 66f7d451a68b
174 common heads: 66f7d451a68b
175
176 % -- b -> a set
177 comparing with a
178 query 1; heads
179 searching for changes
180 taking quick initial sample
181 searching: 2 queries
182 query 2; still undecided: 32, sample size is: 32
183 2 total queries
184 common heads: 66f7d451a68b
185
186
187 Both many new skewed:
188
189 $ testdesc '-ra' '-rb' '
190 > +2:f +30 :b
191 > <f +50 :a'
192
193 % -- a -> b tree
194 comparing with b
195 searching for changes
196 unpruned common: 66f7d451a68b
197 common heads: 66f7d451a68b
198
199 % -- a -> b set
200 comparing with b
201 query 1; heads
202 searching for changes
203 taking quick initial sample
204 searching: 2 queries
205 query 2; still undecided: 52, sample size is: 52
206 2 total queries
207 common heads: 66f7d451a68b
208
209 % -- b -> a tree
210 comparing with a
211 searching for changes
212 unpruned common: 66f7d451a68b
213 common heads: 66f7d451a68b
214
215 % -- b -> a set
216 comparing with a
217 query 1; heads
218 searching for changes
219 taking quick initial sample
220 searching: 2 queries
221 query 2; still undecided: 32, sample size is: 32
222 2 total queries
223 common heads: 66f7d451a68b
224
225
226 Both many new on top of long history:
227
228 $ testdesc '-ra' '-rb' '
229 > +1000:f +30 :b
230 > <f +50 :a'
231
232 % -- a -> b tree
233 comparing with b
234 searching for changes
235 unpruned common: 7ead0cba2838
236 common heads: 7ead0cba2838
237
238 % -- a -> b set
239 comparing with b
240 query 1; heads
241 searching for changes
242 taking quick initial sample
243 searching: 2 queries
244 query 2; still undecided: 1050, sample size is: 11
245 sampling from both directions
246 searching: 3 queries
247 query 3; still undecided: 31, sample size is: 31
248 3 total queries
249 common heads: 7ead0cba2838
250
251 % -- b -> a tree
252 comparing with a
253 searching for changes
254 unpruned common: 7ead0cba2838
255 common heads: 7ead0cba2838
256
257 % -- b -> a set
258 comparing with a
259 query 1; heads
260 searching for changes
261 taking quick initial sample
262 searching: 2 queries
263 query 2; still undecided: 1030, sample size is: 11
264 sampling from both directions
265 searching: 3 queries
266 query 3; still undecided: 16, sample size is: 16
267 3 total queries
268 common heads: 7ead0cba2838
269
270
271
@@ -15,6 +15,7 b' import archival, changegroup, cmdutil, s'
15 15 import merge as mergemod
16 16 import minirst, revset, templatefilters
17 17 import dagparser, context, simplemerge
18 import random, setdiscovery, treediscovery, dagutil
18 19
19 20 # Commands start here, listed alphabetically
20 21
@@ -1471,6 +1472,65 b' def debugignore(ui, repo, *values, **opt'
1471 1472 else:
1472 1473 raise util.Abort(_("no ignore patterns found"))
1473 1474
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.repository(hg.remoteui(repo, opts), remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads):
        # run one discovery pass and report the computed common heads
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write("unpruned common: %s\n" % " ".join([short(n)
                                                             for n in common]))
                # reduce the common set to its heads so both discovery
                # styles report comparable sets
                dag = dagutil.revlogdag(repo.changelog)
                ancs = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(ancs))
        else:
            # 'anyinc' (renamed from 'any', which shadowed the builtin)
            # is unused here
            common, anyinc, hds = setdiscovery.findcommonheads(ui, repo, remote)
            common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
        if lheads <= common:
            ui.write("local is subset\n")
        elif rheads <= common:
            ui.write("remote is subset\n")

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay the discovery requests recorded in server log files
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                for line in logfile:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
1533
1474 1534 def debugindex(ui, repo, file_, **opts):
1475 1535 """dump the contents of an index file"""
1476 1536 r = None
@@ -4513,6 +4573,14 b' table = {'
4513 4573 [('e', 'extended', None, _('try extended date formats'))],
4514 4574 _('[-e] DATE [RANGE]')),
4515 4575 "debugdata": (debugdata, [], _('FILE REV')),
4576 "debugdiscovery": (debugdiscovery,
4577 [('', 'old', None,
4578 _('use old-style discovery')),
4579 ('', 'nonheads', None,
4580 _('use old-style discovery with non-heads included')),
4581 ] + remoteopts,
4582 _('[-l REV] [-r REV] [-b BRANCH]...'
4583 ' [OTHER]')),
4516 4584 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4517 4585 "debuggetbundle":
4518 4586 (debuggetbundle,
@@ -7,7 +7,7 b''
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 import util, error
10 import util, error, setdiscovery, treediscovery
11 11
12 12 def findcommonincoming(repo, remote, heads=None, force=False):
13 13 """Return a tuple (common, anyincoming, heads) used to identify the common
@@ -20,145 +20,28 b' def findcommonincoming(repo, remote, hea'
20 20 changegroupsubset. No code except for pull should be relying on this fact
21 21 any longer.
22 22 "heads" is either the supplied heads, or else the remote's heads.
23
24 If you pass heads and they are all known locally, the reponse lists justs
25 these heads in "common" and in "heads".
23 26 """
24 27
25 m = repo.changelog.nodemap
26 search = []
27 fetch = set()
28 seen = set()
29 seenbranch = set()
30 base = set()
31
32 if not heads:
33 heads = remote.heads()
34
35 if repo.changelog.tip() == nullid:
36 base.add(nullid)
37 if heads != [nullid]:
38 return [nullid], [nullid], list(heads)
39 return [nullid], [], []
40
41 # assume we're closer to the tip than the root
42 # and start by examining the heads
43 repo.ui.status(_("searching for changes\n"))
44
45 if remote.capable('getbundle'):
46 myheads = repo.heads()
47 known = remote.known(myheads)
48 if util.all(known):
49 hasincoming = set(heads).difference(set(myheads)) and True
50 return myheads, hasincoming, heads
51
52 unknown = []
53 for h in heads:
54 if h not in m:
55 unknown.append(h)
56 else:
57 base.add(h)
58
59 heads = unknown
60 if not unknown:
61 return list(base), [], []
62
63 req = set(unknown)
64 reqcnt = 0
65
66 # search through remote branches
67 # a 'branch' here is a linear segment of history, with four parts:
68 # head, root, first parent, second parent
69 # (a branch always has two parents (or none) by definition)
70 unknown = remote.branches(unknown)
71 while unknown:
72 r = []
73 while unknown:
74 n = unknown.pop(0)
75 if n[0] in seen:
76 continue
28 if not remote.capable('getbundle'):
29 return treediscovery.findcommonincoming(repo, remote, heads, force)
77 30
78 repo.ui.debug("examining %s:%s\n"
79 % (short(n[0]), short(n[1])))
80 if n[0] == nullid: # found the end of the branch
81 pass
82 elif n in seenbranch:
83 repo.ui.debug("branch already found\n")
84 continue
85 elif n[1] and n[1] in m: # do we know the base?
86 repo.ui.debug("found incomplete branch %s:%s\n"
87 % (short(n[0]), short(n[1])))
88 search.append(n[0:2]) # schedule branch range for scanning
89 seenbranch.add(n)
90 else:
91 if n[1] not in seen and n[1] not in fetch:
92 if n[2] in m and n[3] in m:
93 repo.ui.debug("found new changeset %s\n" %
94 short(n[1]))
95 fetch.add(n[1]) # earliest unknown
96 for p in n[2:4]:
97 if p in m:
98 base.add(p) # latest known
99
100 for p in n[2:4]:
101 if p not in req and p not in m:
102 r.append(p)
103 req.add(p)
104 seen.add(n[0])
105
106 if r:
107 reqcnt += 1
108 repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
109 repo.ui.debug("request %d: %s\n" %
110 (reqcnt, " ".join(map(short, r))))
111 for p in xrange(0, len(r), 10):
112 for b in remote.branches(r[p:p + 10]):
113 repo.ui.debug("received %s:%s\n" %
114 (short(b[0]), short(b[1])))
115 unknown.append(b)
31 if heads:
32 allknown = True
33 nm = repo.changelog.nodemap
34 for h in heads:
35 if nm.get(h) is None:
36 allknown = False
37 break
38 if allknown:
39 return (heads, False, heads)
116 40
117 # do binary search on the branches we found
118 while search:
119 newsearch = []
120 reqcnt += 1
121 repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
122 for n, l in zip(search, remote.between(search)):
123 l.append(n[1])
124 p = n[0]
125 f = 1
126 for i in l:
127 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
128 if i in m:
129 if f <= 2:
130 repo.ui.debug("found new branch changeset %s\n" %
131 short(p))
132 fetch.add(p)
133 base.add(i)
134 else:
135 repo.ui.debug("narrowed branch search to %s:%s\n"
136 % (short(p), short(i)))
137 newsearch.append((p, i))
138 break
139 p, f = i, f * 2
140 search = newsearch
141
142 # sanity check our fetch list
143 for f in fetch:
144 if f in m:
145 raise error.RepoError(_("already have changeset ")
146 + short(f[:4]))
147
148 base = list(base)
149 if base == [nullid]:
150 if force:
151 repo.ui.warn(_("warning: repository is unrelated\n"))
152 else:
153 raise util.Abort(_("repository is unrelated"))
154
155 repo.ui.debug("found new changesets starting at " +
156 " ".join([short(f) for f in fetch]) + "\n")
157
158 repo.ui.progress(_('searching'), None)
159 repo.ui.debug("%d total queries\n" % reqcnt)
160
161 return base, list(fetch), heads
41 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
42 abortwhenunrelated=not force)
43 common, anyinc, srvheads = res
44 return (list(common), anyinc, heads or list(srvheads))
162 45
163 46 def prepush(repo, remote, force, revs, newbranch):
164 47 '''Analyze the local and remote repositories and determine which
@@ -174,9 +57,7 b' def prepush(repo, remote, force, revs, n'
174 57 changegroup is a readable file-like object whose read() returns
175 58 successive changegroup chunks ready to be sent over the wire and
176 59 remoteheads is the list of remote heads.'''
177 remoteheads = remote.heads()
178 common, inc, _rheads = findcommonincoming(repo, remote, heads=remoteheads,
179 force=force)
60 common, inc, remoteheads = findcommonincoming(repo, remote, force=force)
180 61
181 62 cl = repo.changelog
182 63 outg = cl.findmissing(common, revs)
@@ -617,6 +617,17 b' class revlog(object):'
617 617 assert heads
618 618 return (orderedout, roots, heads)
619 619
def headrevs(self):
    '''return the rev numbers of all revisions without children'''
    count = len(self)
    if not count:
        return [nullrev]
    # one flag per rev, plus a spare slot at index -1 that harmlessly
    # absorbs nullrev (-1) parents
    ishead = [1] * (count + 1)
    index = self.index
    for rev in range(count):
        entry = index[rev]
        # clear the head flag of both parents (index slots 5 and 6)
        ishead[entry[5]] = ishead[entry[6]] = 0
    return [rev for rev in range(count) if ishead[rev]]
630
620 631 def heads(self, start=None, stop=None):
621 632 """return the list of all nodes that have no children
622 633
@@ -626,15 +637,9 b' class revlog(object):'
626 637 as if they had no children
627 638 """
628 639 if start is None and stop is None:
629 count = len(self)
630 if not count:
640 if not len(self):
631 641 return [nullid]
632 ishead = [1] * (count + 1)
633 index = self.index
634 for r in xrange(count):
635 e = index[r]
636 ishead[e[5]] = ishead[e[6]] = 0
637 return [self.node(r) for r in xrange(count) if ishead[r]]
642 return [self.node(r) for r in self.headrevs()]
638 643
639 644 if start is None:
640 645 start = nullid
@@ -10,15 +10,12 b' from i18n import _'
10 10 import util, error
11 11
12 12 def findcommonincoming(repo, remote, heads=None, force=False):
13 """Return a tuple (common, anyincoming, heads) used to identify the common
13 """Return a tuple (common, fetch, heads) used to identify the common
14 14 subset of nodes between repo and remote.
15 15
16 16 "common" is a list of (at least) the heads of the common subset.
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
18 locally. If remote does not support getbundle, this actually is a list of
19 roots of the nodes that would be incoming, to be supplied to
20 changegroupsubset. No code except for pull should be relying on this fact
21 any longer.
17 "fetch" is a list of roots of the nodes that would be incoming, to be
18 supplied to changegroupsubset.
22 19 "heads" is either the supplied heads, or else the remote's heads.
23 20 """
24 21
@@ -42,13 +39,6 b' def findcommonincoming(repo, remote, hea'
42 39 # and start by examining the heads
43 40 repo.ui.status(_("searching for changes\n"))
44 41
45 if remote.capable('getbundle'):
46 myheads = repo.heads()
47 known = remote.known(myheads)
48 if util.all(known):
49 hasincoming = set(heads).difference(set(myheads)) and True
50 return myheads, hasincoming, heads
51
52 42 unknown = []
53 43 for h in heads:
54 44 if h not in m:
@@ -159,130 +149,3 b' def findcommonincoming(repo, remote, hea'
159 149 repo.ui.debug("%d total queries\n" % reqcnt)
160 150
161 151 return base, list(fetch), heads
162
163 def prepush(repo, remote, force, revs, newbranch):
164 '''Analyze the local and remote repositories and determine which
165 changesets need to be pushed to the remote. Return value depends
166 on circumstances:
167
168 If we are not going to push anything, return a tuple (None,
169 outgoing) where outgoing is 0 if there are no outgoing
170 changesets and 1 if there are, but we refuse to push them
171 (e.g. would create new remote heads).
172
173 Otherwise, return a tuple (changegroup, remoteheads), where
174 changegroup is a readable file-like object whose read() returns
175 successive changegroup chunks ready to be sent over the wire and
176 remoteheads is the list of remote heads.'''
177 remoteheads = remote.heads()
178 common, inc, _rheads = findcommonincoming(repo, remote, heads=remoteheads,
179 force=force)
180
181 cl = repo.changelog
182 outg = cl.findmissing(common, revs)
183
184 if not outg:
185 repo.ui.status(_("no changes found\n"))
186 return None, 1
187
188 if not force and remoteheads != [nullid]:
189 if remote.capable('branchmap'):
190 # Check for each named branch if we're creating new remote heads.
191 # To be a remote head after push, node must be either:
192 # - unknown locally
193 # - a local outgoing head descended from update
194 # - a remote head that's known locally and not
195 # ancestral to an outgoing head
196
197 # 1. Create set of branches involved in the push.
198 branches = set(repo[n].branch() for n in outg)
199
200 # 2. Check for new branches on the remote.
201 remotemap = remote.branchmap()
202 newbranches = branches - set(remotemap)
203 if newbranches and not newbranch: # new branch requires --new-branch
204 branchnames = ', '.join(sorted(newbranches))
205 raise util.Abort(_("push creates new remote branches: %s!")
206 % branchnames,
207 hint=_("use 'hg push --new-branch' to create"
208 " new remote branches"))
209 branches.difference_update(newbranches)
210
211 # 3. Construct the initial oldmap and newmap dicts.
212 # They contain information about the remote heads before and
213 # after the push, respectively.
214 # Heads not found locally are not included in either dict,
215 # since they won't be affected by the push.
216 # unsynced contains all branches with incoming changesets.
217 oldmap = {}
218 newmap = {}
219 unsynced = set()
220 for branch in branches:
221 remotebrheads = remotemap[branch]
222 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
223 oldmap[branch] = prunedbrheads
224 newmap[branch] = list(prunedbrheads)
225 if len(remotebrheads) > len(prunedbrheads):
226 unsynced.add(branch)
227
228 # 4. Update newmap with outgoing changes.
229 # This will possibly add new heads and remove existing ones.
230 ctxgen = (repo[n] for n in outg)
231 repo._updatebranchcache(newmap, ctxgen)
232
233 else:
234 # 1-4b. old servers: Check for new topological heads.
235 # Construct {old,new}map with branch = None (topological branch).
236 # (code based on _updatebranchcache)
237 oldheads = set(h for h in remoteheads if h in cl.nodemap)
238 newheads = oldheads.union(outg)
239 if len(newheads) > 1:
240 for latest in reversed(outg):
241 if latest not in newheads:
242 continue
243 minhrev = min(cl.rev(h) for h in newheads)
244 reachable = cl.reachable(latest, cl.node(minhrev))
245 reachable.remove(latest)
246 newheads.difference_update(reachable)
247 branches = set([None])
248 newmap = {None: newheads}
249 oldmap = {None: oldheads}
250 unsynced = inc and branches or set()
251
252 # 5. Check for new heads.
253 # If there are more heads after the push than before, a suitable
254 # error message, depending on unsynced status, is displayed.
255 error = None
256 for branch in branches:
257 newhs = set(newmap[branch])
258 oldhs = set(oldmap[branch])
259 if len(newhs) > len(oldhs):
260 if error is None:
261 if branch:
262 error = _("push creates new remote heads "
263 "on branch '%s'!") % branch
264 else:
265 error = _("push creates new remote heads!")
266 if branch in unsynced:
267 hint = _("you should pull and merge or "
268 "use push -f to force")
269 else:
270 hint = _("did you forget to merge? "
271 "use push -f to force")
272 if branch:
273 repo.ui.debug("new remote heads on branch '%s'\n" % branch)
274 for h in (newhs - oldhs):
275 repo.ui.debug("new remote head %s\n" % short(h))
276 if error:
277 raise util.Abort(error, hint=hint)
278
279 # 6. Check for unsynced changes on involved branches.
280 if unsynced:
281 repo.ui.warn(_("note: unsynced remote changes!\n"))
282
283 if revs is None:
284 # use the fast path, no race possible on push
285 cg = repo._changegroup(outg, 'push')
286 else:
287 cg = repo.getbundle('push', heads=revs, common=common)
288 return cg, remoteheads
@@ -82,7 +82,9 b' Extension disabled for lack of a hook'
82 82 hgrc = """
83 83 """
84 84 pushing to ../b
85 query 1; heads
85 86 searching for changes
87 all remote heads known locally
86 88 3 changesets found
87 89 list of changesets:
88 90 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -135,7 +137,9 b' Extension disabled for lack of acl.sourc'
135 137 pretxnchangegroup.acl = python:hgext.acl.hook
136 138 """
137 139 pushing to ../b
140 query 1; heads
138 141 searching for changes
142 all remote heads known locally
139 143 invalidating branch cache (tip differs)
140 144 3 changesets found
141 145 list of changesets:
@@ -192,7 +196,9 b' No [acl.allow]/[acl.deny]'
192 196 sources = push
193 197 """
194 198 pushing to ../b
199 query 1; heads
195 200 searching for changes
201 all remote heads known locally
196 202 invalidating branch cache (tip differs)
197 203 3 changesets found
198 204 list of changesets:
@@ -258,7 +264,9 b' Empty [acl.allow]'
258 264 [acl.allow]
259 265 """
260 266 pushing to ../b
267 query 1; heads
261 268 searching for changes
269 all remote heads known locally
262 270 invalidating branch cache (tip differs)
263 271 3 changesets found
264 272 list of changesets:
@@ -322,7 +330,9 b' fred is allowed inside foo/'
322 330 foo/** = fred
323 331 """
324 332 pushing to ../b
333 query 1; heads
325 334 searching for changes
335 all remote heads known locally
326 336 3 changesets found
327 337 list of changesets:
328 338 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -390,7 +400,9 b' Empty [acl.deny]'
390 400 [acl.deny]
391 401 """
392 402 pushing to ../b
403 query 1; heads
393 404 searching for changes
405 all remote heads known locally
394 406 3 changesets found
395 407 list of changesets:
396 408 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -455,7 +467,9 b' fred is allowed inside foo/, but not foo'
455 467 foo/bar/** = fred
456 468 """
457 469 pushing to ../b
470 query 1; heads
458 471 searching for changes
472 all remote heads known locally
459 473 3 changesets found
460 474 list of changesets:
461 475 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -525,7 +539,9 b' fred is allowed inside foo/, but not foo'
525 539 foo/Bar/** = fred
526 540 """
527 541 pushing to ../b
542 query 1; heads
528 543 searching for changes
544 all remote heads known locally
529 545 3 changesets found
530 546 list of changesets:
531 547 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -592,7 +608,9 b' fred is allowed inside foo/, but not foo'
592 608 foo/Bar/** = fred
593 609 """
594 610 pushing to ../b
611 query 1; heads
595 612 searching for changes
613 all remote heads known locally
596 614 3 changesets found
597 615 list of changesets:
598 616 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -661,7 +679,9 b' barney is allowed everywhere'
661 679 ** = barney
662 680 """
663 681 pushing to ../b
682 query 1; heads
664 683 searching for changes
684 all remote heads known locally
665 685 3 changesets found
666 686 list of changesets:
667 687 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -733,7 +753,9 b' wilma can change files with a .txt exten'
733 753 **/*.txt = wilma
734 754 """
735 755 pushing to ../b
756 query 1; heads
736 757 searching for changes
758 all remote heads known locally
737 759 invalidating branch cache (tip differs)
738 760 3 changesets found
739 761 list of changesets:
@@ -810,7 +832,9 b' file specified by acl.config does not ex'
810 832 config = ../acl.config
811 833 """
812 834 pushing to ../b
835 query 1; heads
813 836 searching for changes
837 all remote heads known locally
814 838 3 changesets found
815 839 list of changesets:
816 840 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -880,7 +904,9 b' betty is allowed inside foo/ by a acl.co'
880 904 foo/** = betty
881 905 """
882 906 pushing to ../b
907 query 1; heads
883 908 searching for changes
909 all remote heads known locally
884 910 3 changesets found
885 911 list of changesets:
886 912 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -962,7 +988,9 b' acl.config can set only [acl.allow]/[acl'
962 988 changegroup.acl = false
963 989 """
964 990 pushing to ../b
991 query 1; heads
965 992 searching for changes
993 all remote heads known locally
966 994 3 changesets found
967 995 list of changesets:
968 996 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -1035,7 +1063,9 b' fred is always allowed'
1035 1063 ** = fred
1036 1064 """
1037 1065 pushing to ../b
1066 query 1; heads
1038 1067 searching for changes
1068 all remote heads known locally
1039 1069 invalidating branch cache (tip differs)
1040 1070 3 changesets found
1041 1071 list of changesets:
@@ -1105,7 +1135,9 b' no one is allowed inside foo/Bar/'
1105 1135 foo/Bar/** = *
1106 1136 """
1107 1137 pushing to ../b
1138 query 1; heads
1108 1139 searching for changes
1140 all remote heads known locally
1109 1141 invalidating branch cache (tip differs)
1110 1142 3 changesets found
1111 1143 list of changesets:
@@ -1178,7 +1210,9 b' OS-level groups'
1178 1210 ** = @group1
1179 1211 """
1180 1212 pushing to ../b
1213 query 1; heads
1181 1214 searching for changes
1215 all remote heads known locally
1182 1216 3 changesets found
1183 1217 list of changesets:
1184 1218 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -1248,7 +1282,9 b' OS-level groups'
1248 1282 foo/Bar/** = @group1
1249 1283 """
1250 1284 pushing to ../b
1285 query 1; heads
1251 1286 searching for changes
1287 all remote heads known locally
1252 1288 invalidating branch cache (tip differs)
1253 1289 3 changesets found
1254 1290 list of changesets:
@@ -1359,7 +1395,9 b' No branch acls specified'
1359 1395 [extensions]
1360 1396 """
1361 1397 pushing to ../b
1398 query 1; heads
1362 1399 searching for changes
1400 all remote heads known locally
1363 1401 4 changesets found
1364 1402 list of changesets:
1365 1403 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -1436,7 +1474,9 b' Branch acl deny test'
1436 1474 foobar = *
1437 1475 """
1438 1476 pushing to ../b
1477 query 1; heads
1439 1478 searching for changes
1479 all remote heads known locally
1440 1480 invalidating branch cache (tip differs)
1441 1481 4 changesets found
1442 1482 list of changesets:
@@ -1512,7 +1552,9 b' Branch acl empty allow test'
1512 1552 [acl.allow.branches]
1513 1553 """
1514 1554 pushing to ../b
1555 query 1; heads
1515 1556 searching for changes
1557 all remote heads known locally
1516 1558 4 changesets found
1517 1559 list of changesets:
1518 1560 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -1583,7 +1625,9 b' Branch acl allow other'
1583 1625 * = george
1584 1626 """
1585 1627 pushing to ../b
1628 query 1; heads
1586 1629 searching for changes
1630 all remote heads known locally
1587 1631 4 changesets found
1588 1632 list of changesets:
1589 1633 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -1648,7 +1692,9 b' Branch acl allow other'
1648 1692 * = george
1649 1693 """
1650 1694 pushing to ../b
1695 query 1; heads
1651 1696 searching for changes
1697 all remote heads known locally
1652 1698 4 changesets found
1653 1699 list of changesets:
1654 1700 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
@@ -1730,7 +1776,9 b' push foobar into the remote'
1730 1776 * = george
1731 1777 """
1732 1778 pushing to ../b
1779 query 1; heads
1733 1780 searching for changes
1781 all remote heads known locally
1734 1782 invalidating branch cache (tip differs)
1735 1783 4 changesets found
1736 1784 list of changesets:
@@ -1812,7 +1860,9 b' Branch acl conflicting deny'
1812 1860 * = george
1813 1861 """
1814 1862 pushing to ../b
1863 query 1; heads
1815 1864 searching for changes
1865 all remote heads known locally
1816 1866 invalidating branch cache (tip differs)
1817 1867 4 changesets found
1818 1868 list of changesets:
@@ -39,7 +39,6 b' import bookmark by name'
39 39 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
40 40 $ hg pull -B X ../a
41 41 pulling from ../a
42 searching for changes
43 42 no changes found
44 43 importing bookmark X
45 44 $ hg bookmark
@@ -173,7 +172,6 b' hgweb'
173 172 foobar 000000000000
174 173 $ hg pull -B Z http://localhost:$HGPORT/
175 174 pulling from http://localhost:$HGPORT/
176 searching for changes
177 175 no changes found
178 176 not updating divergent bookmark X
179 177 importing bookmark Z
@@ -561,7 +561,9 b' bundle single branch'
561 561 == bundling
562 562
563 563 $ hg bundle bundle.hg part --debug
564 query 1; heads
564 565 searching for changes
566 all remote heads known locally
565 567 2 changesets found
566 568 list of changesets:
567 569 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
@@ -75,6 +75,7 b' Show debug commands if there are no othe'
75 75 debugdag
76 76 debugdata
77 77 debugdate
78 debugdiscovery
78 79 debugfsinfo
79 80 debuggetbundle
80 81 debugignore
@@ -219,6 +220,7 b' Show all commands + options'
219 220 debugdag: tags, branches, dots, spaces
220 221 debugdata:
221 222 debugdate: extended
223 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
222 224 debugfsinfo:
223 225 debuggetbundle: head, common, type
224 226 debugignore:
@@ -189,7 +189,7 b' listkeys hook'
189 189 $ hg pull -B bar ../a
190 190 pulling from ../a
191 191 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
192 searching for changes
192 no changes found
193 193 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
194 194 importing bookmark bar
195 195 $ cd ../a
@@ -31,14 +31,12 b''
31 31
32 32 $ hg push --debug ../a
33 33 pushing to ../a
34 query 1; heads
34 35 searching for changes
35 examining 1c9246a22a0a:d8d565842d04
36 found incomplete branch 1c9246a22a0a:d8d565842d04
37 searching: 1 queries
38 narrowing 1:1 d8d565842d04
39 found new branch changeset 1c9246a22a0a
40 found new changesets starting at 1c9246a22a0a
41 1 total queries
36 taking quick initial sample
37 searching: 2 queries
38 query 2; still undecided: 2, sample size is: 2
39 2 total queries
42 40 new remote heads on branch 'default'
43 41 new remote head 1e108cc5548c
44 42 abort: push creates new remote heads on branch 'default'!
@@ -27,9 +27,10 b' check that {1} syntax works'
27 27 using http://localhost:$HGPORT/
28 28 sending capabilities command
29 29 comparing with parts://localhost/
30 query 1; heads
30 31 sending heads command
31 32 searching for changes
32 sending known command
33 all remote heads known locally
33 34 no changes found
34 35 [1]
35 36
@@ -219,7 +219,6 b' test pushkeys and bookmarks'
219 219 $ hg book -f -r 0 foo
220 220 $ hg pull -B foo
221 221 pulling from ssh://user@dummy/remote
222 searching for changes
223 222 no changes found
224 223 updating bookmark foo
225 224 importing bookmark foo
General Comments 0
You need to be logged in to leave comments. Login now