##// END OF EJS Templates
discovery: log discovery result in non-trivial cases...
marmoute -
r32713:28240b75 default
parent child Browse files
Show More
@@ -1,253 +1,256 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
 9 9 Algorithm works in the following way. You have two repositories: local and
 10 10 remote. They both contain a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of node *common*,
13 13 the set of nodes shared by local and remote.
14 14
 15 15 One of the issues with the original protocol was latency, it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case, you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which given a set of changelists tells you if they are present in the DAG.
22 22
 23 23 The algorithm then works as follows:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple optimizations, first is instead of starting with a random
34 34 sample of missing, start by sending all heads, in the case where the local
35 35 repo is a subset, you computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with it (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from __future__ import absolute_import
44 44
45 45 import collections
46 46 import random
47 47
48 48 from .i18n import _
49 49 from .node import (
50 50 nullid,
51 51 nullrev,
52 52 )
53 53 from . import (
54 54 dagutil,
55 55 error,
56 56 util,
57 57 )
58 58
59 59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 60 """update an existing sample to match the expected size
61 61
62 62 The sample is updated with nodes exponentially distant from each head of the
63 63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64 64
65 65 If a target size is specified, the sampling will stop once this size is
66 66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 67 reached.
68 68
69 69 :dag: a dag object from dagutil
70 70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 71 :sample: a sample to update
72 72 :quicksamplesize: optional target size of the sample"""
73 73 # if nodes is empty we scan the entire graph
74 74 if nodes:
75 75 heads = dag.headsetofconnecteds(nodes)
76 76 else:
77 77 heads = dag.heads()
78 78 dist = {}
79 79 visit = collections.deque(heads)
80 80 seen = set()
81 81 factor = 1
82 82 while visit:
83 83 curr = visit.popleft()
84 84 if curr in seen:
85 85 continue
86 86 d = dist.setdefault(curr, 1)
87 87 if d > factor:
88 88 factor *= 2
89 89 if d == factor:
90 90 sample.add(curr)
91 91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 92 return
93 93 seen.add(curr)
94 94 for p in dag.parents(curr):
95 95 if not nodes or p in nodes:
96 96 dist.setdefault(p, d + 1)
97 97 visit.append(p)
98 98
def _takequicksample(dag, nodes, size):
    """takes a quick sample of size <size>

    It is meant for initial sampling and focuses on querying heads and close
    ancestors of heads.

    :dag: a dag object
    :nodes: set of nodes to discover
    :size: the maximum size of the sample"""
    sample = dag.headsetofconnecteds(nodes)
    if len(sample) >= size:
        # already have enough heads; trim down to the requested size
        return _limitsample(sample, size)
    # grow the sample with exponentially spaced close ancestors of the heads
    _updatesample(dag, None, sample, quicksamplesize=size)
    return sample
def _takefullsample(dag, nodes, size):
    """take a sample of <size> nodes spread across the whole undecided set

    Seeds the sample with the heads of <nodes>, then enriches it with nodes
    exponentially distant from both the heads and (via the inverted dag) the
    roots, and finally pads with random undecided nodes if still short.
    """
    sample = dag.headsetofconnecteds(nodes)
    # enrich from the heads downward...
    _updatesample(dag, nodes, sample)
    # ...and from the roots upward, using the inverted graph
    _updatesample(dag.inverse(), nodes, sample)
    assert sample
    sample = _limitsample(sample, size)
    shortfall = size - len(sample)
    if shortfall > 0:
        # pad with random undecided nodes not already sampled
        sample.update(random.sample(list(nodes - sample), shortfall))
    return sample
127 127 def _limitsample(sample, desiredlen):
128 128 """return a random subset of sample of at most desiredlen item"""
129 129 if len(sample) > desiredlen:
130 130 sample = set(random.sample(sample, desiredlen))
131 131 return sample
132 132
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    :ui: ui object, used for debug/status/progress output and ui.log
    :local: the local repository
    :remote: a peer object supporting iterbatch()/known()
    :initialsamplesize: cap on the first (cheap) sample of local heads
    :fullsamplesize: cap on samples once full discovery is under way
    :abortwhenunrelated: raise error.Abort instead of warning when the two
        repositories share no common head
    '''
    start = util.timer()

    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = _limitsample(ownheads, initialsamplesize)
    # indices between sample and externalized version must match
    sample = list(sample)
    # batch heads() and known() into a single round trip to the remote
    batch = remote.iterbatch()
    batch.heads()
    batch.known(dag.externalizeall(sample))
    batch.submit()
    srvheadhashes, yesno = batch.results()

    # empty local repo: everything the server has (if anything) is incoming
    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    # remote heads we already have locally (unknown ones are filtered out)
    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return (srvheadhashes, False, srvheadhashes,)

    # if every sampled local head is known remotely (and the sample covered
    # all local heads), local is a subset of remote
    if sample and len(ownheads) <= initialsamplesize and all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = dag.externalizeall(ownheads)
        return (ownheadhashes, True, srvheadhashes,)

    # full blown discovery

    # own nodes I know we both know
    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common = cl.incrementalmissingrevs(srvheads)
    commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
    common.addbases(commoninsample)
    # own nodes where I don't know if remote knows them
    undecided = set(common.missingancestors(ownheads))
    # own nodes I know remote lacks
    missing = set()

    full = False
    while undecided:

        if sample:
            # nodes the remote reported unknown, plus all their descendants,
            # are definitely missing on the remote
            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)

        if not undecided:
            break

        # once we have some common bases (or after the first full round),
        # sample from both directions; otherwise take the cheaper quick sample
        if full or common.hasbases():
            if full:
                ui.note(_("sampling from both directions\n"))
            else:
                ui.debug("taking initial sample\n")
            samplefunc = _takefullsample
            targetsize = fullsamplesize
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            samplefunc = _takequicksample
            targetsize = initialsamplesize
        if len(undecided) < targetsize:
            # small enough to just ask about everything left
            sample = list(undecided)
        else:
            sample = samplefunc(dag, undecided, targetsize)
            sample = _limitsample(sample, targetsize)

        roundtrips += 1
        ui.progress(_('searching'), roundtrips, unit=_('queries'))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))
        full = True

        if sample:
            # fold the remote's answers back into the common set and shrink
            # the undecided set accordingly
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.addbases(commoninsample)
            common.removeancestorsfrom(undecided)

    # heads(common) == heads(common.bases) since common represents common.bases
    # and all its ancestors
    result = dag.headsetofconnecteds(common.bases)
    # common.bases can include nullrev, but our contract requires us to not
    # return any heads in that case, so discard that
    result.discard(nullrev)
    elapsed = util.timer() - start
    ui.progress(_('searching'), None)
    ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
    # NOTE(review): the second %d is len(srvheadhashes), i.e. the number of
    # remote heads, not strictly "missing heads" — confirm the intended wording
    msg = 'found %d common and %d missing heads, %d roundtrips in %.4fs\n'
    ui.log('discovery', msg, len(result), len(srvheadhashes), roundtrips,
           elapsed)

    # no common head at all: the repositories are unrelated
    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise error.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
            return ({nullid}, True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    return dag.externalizeall(result), anyincoming, srvheadhashes
@@ -1,405 +1,410 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- b -> a tree"
 20 20 > hg -R b debugdiscovery a --verbose --old
21 21 > echo
22 22 > echo "% -- b -> a set"
23 23 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
24 24 > cd ..
25 25 > }
26 26
27 27
28 28 Small superset:
29 29
30 30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 31 > +2:f +1:a1:b1
32 32 > <f +4 :a2
33 33 > +5 :b2
34 34 > <f +3 :b3'
35 35
36 36 % -- a -> b tree
37 37 comparing with b
38 38 searching for changes
39 39 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
40 40 common heads: 01241442b3c2 b5714e113bc0
41 41 local is subset
42 42
43 43 % -- a -> b set
44 44 comparing with b
45 45 query 1; heads
46 46 searching for changes
47 47 all local heads known remotely
48 48 common heads: 01241442b3c2 b5714e113bc0
49 49 local is subset
50 50
51 51 % -- b -> a tree
52 52 comparing with a
53 53 searching for changes
54 54 unpruned common: 01241442b3c2 b5714e113bc0
55 55 common heads: 01241442b3c2 b5714e113bc0
56 56 remote is subset
57 57
58 58 % -- b -> a set
59 59 comparing with a
60 60 query 1; heads
61 61 searching for changes
62 62 all remote heads known locally
63 63 common heads: 01241442b3c2 b5714e113bc0
64 64 remote is subset
65 65
66 66
67 67 Many new:
68 68
69 69 $ testdesc '-ra1 -ra2' '-rb' '
70 70 > +2:f +3:a1 +3:b
71 71 > <f +30 :a2'
72 72
73 73 % -- a -> b tree
74 74 comparing with b
75 75 searching for changes
76 76 unpruned common: bebd167eb94d
77 77 common heads: bebd167eb94d
78 78
79 79 % -- a -> b set
80 80 comparing with b
81 81 query 1; heads
82 82 searching for changes
83 83 taking initial sample
84 84 searching: 2 queries
85 85 query 2; still undecided: 29, sample size is: 29
86 86 2 total queries in *.????s (glob)
87 87 common heads: bebd167eb94d
88 88
89 89 % -- b -> a tree
90 90 comparing with a
91 91 searching for changes
92 92 unpruned common: 66f7d451a68b bebd167eb94d
93 93 common heads: bebd167eb94d
94 94
95 95 % -- b -> a set
96 96 comparing with a
97 97 query 1; heads
98 98 searching for changes
99 99 taking initial sample
100 100 searching: 2 queries
101 101 query 2; still undecided: 2, sample size is: 2
102 102 2 total queries in *.????s (glob)
103 103 common heads: bebd167eb94d
104 104
105 105 Both sides many new with stub:
106 106
107 107 $ testdesc '-ra1 -ra2' '-rb' '
108 108 > +2:f +2:a1 +30 :b
109 109 > <f +30 :a2'
110 110
111 111 % -- a -> b tree
112 112 comparing with b
113 113 searching for changes
114 114 unpruned common: 2dc09a01254d
115 115 common heads: 2dc09a01254d
116 116
117 117 % -- a -> b set
118 118 comparing with b
119 119 query 1; heads
120 120 searching for changes
121 121 taking initial sample
122 122 searching: 2 queries
123 123 query 2; still undecided: 29, sample size is: 29
124 124 2 total queries in *.????s (glob)
125 125 common heads: 2dc09a01254d
126 126
127 127 % -- b -> a tree
128 128 comparing with a
129 129 searching for changes
130 130 unpruned common: 2dc09a01254d 66f7d451a68b
131 131 common heads: 2dc09a01254d
132 132
133 133 % -- b -> a set
134 134 comparing with a
135 135 query 1; heads
136 136 searching for changes
137 137 taking initial sample
138 138 searching: 2 queries
139 139 query 2; still undecided: 29, sample size is: 29
140 140 2 total queries in *.????s (glob)
141 141 common heads: 2dc09a01254d
142 142
143 143
144 144 Both many new:
145 145
146 146 $ testdesc '-ra' '-rb' '
147 147 > +2:f +30 :b
148 148 > <f +30 :a'
149 149
150 150 % -- a -> b tree
151 151 comparing with b
152 152 searching for changes
153 153 unpruned common: 66f7d451a68b
154 154 common heads: 66f7d451a68b
155 155
156 156 % -- a -> b set
157 157 comparing with b
158 158 query 1; heads
159 159 searching for changes
160 160 taking quick initial sample
161 161 searching: 2 queries
162 162 query 2; still undecided: 31, sample size is: 31
163 163 2 total queries in *.????s (glob)
164 164 common heads: 66f7d451a68b
165 165
166 166 % -- b -> a tree
167 167 comparing with a
168 168 searching for changes
169 169 unpruned common: 66f7d451a68b
170 170 common heads: 66f7d451a68b
171 171
172 172 % -- b -> a set
173 173 comparing with a
174 174 query 1; heads
175 175 searching for changes
176 176 taking quick initial sample
177 177 searching: 2 queries
178 178 query 2; still undecided: 31, sample size is: 31
179 179 2 total queries in *.????s (glob)
180 180 common heads: 66f7d451a68b
181 181
182 182
183 183 Both many new skewed:
184 184
185 185 $ testdesc '-ra' '-rb' '
186 186 > +2:f +30 :b
187 187 > <f +50 :a'
188 188
189 189 % -- a -> b tree
190 190 comparing with b
191 191 searching for changes
192 192 unpruned common: 66f7d451a68b
193 193 common heads: 66f7d451a68b
194 194
195 195 % -- a -> b set
196 196 comparing with b
197 197 query 1; heads
198 198 searching for changes
199 199 taking quick initial sample
200 200 searching: 2 queries
201 201 query 2; still undecided: 51, sample size is: 51
202 202 2 total queries in *.????s (glob)
203 203 common heads: 66f7d451a68b
204 204
205 205 % -- b -> a tree
206 206 comparing with a
207 207 searching for changes
208 208 unpruned common: 66f7d451a68b
209 209 common heads: 66f7d451a68b
210 210
211 211 % -- b -> a set
212 212 comparing with a
213 213 query 1; heads
214 214 searching for changes
215 215 taking quick initial sample
216 216 searching: 2 queries
217 217 query 2; still undecided: 31, sample size is: 31
218 218 2 total queries in *.????s (glob)
219 219 common heads: 66f7d451a68b
220 220
221 221
222 222 Both many new on top of long history:
223 223
224 224 $ testdesc '-ra' '-rb' '
225 225 > +1000:f +30 :b
226 226 > <f +50 :a'
227 227
228 228 % -- a -> b tree
229 229 comparing with b
230 230 searching for changes
231 231 unpruned common: 7ead0cba2838
232 232 common heads: 7ead0cba2838
233 233
234 234 % -- a -> b set
235 235 comparing with b
236 236 query 1; heads
237 237 searching for changes
238 238 taking quick initial sample
239 239 searching: 2 queries
240 240 query 2; still undecided: 1049, sample size is: 11
241 241 sampling from both directions
242 242 searching: 3 queries
243 243 query 3; still undecided: 31, sample size is: 31
244 244 3 total queries in *.????s (glob)
245 245 common heads: 7ead0cba2838
246 246
247 247 % -- b -> a tree
248 248 comparing with a
249 249 searching for changes
250 250 unpruned common: 7ead0cba2838
251 251 common heads: 7ead0cba2838
252 252
253 253 % -- b -> a set
254 254 comparing with a
255 255 query 1; heads
256 256 searching for changes
257 257 taking quick initial sample
258 258 searching: 2 queries
259 259 query 2; still undecided: 1029, sample size is: 11
260 260 sampling from both directions
261 261 searching: 3 queries
262 262 query 3; still undecided: 15, sample size is: 15
263 263 3 total queries in *.????s (glob)
264 264 common heads: 7ead0cba2838
265 265
266 266
267 267 One with >200 heads, which used to use up all of the sample:
268 268
269 269 $ hg init manyheads
270 270 $ cd manyheads
271 271 $ echo "+300:r @a" >dagdesc
272 272 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
273 273 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 274 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 275 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 276 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 277 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 278 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 279 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 280 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 281 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 282 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 283 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 284 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 285 $ echo "@b *r+3" >>dagdesc # one more head
286 286 $ hg debugbuilddag <dagdesc
287 287 reading DAG from stdin
288 288
289 289 $ hg heads -t --template . | wc -c
290 290 \s*261 (re)
291 291
292 292 $ hg clone -b a . a
293 293 adding changesets
294 294 adding manifests
295 295 adding file changes
296 296 added 1340 changesets with 0 changes to 0 files (+259 heads)
297 297 updating to branch a
298 298 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
299 299 $ hg clone -b b . b
300 300 adding changesets
301 301 adding manifests
302 302 adding file changes
303 303 added 304 changesets with 0 changes to 0 files
304 304 updating to branch b
305 305 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
306 306
307 307 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
308 308 comparing with b
309 309 query 1; heads
310 310 searching for changes
311 311 taking quick initial sample
312 312 searching: 2 queries
313 313 query 2; still undecided: 1240, sample size is: 100
314 314 sampling from both directions
315 315 searching: 3 queries
316 316 query 3; still undecided: 1140, sample size is: 200
317 317 sampling from both directions
318 318 searching: 4 queries
319 319 query 4; still undecided: \d+, sample size is: 200 (re)
320 320 sampling from both directions
321 321 searching: 5 queries
322 322 query 5; still undecided: \d+, sample size is: 200 (re)
323 323 sampling from both directions
324 324 searching: 6 queries
325 325 query 6; still undecided: \d+, sample size is: \d+ (re)
326 326 6 total queries in *.????s (glob)
327 327 common heads: 3ee37d65064a
328 328
329 329 Test actual protocol when pulling one new head in addition to common heads
330 330
331 331 $ hg clone -U b c
332 332 $ hg -R c id -ir tip
333 333 513314ca8b3a
334 334 $ hg -R c up -qr default
335 335 $ touch c/f
336 336 $ hg -R c ci -Aqm "extra head"
337 337 $ hg -R c id -i
338 338 e64a39e7da8b
339 339
340 340 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
341 341 $ cat hg.pid >> $DAEMON_PIDS
342 342
343 343 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
344 344 comparing with http://localhost:$HGPORT/
345 345 searching for changes
346 346 e64a39e7da8b
347 347
348 348 $ killdaemons.py
349 349 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
350 350 "GET /?cmd=capabilities HTTP/1.1" 200 -
351 351 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
352 352 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
353 353 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
354 354 $ cat errors.log
355 355
356 356 $ cd ..
357 357
358 358
359 359 Issue 4438 - test coverage for 3ef893520a85 issues.
360 360
361 361 $ mkdir issue4438
362 362 $ cd issue4438
363 363 #if false
364 364 generate new bundles:
365 365 $ hg init r1
366 366 $ for i in `python $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
367 367 $ hg clone -q r1 r2
368 368 $ for i in `python $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
369 369 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
370 370 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
371 371 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
372 372 #else
373 373 use existing bundles:
374 374 $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1
375 375 $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2
376 376 #endif
377 377
378 378 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
379 379
380 380 $ hg -R r1 outgoing r2 -T'{rev} '
381 381 comparing with r2
382 382 searching for changes
383 383 101 102 103 104 105 106 107 108 109 110 (no-eol)
384 384
385 385 The case where all the 'initialsamplesize' samples already were common would
386 386 give 'all remote heads known locally' without checking the remaining heads -
387 387 fixed in 86c35b7ae300:
388 388
389 389 $ cat >> $TESTTMP/unrandomsample.py << EOF
390 390 > import random
391 391 > def sample(population, k):
392 392 > return sorted(population)[:k]
393 393 > random.sample = sample
394 394 > EOF
395 395
396 396 $ cat >> r1/.hg/hgrc << EOF
397 397 > [extensions]
398 398 > unrandomsample = $TESTTMP/unrandomsample.py
399 399 > EOF
400 400
401 $ hg -R r1 outgoing r2 -T'{rev} '
401 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox=
402 402 comparing with r2
403 403 searching for changes
404 404 101 102 103 104 105 106 107 108 109 110 (no-eol)
405 $ hg -R r1 --config extensions.blackbox= blackbox
406 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> outgoing r2 '-T{rev} ' (glob)
407 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 101 missing heads, 2 roundtrips in *.????s (glob)
408 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 '-T{rev} ' --config 'extensions.blackbox=' exited 0 after *.?? seconds (glob)
409 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> blackbox (glob)
405 410 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now